| text (string, lengths 4 – 1.02M) | meta (dict) |
|---|---|
import sys
from tqdm import tqdm
from csv import DictWriter
import argparse
from datetime import date, datetime
from django.core.management.base import BaseCommand
from catalog.elastic_models import NACPDeclaration
class Command(BaseCommand):
    """Management command that streams a CSV report of NACP declaration
    submissions from Elasticsearch.

    One row is emitted per declaration, with the document URL, submission
    date, document type and (when present) the declarant's post category
    and post type from step 1 of the original NACP form.
    """

    help = "Export submission report as CSV"

    def add_arguments(self, parser):
        # Where to write the CSV; defaults to stdout so the command can be piped.
        parser.add_argument(
            "--outfile", nargs="?", type=argparse.FileType("w"), default=sys.stdout
        )
        # Earliest submission year to include (inclusive).
        parser.add_argument("--year_since", type=int, default=2015)

    def handle(self, *args, **options):
        all_decls = (
            NACPDeclaration.search()
            .query("match_all")
            # Fetch only the fields the report needs to keep the scan light.
            .source(
                [
                    "declaration.url",
                    "intro.date",
                    "intro.doc_type",
                    "nacp_orig.step_1",
                ]
            )
        )

        # Keep declarations dated from Jan 1 of --year_since up to (but not
        # including) today's midnight, i.e. exclude documents filed today.
        all_decls = all_decls.filter(
            "range",
            intro__date={
                "gte": date(options["year_since"], 1, 1),
                "lt": datetime.now().replace(hour=0, minute=0, second=0, microsecond=0),
            },
        )

        w = DictWriter(
            options["outfile"],
            fieldnames=[
                "id",
                "declaration.url",
                "intro.date",
                "intro.doc_type",
                "nacp_orig.step_1.postCategory",
                "nacp_orig.step_1.postType",
            ],
        )
        # BUG FIX: the header row was never written, producing a CSV whose
        # columns could not be identified by consumers.
        w.writeheader()

        # scan() streams every hit without pagination limits; count() gives
        # tqdm a total for its progress bar.
        for decl in tqdm(all_decls.scan(), total=all_decls.count()):
            w.writerow(
                {
                    "id": decl.meta.id,
                    "declaration.url": decl.declaration.url,
                    "intro.date": decl.intro.date.date(),
                    "intro.doc_type": decl.intro.doc_type,
                    # step_1 fields are optional in the source data, hence getattr.
                    "nacp_orig.step_1.postCategory": getattr(
                        decl.nacp_orig.step_1, "postCategory", ""
                    ),
                    "nacp_orig.step_1.postType": getattr(
                        decl.nacp_orig.step_1, "postType", ""
                    ),
                }
            )
|
{
"content_hash": "b2bffc721fb5dd6afc15f34e4c36d671",
"timestamp": "",
"source": "github",
"line_count": 67,
"max_line_length": 88,
"avg_line_length": 31.62686567164179,
"alnum_prop": 0.465785747994337,
"repo_name": "dchaplinsky/declarations.com.ua",
"id": "e3d881d65095d603c29b5928418989ff5056c1ae",
"size": "2119",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "declarations_site/catalog/management/commands/export_submission_report.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "3235"
},
{
"name": "HTML",
"bytes": "3593"
},
{
"name": "JavaScript",
"bytes": "1559768"
},
{
"name": "Jinja",
"bytes": "591048"
},
{
"name": "Python",
"bytes": "561374"
},
{
"name": "SCSS",
"bytes": "546001"
},
{
"name": "Shell",
"bytes": "1517"
}
],
"symlink_target": ""
}
|
import unittest
from slack_sdk.oauth.installation_store import Installation, FileInstallationStore
from slack_sdk.oauth.installation_store.internals import _from_iso_format_to_datetime
class TestFile(unittest.TestCase):
    """Tests for installation-store internal datetime helpers."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test_iso_format(self):
        # A space-separated ISO-8601 string must parse to the expected epoch.
        parsed = _from_iso_format_to_datetime("2021-07-14 08:00:17")
        self.assertEqual(parsed.timestamp(), 1626249617.0)
|
{
"content_hash": "c9db41d682548fb7d36c48c12fdc534a",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 85,
"avg_line_length": 27.9375,
"alnum_prop": 0.7136465324384788,
"repo_name": "slackapi/python-slackclient",
"id": "5d0f3d4356e5ec3c473cac9852cc5d0b4d4f0c3f",
"size": "447",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "tests/slack_sdk/oauth/installation_store/test_internals.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "7756"
},
{
"name": "HTML",
"bytes": "5961"
},
{
"name": "Makefile",
"bytes": "7656"
},
{
"name": "Python",
"bytes": "360940"
},
{
"name": "Shell",
"bytes": "110"
}
],
"symlink_target": ""
}
|
"""
A container file
"""
from NetCatKS.NetCAT.api.implementers.twisted.services import *
from NetCatKS.NetCAT.api.implementers.twisted.protocols import *
from NetCatKS.NetCAT.api.implementers.twisted.factories import *
from NetCatKS.NetCAT.api.implementers.twisted.resources import *
__author__ = 'dimd'
|
{
"content_hash": "8c1a0e5275f367feb61d066992656424",
"timestamp": "",
"source": "github",
"line_count": 12,
"max_line_length": 64,
"avg_line_length": 25.666666666666668,
"alnum_prop": 0.788961038961039,
"repo_name": "dimddev/NetCatKS",
"id": "9a852e55ff94d7080f15546804df72ca9cb6cb9e",
"size": "308",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NetCatKS/NetCAT/api/implementers/twisted/__init__.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Python",
"bytes": "182697"
}
],
"symlink_target": ""
}
|
"""Tests for Model subclassing."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
import six
from tensorflow.python import keras
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.eager import context
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.platform import test
from tensorflow.python.training.checkpointable import data_structures
from tensorflow.python.training.rmsprop import RMSPropOptimizer
try:
import h5py # pylint:disable=g-import-not-at-top
except ImportError:
h5py = None
# pylint: disable=not-callable
class SimpleTestModel(keras.Model):
  """Two-layer MLP classifier with optional dropout and batch norm."""

  def __init__(self, use_bn=False, use_dp=False, num_classes=10):
    super(SimpleTestModel, self).__init__(name='test_model')
    self.num_classes = num_classes
    self.use_bn = use_bn
    self.use_dp = use_dp
    self.dense1 = keras.layers.Dense(32, activation='relu')
    self.dense2 = keras.layers.Dense(num_classes, activation='softmax')
    if self.use_dp:
      self.dp = keras.layers.Dropout(0.5)
    if self.use_bn:
      self.bn = keras.layers.BatchNormalization(axis=-1)

  def call(self, x):
    # hidden -> (optional dropout) -> (optional BN) -> softmax head.
    hidden = self.dense1(x)
    if self.use_dp:
      hidden = self.dp(hidden)
    if self.use_bn:
      hidden = self.bn(hidden)
    return self.dense2(hidden)
class SimpleConvTestModel(keras.Model):
  """Conv2D -> Flatten -> softmax classifier used by the tests."""

  def __init__(self, num_classes=10):
    super(SimpleConvTestModel, self).__init__(name='test_model')
    self.num_classes = num_classes
    self.conv1 = keras.layers.Conv2D(32, (3, 3), activation='relu')
    self.flatten = keras.layers.Flatten()
    self.dense1 = keras.layers.Dense(num_classes, activation='softmax')

  def call(self, x):
    features = self.conv1(x)
    features = self.flatten(features)
    return self.dense1(features)
class MultiIOTestModel(keras.Model):
  """Two-input, two-output subclassed model; both inputs share `dense1`."""

  def __init__(self, use_bn=False, use_dp=False, num_classes=(2, 3)):
    super(MultiIOTestModel, self).__init__(name='test_model')
    self.use_bn = use_bn
    self.use_dp = use_dp
    self.num_classes = num_classes
    self.dense1 = keras.layers.Dense(32, activation='relu')
    self.dense2 = keras.layers.Dense(num_classes[0], activation='softmax')
    self.dense3 = keras.layers.Dense(num_classes[1], activation='softmax')
    if use_dp:
      self.dp = keras.layers.Dropout(0.5)
    if use_bn:
      self.bn = keras.layers.BatchNormalization()

  def call(self, inputs):
    # Dropout (if enabled) is applied only to the first branch,
    # batch norm (if enabled) only to the second.
    x1, x2 = inputs
    x1 = self.dense1(x1)
    x2 = self.dense1(x2)
    if self.use_dp:
      x1 = self.dp(x1)
    if self.use_bn:
      x2 = self.bn(x2)
    return [self.dense2(x1), self.dense3(x2)]
class NestedTestModel1(keras.Model):
  """A model subclass nested inside a model subclass.
  """

  def __init__(self, num_classes=2):
    super(NestedTestModel1, self).__init__(name='nested_model_1')
    self.num_classes = num_classes
    self.dense1 = keras.layers.Dense(32, activation='relu')
    self.dense2 = keras.layers.Dense(num_classes, activation='relu')
    self.bn = keras.layers.BatchNormalization()
    # Inner subclassed model: exercises subclass-in-subclass nesting.
    self.test_net = SimpleTestModel(num_classes=4,
                                    use_bn=True,
                                    use_dp=True)

  def call(self, inputs):
    x = self.dense1(inputs)
    x = self.bn(x)
    x = self.test_net(x)
    return self.dense2(x)
def get_functional_graph_model(input_dim, num_classes):
  """Build a simple functional-API model (a.k.a. graph network)."""
  model_input = keras.Input(shape=(input_dim,))
  hidden = keras.layers.Dense(32, activation='relu')(model_input)
  normalized = keras.layers.BatchNormalization()(hidden)
  logits = keras.layers.Dense(num_classes)(normalized)
  return keras.Model(model_input, logits)
class NestedTestModel2(keras.Model):
  """A model subclass with a functional-API graph network inside.
  """

  def __init__(self, num_classes=2):
    super(NestedTestModel2, self).__init__(name='nested_model_2')
    self.num_classes = num_classes
    self.dense1 = keras.layers.Dense(32, activation='relu')
    self.dense2 = keras.layers.Dense(num_classes, activation='relu')
    # BUG FIX: was `self.bn = self.bn = keras.layers.BatchNormalization()`,
    # a duplicated (though harmless) assignment — assign once.
    self.bn = keras.layers.BatchNormalization()
    # Inner functional-API graph network nested in the subclass.
    self.test_net = get_functional_graph_model(32, 4)

  def call(self, inputs):
    x = self.dense1(inputs)
    x = self.bn(x)
    x = self.test_net(x)
    return self.dense2(x)
def get_nested_model_3(input_dim, num_classes):
  # A functional-API model with a subclassed model inside.
  # NOTE: this requires the inner subclass to implement `compute_output_shape`.

  inputs = keras.Input(shape=(input_dim,))
  x = keras.layers.Dense(32, activation='relu')(inputs)
  x = keras.layers.BatchNormalization()(x)

  class Inner(keras.Model):
    """Inner subclassed model embedded in the functional graph."""

    def __init__(self):
      super(Inner, self).__init__()
      self.dense1 = keras.layers.Dense(32, activation='relu')
      self.dense2 = keras.layers.Dense(5, activation='relu')
      self.bn = keras.layers.BatchNormalization()

    def call(self, inputs):
      x = self.dense1(inputs)
      x = self.dense2(x)
      return self.bn(x)

  test_model = Inner()
  x = test_model(x)
  outputs = keras.layers.Dense(num_classes)(x)
  return keras.Model(inputs, outputs, name='nested_model_3')
class ModelSubclassingTest(test.TestCase):
  """Tests for building, training, saving and introspecting subclassed Models.

  The assertions below pin exact layer/weight counts and statement ordering,
  so the bodies are kept verbatim; comments only explain intent.
  NOTE(review): `assertRaisesRegexp` is the deprecated Python 2 spelling of
  `assertRaisesRegex` — kept as-is to preserve behavior.
  """

  @test_util.run_in_graph_and_eager_modes
  def test_custom_build(self):
    # A user-defined `build` must be invoked on first call of the model.

    class DummyModel(keras.Model):

      def __init__(self):
        super(DummyModel, self).__init__()
        self.dense1 = keras.layers.Dense(32, activation='relu')
        self.uses_custom_build = False

      def call(self, inputs):
        return self.dense1(inputs)

      def build(self, input_shape):
        self.uses_custom_build = True

    test_model = DummyModel()
    dummy_data = array_ops.ones((32, 50))
    test_model(dummy_data)
    self.assertTrue(test_model.uses_custom_build, 'Model should use user '
                    'defined build when called.')

  @test_util.run_in_graph_and_eager_modes
  def test_invalid_input_shape_build(self):
    # `build` must reject a bare Dimension as an input shape.
    num_classes = 2
    input_dim = 50

    model = SimpleTestModel(num_classes=num_classes,
                            use_dp=True,
                            use_bn=True)

    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    with self.assertRaisesRegexp(
        ValueError, 'input shape is not one of the valid types'):
      model.build(input_shape=tensor_shape.Dimension(input_dim))

  @test_util.run_in_graph_and_eager_modes
  def test_embed_dtype_with_subclass_build(self):
    # Building with the default float input must fail for an int-only
    # embedding layer, with a helpful error message.

    class Embedding(keras.layers.Layer):
      """An Embedding layer."""

      def __init__(self, vocab_size, embedding_dim, **kwargs):
        super(Embedding, self).__init__(**kwargs)
        self.vocab_size = vocab_size
        self.embedding_dim = embedding_dim

      def build(self, _):
        self.embedding = self.add_variable(
            'embedding_kernel',
            shape=[self.vocab_size, self.embedding_dim],
            dtype=np.float32,
            initializer=init_ops.random_uniform_initializer(-0.1, 0.1),
            trainable=True)

      def call(self, x):
        return embedding_ops.embedding_lookup(self.embedding, x)

    class EmbedModel(keras.Model):

      def __init__(self, vocab_size, embed_size):
        super(EmbedModel, self).__init__()
        self.embed1 = Embedding(vocab_size, embed_size)

      def call(self, inputs):
        return self.embed1(inputs)

    model = EmbedModel(100, 20)
    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    with self.assertRaisesRegexp(
        ValueError, 'if your layers do not support float type inputs'):
      model.build(input_shape=(35, 20))

  @test_util.run_in_graph_and_eager_modes
  def test_single_time_step_rnn_build(self):
    # Building an LSTM model from a static batch_input_shape must succeed.
    dim = 4
    timesteps = 1
    batch_input_shape = (None, timesteps, dim)
    units = 3

    class SimpleRNNModel(keras.Model):

      def __init__(self):
        super(SimpleRNNModel, self).__init__()
        self.lstm = keras.layers.LSTM(units)

      def call(self, inputs):
        return self.lstm(inputs)

    model = SimpleRNNModel()
    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    model.build(batch_input_shape)
    self.assertTrue(model.weights, ('Model should have weights now that it '
                                    'has been properly built.'))
    self.assertTrue(model.built, 'Model should be built after calling `build`.')
    model(array_ops.ones((32, timesteps, dim)))

  @test_util.run_in_graph_and_eager_modes
  def test_single_io_subclass_build(self):
    # build() from a plain (batch, dim) tuple.
    num_classes = 2
    input_dim = 50
    batch_size = None

    model = SimpleTestModel(num_classes=num_classes,
                            use_dp=True,
                            use_bn=True)

    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    model.build(input_shape=(batch_size, input_dim))
    self.assertTrue(model.weights, ('Model should have weights now that it '
                                    'has been properly built.'))
    self.assertTrue(model.built, 'Model should be built after calling `build`.')
    model(array_ops.ones((32, input_dim)))

  @test_util.run_in_graph_and_eager_modes
  def test_single_io_dimension_subclass_build(self):
    # Same as above but with tensor_shape.Dimension components.
    num_classes = 2
    input_dim = tensor_shape.Dimension(50)
    batch_size = tensor_shape.Dimension(None)

    model = SimpleTestModel(num_classes=num_classes,
                            use_dp=True,
                            use_bn=True)

    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    model.build(input_shape=(batch_size, input_dim))
    self.assertTrue(model.weights, ('Model should have weights now that it '
                                    'has been properly built.'))
    self.assertTrue(model.built, 'Model should be built after calling `build`.')
    model(array_ops.ones((32, input_dim)))

  @test_util.run_in_graph_and_eager_modes
  def test_multidim_io_subclass_build(self):
    num_classes = 10
    # Input size, e.g. image
    batch_size = 32
    input_shape = (32, 32, 3)

    model = SimpleConvTestModel(num_classes)
    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    batch_input_shape = (batch_size,) + input_shape
    model.build(input_shape=batch_input_shape)
    self.assertTrue(model.weights, ('Model should have weights now that it '
                                    'has been properly built.'))
    self.assertTrue(model.built, 'Model should be built after calling `build`.')
    model(array_ops.ones(batch_input_shape))

  @test_util.run_in_graph_and_eager_modes
  def test_tensorshape_io_subclass_build(self):
    num_classes = 10
    # Input size, e.g. image
    batch_size = None
    input_shape = (32, 32, 3)

    model = SimpleConvTestModel(num_classes)
    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    model.build(
        input_shape=tensor_shape.TensorShape((batch_size,) + input_shape))
    self.assertTrue(model.weights, ('Model should have weights now that it '
                                    'has been properly built.'))
    self.assertTrue(model.built, 'Model should be built after calling `build`.')
    model(array_ops.ones((32,) + input_shape))

  def test_subclass_save_model(self):
    # Weights saved in both TF-checkpoint and (optionally) HDF5 formats
    # must round-trip into a freshly built instance.
    num_classes = 10
    # Input size, e.g. image
    batch_size = None
    input_shape = (32, 32, 3)

    model = SimpleConvTestModel(num_classes)
    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    model.build(
        input_shape=tensor_shape.TensorShape((batch_size,) + input_shape))
    self.assertTrue(model.weights, ('Model should have weights now that it '
                                    'has been properly built.'))
    self.assertTrue(model.built, 'Model should be built after calling `build`.')
    weights = model.get_weights()

    tf_format_name = os.path.join(self.get_temp_dir(), 'ckpt')
    model.save_weights(tf_format_name)
    if h5py is not None:
      hdf5_format_name = os.path.join(self.get_temp_dir(), 'weights.h5')
      model.save_weights(hdf5_format_name)

    model = SimpleConvTestModel(num_classes)
    model.build(
        input_shape=tensor_shape.TensorShape((batch_size,) + input_shape))
    if h5py is not None:
      model.load_weights(hdf5_format_name)
      self.assertAllClose(weights, model.get_weights())
    model.load_weights(tf_format_name)
    self.assertAllClose(weights, model.get_weights())

  @test_util.run_in_graph_and_eager_modes
  def test_multi_io_subclass_build(self):
    batch_size = None
    num_samples = 1000
    input_dim = 50
    model = MultiIOTestModel()
    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    batch_input_shape = tensor_shape.TensorShape((batch_size, input_dim))
    model.build(
        input_shape=[batch_input_shape, batch_input_shape])
    self.assertTrue(model.weights, ('Model should have weights now that it '
                                    'has been properly built.'))
    self.assertTrue(model.built, 'Model should be built after calling `build`.')
    x1 = array_ops.ones((num_samples, input_dim))
    x2 = array_ops.ones((num_samples, input_dim))
    model([x1, x2])

  @test_util.run_in_graph_and_eager_modes
  def test_single_io_workflow_with_np_arrays(self):
    # compile/fit/evaluate round-trip with numpy inputs.
    num_classes = 2
    num_samples = 100
    input_dim = 50

    model = SimpleTestModel(num_classes=num_classes,
                            use_dp=True,
                            use_bn=True)
    model.compile(
        loss='mse',
        optimizer=RMSPropOptimizer(learning_rate=0.001),
        metrics=['acc', keras.metrics.CategoricalAccuracy()])

    x = np.ones((num_samples, input_dim))
    y = np.zeros((num_samples, num_classes))

    model.fit(x, y, epochs=2, batch_size=32, verbose=0)
    _ = model.evaluate(x, y, verbose=0)

  @test_util.run_in_graph_and_eager_modes
  def test_multi_io_workflow_with_np_arrays(self):
    num_classes = (2, 3)
    num_samples = 1000
    input_dim = 50

    model = MultiIOTestModel(num_classes=num_classes,
                             use_dp=True,
                             use_bn=True)
    model.compile(loss='mse',
                  optimizer=RMSPropOptimizer(learning_rate=0.001),
                  metrics=['acc'])

    x1 = np.ones((num_samples, input_dim))
    x2 = np.ones((num_samples, input_dim))
    y1 = np.zeros((num_samples, num_classes[0]))
    y2 = np.zeros((num_samples, num_classes[1]))

    model.fit([x1, x2], [y1, y2], epochs=2, batch_size=32, verbose=0)
    _ = model.evaluate([x1, x2], [y1, y2], verbose=0)

  def test_single_io_workflow_with_tensors(self):
    # Graph-mode training directly from tensors (steps_per_epoch path).
    num_classes = 2
    num_samples = 10
    input_dim = 50

    with self.cached_session():
      model = SimpleTestModel(num_classes=num_classes,
                              use_dp=True,
                              use_bn=True)
      model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))

      x = array_ops.ones((num_samples, input_dim))
      y = array_ops.zeros((num_samples, num_classes))

      model.fit(x, y, epochs=2, steps_per_epoch=10, verbose=0)
      _ = model.evaluate(steps=10, verbose=0)

  def test_multi_io_workflow_with_tensors(self):
    num_classes = (2, 3)
    num_samples = 10
    input_dim = 50

    with self.cached_session():
      model = MultiIOTestModel(num_classes=num_classes,
                               use_dp=True,
                               use_bn=True)
      model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))

      x1 = array_ops.ones((num_samples, input_dim))
      x2 = array_ops.ones((num_samples, input_dim))
      y1 = array_ops.zeros((num_samples, num_classes[0]))
      y2 = array_ops.zeros((num_samples, num_classes[1]))

      model.fit([x1, x2], [y1, y2], epochs=2, steps_per_epoch=10, verbose=0)
      _ = model.evaluate(steps=10, verbose=0)

  @test_util.run_in_graph_and_eager_modes
  def test_single_io_workflow_with_dataset_iterators(self):
    # fit/evaluate from a one-shot dataset iterator.
    num_classes = 2
    num_samples = 10
    input_dim = 50

    with self.cached_session():
      model = SimpleTestModel(num_classes=num_classes, use_dp=True, use_bn=True)
      model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))

      x = np.ones((num_samples, input_dim))
      y = np.zeros((num_samples, num_classes))
      dataset = dataset_ops.Dataset.from_tensor_slices((x, y))
      dataset = dataset.repeat(100)
      dataset = dataset.batch(10)
      iterator = dataset.make_one_shot_iterator()

      model.fit(iterator, epochs=2, steps_per_epoch=10, verbose=0)
      _ = model.evaluate(iterator, steps=10, verbose=0)

  def test_multi_io_workflow_with_numpy_arrays_and_custom_placeholders(self):
    # Mixing numpy inputs with a user-provided placeholder via _set_inputs.
    num_classes = (2, 3)
    num_samples = 1000
    input_dim = 50

    with self.cached_session():
      model = MultiIOTestModel(num_classes=num_classes,
                               use_dp=True,
                               use_bn=True)
      model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))

      x1 = np.ones((num_samples, input_dim))
      x2 = np.ones((num_samples, input_dim))
      y1 = np.zeros((num_samples, num_classes[0]))
      y2 = np.zeros((num_samples, num_classes[1]))

      x2_placeholder = array_ops.placeholder(
          dtype='float32', shape=(None, input_dim))
      model._set_inputs([x1, x2_placeholder])

      model.fit([x1, x2], [y1, y2], epochs=2, batch_size=32, verbose=0)
      _ = model.evaluate([x1, x2], [y1, y2], verbose=0)

  @test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
  def test_attributes(self):
    # layers, weights, trainable_weights, non_trainable_weights, inputs, outputs
    num_classes = (2, 3)
    num_samples = 100
    input_dim = 50

    model = MultiIOTestModel(num_classes=num_classes, use_bn=True)

    x1 = np.ones((num_samples, input_dim))
    x2 = np.ones((num_samples, input_dim))
    y1 = np.zeros((num_samples, num_classes[0]))
    y2 = np.zeros((num_samples, num_classes[1]))

    self.assertEqual(model.name, 'test_model')
    self.assertEqual(model.built, False)
    self.assertEqual(len(model.weights), 0)

    model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))
    model.train_on_batch([x1, x2], [y1, y2])

    self.assertEqual(model.built, True)
    self.assertEqual(len(model.layers), 4)
    self.assertEqual(len(model.weights), 10)
    self.assertEqual(len(model.trainable_weights), 8)
    self.assertEqual(len(model.non_trainable_weights), 2)
    self.assertEqual(len(model.inputs), 2)
    self.assertEqual(len(model.outputs), 2)

  @test_util.run_in_graph_and_eager_modes
  def test_updates(self):
    # test that updates get run during training
    num_samples = 100
    input_dim = 50

    class BNNet(keras.Model):

      def __init__(self):
        super(BNNet, self).__init__()
        self.bn = keras.layers.BatchNormalization(beta_initializer='ones',
                                                  gamma_initializer='ones')

      def call(self, inputs):
        return self.bn(inputs)

    x = np.ones((num_samples, input_dim))
    y = np.ones((num_samples, input_dim))

    model = BNNet()
    model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))
    y_ref = model.predict(x)

    model.train_on_batch(x, y)
    y_new = model.predict(x)
    self.assertGreater(np.sum(np.abs(y_ref - y_new)), 0.1)

  def test_updates_and_losses_for_nested_models_in_subclassed_model(self):
    # Each nesting style must surface 2 BN updates and 1 activity-reg loss.

    # Case 1: deferred-build sequential nested in subclass.
    class TestModel1(keras.Model):

      def __init__(self):
        super(TestModel1, self).__init__()
        self.fc = keras.layers.Dense(10, input_shape=(784,),
                                     activity_regularizer='l1')
        self.bn = keras.Sequential([keras.layers.BatchNormalization(axis=1)])

      def call(self, x):
        return self.bn(self.fc(x))

    with self.cached_session():
      model = TestModel1()

      x = array_ops.ones(shape=[100, 784], dtype='float32')
      model(x)
      self.assertEqual(len(model.get_updates_for(x)), 2)
      self.assertEqual(len(model.get_losses_for(x)), 1)

    # Case 2: placeholder-sequential nested in subclass.
    class TestModel2(keras.Model):

      def __init__(self):
        super(TestModel2, self).__init__()
        self.fc = keras.layers.Dense(10, input_shape=(784,),
                                     activity_regularizer='l1')
        self.bn = keras.Sequential(
            [keras.layers.BatchNormalization(axis=1, input_shape=(10,))])

      def call(self, x):
        return self.bn(self.fc(x))

    with self.cached_session():
      model = TestModel2()

      x = array_ops.ones(shape=[100, 784], dtype='float32')
      model(x)
      self.assertEqual(len(model.get_updates_for(x)), 2)
      self.assertEqual(len(model.get_losses_for(x)), 1)

    # Case 3: functional-API model nested in subclass.
    inputs = keras.Input((10,))
    outputs = keras.layers.BatchNormalization(axis=1)(inputs)
    bn = keras.Model(inputs, outputs)

    class TestModel3(keras.Model):

      def __init__(self):
        super(TestModel3, self).__init__()
        self.fc = keras.layers.Dense(10, input_shape=(784,),
                                     activity_regularizer='l1')
        self.bn = bn

      def call(self, x):
        return self.bn(self.fc(x))

    with self.cached_session():
      model = TestModel3()

      x = array_ops.ones(shape=[100, 784], dtype='float32')
      model(x)
      self.assertEqual(len(model.get_updates_for(x)), 2)
      self.assertEqual(len(model.get_losses_for(x)), 1)

  @test_util.run_in_graph_and_eager_modes
  def test_training_and_inference_behavior(self):
    # test that dropout is applied in training and not inference
    num_samples = 100
    input_dim = 50

    class DPNet(keras.Model):

      def __init__(self):
        super(DPNet, self).__init__()
        self.dp = keras.layers.Dropout(0.5)
        self.dense = keras.layers.Dense(1,
                                        use_bias=False,
                                        kernel_initializer='ones')

      def call(self, inputs):
        x = self.dp(inputs)
        return self.dense(x)

    model = DPNet()
    x = np.ones((num_samples, input_dim))
    y = model.predict(x)
    self.assertEqual(np.sum(y), np.sum(x))
    model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))
    loss = model.train_on_batch(x, y)
    self.assertGreater(loss, 0.1)

  @test_util.run_in_graph_and_eager_modes
  def test_training_methods(self):
    # test fit, train_on_batch
    # on different input types: list, dict
    num_classes = (2, 3)
    num_samples = 100
    input_dim = 50

    x1 = np.ones((num_samples, input_dim))
    x2 = np.ones((num_samples, input_dim))
    y1 = np.zeros((num_samples, num_classes[0]))
    y2 = np.zeros((num_samples, num_classes[1]))

    model = MultiIOTestModel(num_classes=num_classes, use_bn=True)
    model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))
    model.fit([x1, x2], [y1, y2], epochs=2, batch_size=32, verbose=0)
    model.fit({'input_1': x1, 'input_2': x2},
              {'output_1': y1, 'output_2': y2},
              epochs=2, batch_size=32)
    model.fit([x1, x2], [y1, y2], epochs=2, batch_size=32, verbose=0,
              validation_data=([x1, x2], [y1, y2]))

    model = MultiIOTestModel(num_classes=num_classes, use_bn=True)
    model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))
    model.train_on_batch([x1, x2], [y1, y2])
    model.train_on_batch({'input_1': x1, 'input_2': x2},
                         {'output_1': y1, 'output_2': y2})

  @test_util.run_in_graph_and_eager_modes(assert_no_eager_garbage=True)
  def test_inference_methods(self):
    # test predict, evaluate, test_on_batch, predict_on_batch
    # on different input types: list, dict
    num_classes = (2, 3)
    num_samples = 100
    input_dim = 50

    x1 = np.ones((num_samples, input_dim))
    x2 = np.ones((num_samples, input_dim))
    y1 = np.zeros((num_samples, num_classes[0]))
    y2 = np.zeros((num_samples, num_classes[1]))

    model = MultiIOTestModel(num_classes=num_classes, use_bn=True)
    model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))
    model.evaluate([x1, x2], [y1, y2])
    model.test_on_batch([x1, x2], [y1, y2])

    model = MultiIOTestModel(num_classes=num_classes, use_bn=True)
    model.predict([x1, x2])

    model = MultiIOTestModel(num_classes=num_classes, use_bn=True)
    model.predict_on_batch([x1, x2])

  @test_util.run_in_graph_and_eager_modes
  def test_trainable_mutation(self):
    # test that you can change `trainable` on a model or layer, and that
    # it freezes the model state during training
    # TODO(fchollet): add test after we unify BN behavior in eager and symbolic.
    pass

  @test_util.run_in_graph_and_eager_modes
  def test_saving(self):
    # Weights of a trained multi-IO model must round-trip through both
    # TF-checkpoint and (optionally) HDF5 save formats.
    num_classes = (2, 3)
    num_samples = 100
    input_dim = 50

    x1 = np.ones((num_samples, input_dim))
    x2 = np.ones((num_samples, input_dim))
    y1 = np.zeros((num_samples, num_classes[0]))
    y2 = np.zeros((num_samples, num_classes[1]))

    model = MultiIOTestModel(num_classes=num_classes, use_bn=True)
    model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))
    model.fit([x1, x2], [y1, y2], epochs=2, batch_size=32, verbose=0)
    y_ref_1, y_ref_2 = model.predict([x1, x2])

    tf_format_name = os.path.join(self.get_temp_dir(), 'ckpt')
    model.save_weights(tf_format_name)
    if h5py is not None:
      hdf5_format_name = os.path.join(self.get_temp_dir(), 'weights.h5')
      model.save_weights(hdf5_format_name)

    model = MultiIOTestModel(num_classes=num_classes, use_bn=True)

    if h5py is not None:
      with self.assertRaises(ValueError):
        model.load_weights(hdf5_format_name)

    model.load_weights(tf_format_name)

    y1, y2 = model.predict([x1, x2])
    self.assertAllClose(y_ref_1, y1, atol=1e-5)
    self.assertAllClose(y_ref_2, y2, atol=1e-5)

    if h5py is not None:
      model.load_weights(hdf5_format_name)

      y1, y2 = model.predict([x1, x2])
      self.assertAllClose(y_ref_1, y1, atol=1e-5)
      self.assertAllClose(y_ref_2, y2, atol=1e-5)

  @test_util.run_in_graph_and_eager_modes
  def test_summary(self):
    # summary() must report the expected trainable-parameter counts.

    class ToString(object):

      def __init__(self):
        self.contents = ''

      def __call__(self, msg):
        self.contents += msg + '\n'

    # Single-io
    model = SimpleTestModel(num_classes=4, use_bn=True, use_dp=True)
    model._set_inputs(np.ones((3, 4)))  # need to build model first
    print_fn = ToString()
    model.summary(print_fn=print_fn)
    self.assertTrue('Trainable params: 356' in print_fn.contents)

    # Multi-io
    model = MultiIOTestModel(num_classes=(5, 6), use_bn=True, use_dp=True)
    model._set_inputs([np.ones((3, 4)),
                       np.ones((3, 4))])  # need to build model first
    print_fn = ToString()
    model.summary(print_fn=print_fn)
    self.assertTrue('Trainable params: 587' in print_fn.contents)

  @test_util.run_in_graph_and_eager_modes
  def test_subclass_nested_in_subclass(self):
    num_classes = 2
    num_samples = 100
    input_dim = 50

    model = NestedTestModel1(num_classes=num_classes)
    model.compile(loss='mse',
                  optimizer=RMSPropOptimizer(learning_rate=0.001),
                  metrics=['acc'])

    x = np.ones((num_samples, input_dim))
    y = np.zeros((num_samples, num_classes))

    model.fit(x, y, epochs=2, batch_size=32, verbose=0)
    _ = model.evaluate(x, y, verbose=0)

    self.assertEqual(len(model.weights), 8 + len(model.test_net.weights))
    self.assertEqual(len(model.non_trainable_weights),
                     2 + len(model.test_net.non_trainable_weights))
    self.assertEqual(len(model.trainable_weights),
                     6 + len(model.test_net.trainable_weights))

  @test_util.run_in_graph_and_eager_modes
  def test_graph_nested_in_subclass(self):
    num_classes = 2
    num_samples = 100
    input_dim = 50

    model = NestedTestModel2(num_classes=num_classes)
    model.compile(loss='mse',
                  optimizer=RMSPropOptimizer(learning_rate=0.001),
                  metrics=['acc'])

    x = np.ones((num_samples, input_dim))
    y = np.zeros((num_samples, num_classes))

    model.fit(x, y, epochs=2, batch_size=32, verbose=0)
    _ = model.evaluate(x, y, verbose=0)

    self.assertEqual(len(model.weights), 8 + len(model.test_net.weights))
    self.assertEqual(len(model.non_trainable_weights),
                     2 + len(model.test_net.non_trainable_weights))
    self.assertEqual(len(model.trainable_weights),
                     6 + len(model.test_net.trainable_weights))

  @test_util.run_in_graph_and_eager_modes
  def test_subclass_nested_in_graph(self):
    num_classes = 2
    num_samples = 100
    input_dim = 50

    model = get_nested_model_3(input_dim=input_dim, num_classes=num_classes)
    model.compile(loss='mse',
                  optimizer=RMSPropOptimizer(learning_rate=0.001),
                  metrics=['acc'])

    x = np.ones((num_samples, input_dim))
    y = np.zeros((num_samples, num_classes))

    model.fit(x, y, epochs=2, batch_size=32, verbose=0)
    _ = model.evaluate(x, y, verbose=0)

    self.assertEqual(len(model.weights), 16)
    self.assertEqual(
        len(model.non_trainable_weights), 4)
    self.assertEqual(len(model.trainable_weights), 12)

  @test_util.run_in_graph_and_eager_modes
  def test_support_for_manual_training_arg(self):
    # In most cases, the `training` argument is left unspecified, in which
    # case it defaults to value corresponding to the Model method being used
    # (fit -> True, predict -> False, etc).
    # If the user writes their model `call` method to take
    # an explicit `training` argument, we must check that the correct value
    # is being passed to the model for each method call.

    class DPNet(keras.Model):

      def __init__(self):
        super(DPNet, self).__init__()
        self.dp = keras.layers.Dropout(0.5)
        self.dense = keras.layers.Dense(1,
                                        use_bias=False,
                                        kernel_initializer='ones')

      def call(self, inputs, training=False):
        x = self.dp(inputs, training=training)
        return self.dense(x)

    model = DPNet()
    x = np.ones((10, 10))
    y = model.predict(x)
    self.assertEqual(np.sum(y), np.sum(x))
    model.compile(loss='mse', optimizer=RMSPropOptimizer(learning_rate=0.001))
    loss = model.train_on_batch(x, y)
    self.assertGreater(loss, 0.1)

  def test_no_dependency(self):
    # NoDependency-wrapped attributes must be excluded from checkpointing.

    class Foo(keras.Model):

      def __init__(self):
        super(Foo, self).__init__()
        self.isdep = keras.layers.Dense(1)
        self.notdep = data_structures.NoDependency(keras.layers.Dense(2))
        self.notdep_var = data_structures.NoDependency(
            resource_variable_ops.ResourceVariable(1., name='notdep_var'))

    m = Foo()
    self.assertEqual([m.isdep, m.notdep], m.layers)
    self.assertEqual(1, len(m._checkpoint_dependencies))
    self.assertIs(m.isdep, m._checkpoint_dependencies[0].ref)
    self.assertEqual('notdep_var:0', m.notdep_var.name)

  def test_extra_variable(self):
    # Bare ResourceVariables on the model must show up in the variable
    # collections and respect the model-level `trainable` flag.

    class ExtraVar(keras.Model):

      def __init__(self):
        super(ExtraVar, self).__init__()
        self.dense = keras.layers.Dense(1)
        self.var = resource_variable_ops.ResourceVariable(1.)
        self.not_trainable_var = resource_variable_ops.ResourceVariable(
            2., trainable=False)

      def call(self, inputs):
        return self.dense(inputs + self.var)

    m = ExtraVar()
    self.assertTrue(m.trainable)
    self.assertEqual([m.dense], m.layers)
    self.assertEqual([m.var, m.not_trainable_var], m.variables)
    self.assertEqual([m.var], m.trainable_variables)
    self.assertEqual([m.not_trainable_var], m.non_trainable_variables)
    m.trainable = False
    self.assertEqual([m.var, m.not_trainable_var], m.variables)
    self.assertEqual([], m.trainable_variables)
    self.assertEqual([m.var, m.not_trainable_var], m.non_trainable_variables)
    m.trainable = True

    m(array_ops.ones([1, 1]))

    self.assertEqual([m.dense.kernel, m.dense.bias], m.dense.variables)
    self.assertEqual([m.dense.kernel, m.dense.bias], m.dense.weights)

    self.assertEqual([m.dense.kernel, m.dense.bias, m.var, m.not_trainable_var],
                     m.variables)
    self.assertEqual([m.dense.kernel, m.dense.bias, m.var],
                     m.trainable_variables)
    self.assertEqual([m.not_trainable_var], m.non_trainable_variables)

    m.dense.trainable = False
    self.assertEqual(
        [m.var, m.dense.kernel, m.dense.bias, m.not_trainable_var],
        m.variables)
    self.assertEqual([m.var], m.trainable_variables)
    self.assertEqual([m.dense.kernel, m.dense.bias, m.not_trainable_var],
                     m.non_trainable_variables)
class CustomCallModel(keras.Model):
  """Subclassed model whose `call` takes two tensors plus extra kwargs.

  Used by the signature tests below: `call` deliberately does not follow
  the usual `call(self, inputs)` convention.
  """

  def __init__(self):
    super(CustomCallModel, self).__init__()
    self.dense1 = keras.layers.Dense(1, activation='relu')
    self.dense2 = keras.layers.Dense(1, activation='softmax')

  def call(self, first, second, fiddle_with_output='no', training=True):
    combined = self.dense1(first) + self.dense2(second)
    if fiddle_with_output != 'yes':
      return combined
    # Scale the result so tests can observe that the kwarg was forwarded.
    return 10. * combined
class TrainingNoDefaultModel(keras.Model):
  """Subclassed model whose `call` requires `training` to be supplied.

  `training` has no default value, so callers (and the framework) must
  always pass it explicitly; the forward pass is a single Dense layer.
  """

  def __init__(self):
    super(TrainingNoDefaultModel, self).__init__()
    self.dense1 = keras.layers.Dense(1)

  def call(self, x, training):
    # `training` is deliberately unused: the tests only care that it must
    # be provided explicitly.
    return self.dense1(x)
class CustomCallSignatureTests(test.TestCase):
  """Tests for subclassed models whose `call` signature deviates from
  the standard `call(self, inputs)` convention."""

  @test_util.run_in_graph_and_eager_modes
  def test_no_inputs_in_signature(self):
    """Multiple positional tensors and extra kwargs are forwarded to `call`."""
    model = CustomCallModel()
    first = array_ops.ones([2, 3])
    second = array_ops.ones([2, 5])
    output = model(first, second)
    self.evaluate([v.initializer for v in model.variables])
    expected_output = self.evaluate(model.dense1(first) + model.dense2(second))
    self.assertAllClose(expected_output, self.evaluate(output))
    # Extra keyword arguments must reach `call` unchanged.
    output = model(first, second, fiddle_with_output='yes')
    self.assertAllClose(10. * expected_output, self.evaluate(output))
    output = model(first, second=second, training=False)
    self.assertAllClose(expected_output, self.evaluate(output))

  @test_util.run_in_graph_and_eager_modes
  def test_training_args_call_build(self):
    """`build` works for a model whose `call` requires a `training` arg."""
    input_dim = 2
    model = TrainingNoDefaultModel()
    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    model.build((None, input_dim))
    self.assertTrue(model.weights, ('Model should have weights now that it '
                                    'has been properly built.'))
    self.assertTrue(model.built, 'Model should be built after calling `build`.')

  @test_util.run_in_graph_and_eager_modes
  def test_custom_call_kwargs_and_build(self):
    """`build` is rejected when `call` takes multiple positional inputs."""
    first_input_shape = (2, 3)
    second_input_shape = (2, 5)
    model = CustomCallModel()
    self.assertFalse(model.built, 'Model should not have been built')
    self.assertFalse(model.weights, ('Model should have no weights since it '
                                     'has not been built.'))
    with self.assertRaisesRegexp(
        ValueError, 'cannot build your model if it has positional'):
      model.build(input_shape=[first_input_shape, second_input_shape])

  @test_util.run_in_graph_and_eager_modes
  def test_inputs_in_signature(self):
    """If `call` names its first argument `inputs`, extras must be keywords."""

    class HasInputsAndOtherPositional(keras.Model):

      def call(self, inputs, some_other_arg, training=False):
        return inputs

      def compute_output_shape(self, input_shape):
        return input_shape

    model = HasInputsAndOtherPositional()
    with self.assertRaisesRegexp(
        TypeError, 'everything else as a keyword argument'):
      x1, x2 = keras.Input((1, 1)), keras.Input((1, 1))
      model(x1, x2)

  @test_util.run_in_graph_and_eager_modes
  def test_kwargs_in_signature(self):
    """A `**kwargs` catch-all in `call` is accepted."""

    class HasKwargs(keras.Model):

      def call(self, x, y=3, **key_words):
        return x

    model = HasKwargs()
    arg = array_ops.ones([])
    model(arg, a=3)
    if not context.executing_eagerly():
      six.assertCountEqual(self, [arg], model.inputs)

  @test_util.run_in_graph_and_eager_modes
  def test_args_in_signature(self):
    """A `*args` catch-all works and all positional args become inputs."""

    class HasArgs(keras.Model):

      def call(self, x, *args, **kwargs):
        return [x] + list(args)

      def compute_output_shape(self, input_shape):
        return input_shape

    model = HasArgs()
    x1, x2, x3 = keras.Input((1, 1)), keras.Input((1, 1)), keras.Input((1, 1))
    model(x1, x2, x3, a=3)
    if not context.executing_eagerly():
      six.assertCountEqual(self, [x1, x2, x3], model.inputs)

  def test_args_and_keywords_in_signature(self):
    """Combining `*args` with defaulted arguments before it is rejected."""

    class HasArgs(keras.Model):

      def call(self, x, training=True, *args, **kwargs):
        return x

    with context.graph_mode():
      model = HasArgs()
      x1, x2, x3 = keras.Input((1, 1)), keras.Input((1, 1)), keras.Input((1, 1))
      with self.assertRaisesRegexp(TypeError, 'args and arguments with'):
        model(x1, x2, x3, a=3)

  def test_training_no_default(self):
    """A default-less `training` can be passed positionally in graph mode."""
    with context.graph_mode():
      model = TrainingNoDefaultModel()
      arg = array_ops.ones([1, 1])
      model(arg, True)
      six.assertCountEqual(self, [arg], model.inputs)

  def test_training_no_default_with_positional(self):
    """Positional arguments after a default-less `training` are rejected."""

    class TrainingNoDefaultWithPositional(keras.Model):

      def call(self, x, training, positional):
        return x

    with context.graph_mode():
      model = TrainingNoDefaultWithPositional()
      x1, x2, x3 = keras.Input((1, 1)), keras.Input((1, 1)), keras.Input((1, 1))
      with self.assertRaisesRegexp(TypeError, 'after a non-input'):
        model(x1, x2, x3)
if __name__ == '__main__':
  # Run all test cases in this module.
  test.main()
|
{
"content_hash": "8848f3d2c8052b5691754824a810aecd",
"timestamp": "",
"source": "github",
"line_count": 1118,
"max_line_length": 80,
"avg_line_length": 35.22450805008945,
"alnum_prop": 0.6322338183387928,
"repo_name": "AnishShah/tensorflow",
"id": "3a1b00041fb01b93258f284b9c871b1da19d7278",
"size": "40070",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "tensorflow/python/keras/model_subclassing_test.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Assembly",
"bytes": "1286"
},
{
"name": "Batchfile",
"bytes": "9258"
},
{
"name": "C",
"bytes": "337393"
},
{
"name": "C#",
"bytes": "8446"
},
{
"name": "C++",
"bytes": "48452986"
},
{
"name": "CMake",
"bytes": "195768"
},
{
"name": "Dockerfile",
"bytes": "36400"
},
{
"name": "Go",
"bytes": "1210238"
},
{
"name": "HTML",
"bytes": "4681865"
},
{
"name": "Java",
"bytes": "834103"
},
{
"name": "Jupyter Notebook",
"bytes": "2584246"
},
{
"name": "LLVM",
"bytes": "6536"
},
{
"name": "Makefile",
"bytes": "52618"
},
{
"name": "Objective-C",
"bytes": "15650"
},
{
"name": "Objective-C++",
"bytes": "99243"
},
{
"name": "PHP",
"bytes": "1357"
},
{
"name": "Perl",
"bytes": "7536"
},
{
"name": "PureBasic",
"bytes": "25356"
},
{
"name": "Python",
"bytes": "40782103"
},
{
"name": "Ruby",
"bytes": "553"
},
{
"name": "Shell",
"bytes": "458367"
},
{
"name": "Smarty",
"bytes": "6976"
}
],
"symlink_target": ""
}
|
"""
The pyro wire protocol message.
Pyro - Python Remote Objects. Copyright by Irmen de Jong (irmen@razorvine.net).
"""
import hashlib
import hmac
import struct
import logging
import sys
import zlib
from Pyro4 import errors, constants
from Pyro4.configuration import config
__all__ = ["Message", "secure_compare"]
log = logging.getLogger("Pyro4.message")
# Message type codes (the `message type` field of the wire header).
MSG_CONNECT = 1
MSG_CONNECTOK = 2
MSG_CONNECTFAIL = 3
MSG_INVOKE = 4
MSG_RESULT = 5
MSG_PING = 6
# Bit flags for the wire header's `flags` field; values may be OR-ed together.
FLAGS_EXCEPTION = 1 << 0
FLAGS_COMPRESSED = 1 << 1  # payload is zlib-compressed (see decompress_if_needed)
FLAGS_ONEWAY = 1 << 2
FLAGS_BATCH = 1 << 3
FLAGS_META_ON_CONNECT = 1 << 4
FLAGS_ITEMSTREAMRESULT = 1 << 5
FLAGS_KEEPSERIALIZED = 1 << 6
class Message(object):
    """
    Pyro wire protocol message.

    Wire messages consist of a fixed size header, an optional set of annotation chunks,
    and then the payload data. This class doesn't deal with the payload data:
    (de)serialization and handling of that data is done elsewhere.
    Annotation chunks are only parsed, except the 'HMAC' chunk: that is created
    and validated because it is used as a message digest.

    The header format is::

        4  id ('PYRO')
        2  protocol version
        2  message type
        2  message flags
        2  sequence number
        4  data length (i.e. 2 Gb data size limitation)
        2  data serialization format (serializer id)
        2  annotations length (total of all chunks, 0 if no annotation chunks present)
        2  (reserved)
        2  checksum

    After the header, zero or more annotation chunks may follow, of the format::

        4  id (ASCII)
        2  chunk length
        x  annotation chunk databytes

    After that, the actual payload data bytes follow.

    The sequencenumber is used to check if response messages correspond to the
    actual request message. This prevents the situation where Pyro would perhaps return
    the response data from another remote call (which would not result in an error otherwise!)
    This could happen for instance if the socket data stream gets out of sync, perhaps due to
    some form of signal that interrupts I/O.

    The header checksum is a simple sum of the header fields to make reasonably sure
    that we are dealing with an actual correct PYRO protocol header and not some random
    data that happens to start with the 'PYRO' protocol identifier.

    Pyro now uses two annotation chunks that you should not touch yourself:
    'HMAC' contains the hmac digest of the message data bytes and
    all of the annotation chunk data bytes (except those of the HMAC chunk itself).
    'CORR' contains the correlation id (guid bytes)
    Other chunk names are free to use for custom purposes, but Pyro has the right
    to reserve more of them for internal use in the future.
    """
    __slots__ = ["type", "flags", "seq", "data", "data_size", "serializer_id", "annotations", "annotations_size", "hmac_key"]
    # Network byte order; fields as documented in the class docstring above.
    header_format = '!4sHHHHiHHHH'
    header_size = struct.calcsize(header_format)
    checksum_magic = 0x34E9  # arbitrary constant folded into the header checksum

    def __init__(self, msgType, databytes, serializer_id, flags, seq, annotations=None, hmac_key=None):
        """Create a message with the given payload bytes and header metadata.

        If ``hmac_key`` is given, a 'HMAC' annotation chunk is added that
        digests the payload and all other annotation chunk values.
        Raises MessageTooLargeError if the configured size limit is exceeded.
        """
        self.type = msgType
        self.flags = flags
        self.seq = seq
        self.data = databytes
        self.data_size = len(self.data)
        self.serializer_id = serializer_id
        self.annotations = dict(annotations or {})
        self.hmac_key = hmac_key
        if self.hmac_key:
            self.annotations["HMAC"] = self.hmac()  # should be done last because it calculates hmac over other annotations
        # Each annotation chunk costs 6 header bytes (4-byte id + 2-byte length).
        self.annotations_size = sum([6 + len(v) for v in self.annotations.values()])
        if 0 < config.MAX_MESSAGE_SIZE < (self.data_size + self.annotations_size):
            raise errors.MessageTooLargeError("max message size exceeded (%d where max=%d)" %
                                              (self.data_size + self.annotations_size, config.MAX_MESSAGE_SIZE))

    def __repr__(self):
        return "<%s.%s at %x; type=%d flags=%d seq=%d datasize=%d #ann=%d>" %\
            (self.__module__, self.__class__.__name__, id(self), self.type, self.flags, self.seq, self.data_size, len(self.annotations))

    def to_bytes(self):
        """creates a byte stream containing the header followed by annotations (if any) followed by the data"""
        return self.__header_bytes() + self.__annotations_bytes() + self.data

    def __header_bytes(self):
        """Packs the fixed-size wire header, including the simple sum checksum."""
        if not (0 <= self.data_size <= 0x7fffffff):
            raise ValueError("invalid message size (outside range 0..2Gb)")
        checksum = (self.type + constants.PROTOCOL_VERSION + self.data_size + self.annotations_size +
                    self.serializer_id + self.flags + self.seq + self.checksum_magic) & 0xffff
        return struct.pack(self.header_format, b"PYRO", constants.PROTOCOL_VERSION, self.type, self.flags,
                           self.seq, self.data_size, self.serializer_id, self.annotations_size, 0, checksum)

    def __annotations_bytes(self):
        """Serializes all annotation chunks (4-byte id + 2-byte length + value)."""
        if self.annotations:
            a = []
            for k, v in self.annotations.items():
                if len(k) != 4:
                    raise errors.ProtocolError("annotation key must be of length 4")
                if sys.version_info >= (3, 0):
                    # struct needs bytes for the 4s field on Python 3
                    k = k.encode("ASCII")
                a.append(struct.pack("!4sH", k, len(v)))
                a.append(v)
            return b"".join(a)
        return b""

    # Note: this 'chunked' way of sending is not used because it triggers Nagle's algorithm
    # on some systems (linux). This causes big delays, unless you change the socket option
    # TCP_NODELAY to disable the algorithm. What also works, is sending all the message bytes
    # in one go: connection.send(message.to_bytes()). This is what Pyro does.
    def send(self, connection):
        """send the message as bytes over the connection"""
        connection.send(self.__header_bytes())
        if self.annotations:
            connection.send(self.__annotations_bytes())
        connection.send(self.data)

    @classmethod
    def from_header(cls, headerData):
        """Parses a message header. Does not yet process the annotations chunks and message data."""
        if not headerData or len(headerData) != cls.header_size:
            raise errors.ProtocolError("header data size mismatch")
        tag, ver, msg_type, flags, seq, data_size, serializer_id, anns_size, _, checksum = struct.unpack(cls.header_format, headerData)
        if tag != b"PYRO" or ver != constants.PROTOCOL_VERSION:
            raise errors.ProtocolError("invalid data or unsupported protocol version")
        if checksum != (msg_type + ver + data_size + anns_size + flags + serializer_id + seq + cls.checksum_magic) & 0xffff:
            raise errors.ProtocolError("header checksum mismatch")
        # Build a payload-less message; data/annotations are filled in by recv().
        msg = Message(msg_type, b"", serializer_id, flags, seq)
        msg.data_size = data_size
        msg.annotations_size = anns_size
        return msg

    @classmethod
    def recv(cls, connection, requiredMsgTypes=None, hmac_key=None):
        """
        Receives a pyro message from a given connection.
        Accepts the given message types (None=any, or pass a sequence).
        Also reads annotation chunks and the actual payload data.
        Validates a HMAC chunk if present.
        """
        msg = cls.from_header(connection.recv(cls.header_size))
        msg.hmac_key = hmac_key
        if 0 < config.MAX_MESSAGE_SIZE < (msg.data_size + msg.annotations_size):
            errorMsg = "max message size exceeded (%d where max=%d)" % (msg.data_size + msg.annotations_size, config.MAX_MESSAGE_SIZE)
            log.error("connection " + str(connection) + ": " + errorMsg)
            connection.close()  # close the socket because at this point we can't return the correct seqnr for returning an errormsg
            exc = errors.MessageTooLargeError(errorMsg)
            exc.pyroMsg = msg
            raise exc
        if requiredMsgTypes and msg.type not in requiredMsgTypes:
            err = "invalid msg type %d received" % msg.type
            log.error(err)
            exc = errors.ProtocolError(err)
            exc.pyroMsg = msg
            raise exc
        if msg.annotations_size:
            # read annotation chunks
            annotations_data = connection.recv(msg.annotations_size)
            msg.annotations = {}
            i = 0
            while i < msg.annotations_size:
                # each chunk: 4-byte ASCII id + 2-byte big-endian length + value
                anno, length = struct.unpack("!4sH", annotations_data[i:i + 6])
                if sys.version_info >= (3, 0):
                    anno = anno.decode("ASCII")
                msg.annotations[anno] = annotations_data[i + 6:i + 6 + length]
                if sys.platform == "cli":
                    # IronPython: force an actual bytes object
                    msg.annotations[anno] = bytes(msg.annotations[anno])
                i += 6 + length
        # read data
        msg.data = connection.recv(msg.data_size)
        if "HMAC" in msg.annotations and hmac_key:
            # use a non-short-circuiting compare to avoid timing leaks
            if not secure_compare(msg.annotations["HMAC"], msg.hmac()):
                exc = errors.SecurityError("message hmac mismatch")
                exc.pyroMsg = msg
                raise exc
        elif ("HMAC" in msg.annotations) != bool(hmac_key):
            # Not allowed: message contains hmac but hmac_key is not set, or vice versa.
            err = "hmac key config not symmetric"
            log.warning(err)
            exc = errors.SecurityError(err)
            exc.pyroMsg = msg
            raise exc
        return msg

    def hmac(self):
        """returns the hmac of the data and the annotation chunk values (except HMAC chunk itself)"""
        mac = hmac.new(self.hmac_key, self.data, digestmod=hashlib.sha1)
        for k, v in sorted(self.annotations.items()):  # note: sorted because we need fixed order to get the same hmac
            if k != "HMAC":
                mac.update(v)
        return mac.digest() if sys.platform != "cli" else bytes(mac.digest())

    @staticmethod
    def ping(pyroConnection, hmac_key=None):
        """Convenience method to send a 'ping' message and wait for the 'pong' response"""
        # serializer id (42) is irrelevant for a ping; payload isn't deserialized
        ping = Message(MSG_PING, b"ping", 42, 0, 0, hmac_key=hmac_key)
        pyroConnection.send(ping.to_bytes())
        Message.recv(pyroConnection, [MSG_PING])

    def decompress_if_needed(self):
        """Decompress the message data if it is compressed."""
        if self.flags & FLAGS_COMPRESSED:
            self.data = zlib.decompress(self.data)
            self.flags &= ~FLAGS_COMPRESSED
            self.data_size = len(self.data)
        return self
try:
    from hmac import compare_digest as secure_compare
except ImportError:
    # Python version doesn't have it natively, use a python fallback implementation
    import operator
    try:
        reduce
    except NameError:
        # Python 3: reduce moved to functools
        from functools import reduce

    def secure_compare(a, b):
        """Compare two same-typed sequences without short-circuiting.

        Every element is examined even after a mismatch, so the comparison
        time does not reveal *where* the inputs differ (a length mismatch
        still returns early).
        """
        if type(a) != type(b):
            raise TypeError("arguments must both be same type")
        if len(a) != len(b):
            return False
        return reduce(operator.and_, map(operator.eq, a, b), True)
|
{
"content_hash": "80b25ce9af4d47a5083e9a084cdd7f68",
"timestamp": "",
"source": "github",
"line_count": 251,
"max_line_length": 139,
"avg_line_length": 44.15139442231076,
"alnum_prop": 0.6297599711243458,
"repo_name": "irmen/Pyro4",
"id": "c33f09345c858cab3121a3fad70c6f9205721901",
"size": "11082",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/Pyro4/message.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "1283"
},
{
"name": "Python",
"bytes": "618799"
},
{
"name": "Shell",
"bytes": "2394"
}
],
"symlink_target": ""
}
|
from django import forms
from django.utils.translation import ugettext_lazy as _
# from fobi.contrib.plugins.form_elements.security.honeypot.settings import (
# HONEYPOT_VALUE
# )
__title__ = 'fobi.contrib.plugins.form_elements.security.honeypot.fields'
__author__ = 'Artur Barseghyan <artur.barseghyan@gmail.com>'
__copyright__ = '2014-2017 Artur Barseghyan'
__license__ = 'GPL 2.0/LGPL 2.1'
__all__ = ('HoneypotField',)
class HoneypotField(forms.CharField):
    """A hidden honeypot field used to catch spam bots.

    The field is rendered hidden with a preset value; if the submitted
    value differs from the initial one, the form was tampered with (most
    likely by an automated submitter) and validation fails.
    """

    default_error_messages = {
        'invalid': _('Field value was tampered with.'),
    }
    widget = forms.HiddenInput

    def clean(self, value):
        """Validate that the submitted value matches the initial value."""
        if value == self.initial:
            return value
        raise forms.ValidationError(
            self.error_messages['invalid'], code='invalid'
        )
|
{
"content_hash": "50d5518c91d4804027f946ca86333fc2",
"timestamp": "",
"source": "github",
"line_count": 29,
"max_line_length": 77,
"avg_line_length": 30.275862068965516,
"alnum_prop": 0.6526195899772209,
"repo_name": "mansonul/events",
"id": "339199d51aa7fac55d141db28cd2cfb56c0e71e1",
"size": "878",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "events/contrib/plugins/form_elements/security/honeypot/fields.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "90251"
},
{
"name": "HTML",
"bytes": "186225"
},
{
"name": "JavaScript",
"bytes": "43221"
},
{
"name": "Python",
"bytes": "804726"
},
{
"name": "Shell",
"bytes": "4196"
}
],
"symlink_target": ""
}
|
"""
SLA (Service-level agreement) is set of details for determining compliance
with contracted values such as maximum error rate or minimum response time.
"""
from rally.common.i18n import _
from rally.common import streaming_algorithms
from rally import consts
from rally.task import sla
@sla.configure(name="outliers")
class Outliers(sla.SLA):
    """Limit the number of outliers (iterations that take too much time).

    The outliers are detected automatically using the computation of the mean
    and standard deviation (std) of the data.
    """
    CONFIG_SCHEMA = {
        "type": "object",
        "$schema": consts.JSON_SCHEMA,
        "properties": {
            "max": {"type": "integer", "minimum": 0},
            "min_iterations": {"type": "integer", "minimum": 3},
            "sigmas": {"type": "number", "minimum": 0.0,
                       "exclusiveMinimum": True}
        }
    }

    def __init__(self, criterion_value):
        super(Outliers, self).__init__(criterion_value)
        # "max" -- maximum allowed number of outliers before the SLA fails.
        self.max_outliers = self.criterion_value.get("max", 0)
        # NOTE(msdubov): Having 3 as default is reasonable (need enough data).
        self.min_iterations = self.criterion_value.get("min_iterations", 3)
        # "sigmas" -- how many standard deviations above the mean a duration
        # must be to count as an outlier.
        self.sigmas = self.criterion_value.get("sigmas", 3.0)
        self.iterations = 0
        self.outliers = 0
        self.threshold = None  # mean + sigmas * std; updated incrementally
        self.mean_comp = streaming_algorithms.MeanStreamingComputation()
        self.std_comp = streaming_algorithms.StdDevStreamingComputation()

    def add_iteration(self, iteration):
        # Failed iterations are ignored: only successful durations feed the
        # mean/std computations and the outlier count.
        if not iteration.get("error"):
            duration = iteration["duration"]
            self.iterations += 1
            # NOTE(msdubov): First check if the current iteration is an outlier
            if ((self.iterations >= self.min_iterations and self.threshold and
                 duration > self.threshold)):
                self.outliers += 1
            # NOTE(msdubov): Then update the threshold value
            self.mean_comp.add(duration)
            self.std_comp.add(duration)
            # std is only defined from the second data point onwards
            if self.iterations >= 2:
                mean = self.mean_comp.result()
                std = self.std_comp.result()
                self.threshold = mean + self.sigmas * std
        self.success = self.outliers <= self.max_outliers
        return self.success

    def details(self):
        return (_("Maximum number of outliers %i <= %i - %s") %
                (self.outliers, self.max_outliers, self.status()))
|
{
"content_hash": "1e68c0e029e62871912f461b08995e3d",
"timestamp": "",
"source": "github",
"line_count": 65,
"max_line_length": 79,
"avg_line_length": 38.276923076923076,
"alnum_prop": 0.6093247588424437,
"repo_name": "shdowofdeath/rally",
"id": "92e8cb8003ea242f43d91499b535f38ea65c2719",
"size": "3119",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "rally/plugins/common/sla/outliers.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "46737"
},
{
"name": "Python",
"bytes": "2421750"
},
{
"name": "Shell",
"bytes": "36795"
}
],
"symlink_target": ""
}
|
import os
import json
import logging
import urllib3
import ca_certs_locater
from upwork.oauth import OAuth
from upwork.http import raise_http_error
from upwork.utils import decimal_default
from upwork.exceptions import IncorrectJsonResponseError
__all__ = ["Client"]
logger = logging.getLogger('python-upwork')

if os.environ.get("PYTHON_UPWORK_DEBUG", False):
    # Debug mode: emit DEBUG records to a file (if configured) or to stderr.
    # The logger's own level must be lowered as well -- setting only the
    # *handler* level would leave the logger at the default effective level
    # (WARNING) and silently drop every DEBUG record before it reaches the
    # handler.
    logger.setLevel(logging.DEBUG)
    if os.environ.get("PYTHON_UPWORK_DEBUG_FILE", False):
        fh = logging.FileHandler(filename=os.environ["PYTHON_UPWORK_DEBUG_FILE"]
                                 )
        fh.setLevel(logging.DEBUG)
        logger.addHandler(fh)
    else:
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)
        logger.addHandler(ch)
else:
    # Normal mode: attach a stream handler that only lets CRITICAL through.
    ch = logging.StreamHandler()
    ch.setLevel(logging.CRITICAL)
    logger.addHandler(ch)
class Client(object):
    """
    Main API client with oAuth v1 authorization.

    *Parameters:*
      :public_key:                 Public API key
      :secret_key:                 API key secret
      :oauth_access_token:         oAuth access token public key
      :oauth_access_token_secret:  oAuth access token secret key
      :fmt:         (optional, default ``json``)
                    API response format.
                    Currently only ``'json'`` is supported
      :finreport:   (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.finreport` router
      :hr:          (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.hr` router
      :mc:          (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.mc` router
      :offers:      (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.offers` router
      :provider:    (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.provider` router
      :task:        (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.task` router
      :team:        (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.team` router
      :timereport:  (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.timereport` router
      :job:         (optional, default ``True``)
                    Whether to attach :py:mod:`upwork.routers.job` router
      :timeout:     (optional, default ``3 secs``)
                    Socket operations timeout.
    """

    def __init__(self, public_key, secret_key,
                 oauth_access_token=None, oauth_access_token_secret=None,
                 fmt='json', finreport=True, hr=True, mc=True,
                 offers=True, provider=True, task=True, team=True,
                 timereport=True, job=True, timeout=3):
        self.public_key = public_key
        self.secret_key = secret_key
        self.fmt = fmt
        # Catch the warning about
        # """
        # SecurityWarning: Certificate has no `subjectAltName`,
        # falling back to check for a `commonName` for now.
        # This feature is being removed by major browsers
        # and deprecated by RFC 2818.
        # (See https://github.com/shazow/urllib3/issues/497 for details.)
        # """
        # The warning will appear only in logs
        logging.captureWarnings(True)
        # All requests go through one pool manager with certificate checking.
        self.http = urllib3.PoolManager(
            cert_reqs='CERT_REQUIRED',
            ca_certs=ca_certs_locater.get(),
            timeout=int(timeout)
        )
        self.oauth_access_token = oauth_access_token
        self.oauth_access_token_secret = oauth_access_token_secret
        # Namespaces
        self.auth = OAuth(self)
        # Routers are imported lazily and attached only when enabled, so
        # unused API namespaces add no import cost.
        if finreport:
            from upwork.routers.finreport import Finreports
            self.finreport = Finreports(self)
        if hr:
            from upwork.routers.hr import HR_V1, HR, HR_V3
            self.hr_v1 = HR_V1(self)
            self.hr = HR(self)
            self.hr_v3 = HR_V3(self)
        if mc:
            from upwork.routers.mc import MC
            self.mc = MC(self)
        if offers:
            from upwork.routers.offers import Offers
            self.offers = Offers(self)
        if provider:
            from upwork.routers.provider import Provider, Provider_V2
            self.provider = Provider(self)
            self.provider_v2 = Provider_V2(self)
        if task:
            from upwork.routers.task import Task, Task_V2
            self.task = Task(self)
            self.task_v2 = Task_V2(self)
        if team:
            from upwork.routers.team import Team, Team_V2
            self.team = Team(self)
            self.team_v2 = Team_V2(self)
        if timereport:
            from upwork.routers.timereport import TimeReport
            self.timereport = TimeReport(self)
        if job:
            from upwork.routers.job import Job
            self.job = Job(self)

    # Shortcuts for HTTP methods
    def get(self, url, data=None):
        """Shortcut for a signed HTTP GET request."""
        return self.read(url, data, method='GET', fmt=self.fmt)

    def post(self, url, data=None):
        """Shortcut for a signed HTTP POST request."""
        return self.read(url, data, method='POST', fmt=self.fmt)

    def put(self, url, data=None):
        """Shortcut for a signed HTTP PUT request."""
        return self.read(url, data, method='PUT', fmt=self.fmt)

    def delete(self, url, data=None):
        """Shortcut for a signed HTTP DELETE request."""
        return self.read(url, data, method='DELETE', fmt=self.fmt)

    # The method that actually makes HTTP requests
    def urlopen(self, url, data=None, method='GET', headers=None):
        """Perform oAuth v1 signed HTTP request.

        *Parameters:*
          :url:     Target url
          :data:    Dictionary with parameters
          :method:  (optional, default ``GET``)
                    HTTP method, possible values:
                    * ``GET``
                    * ``POST``
                    * ``PUT``
                    * ``DELETE``
          :headers: (optional, default ``{}``)
                    Dictionary with header values
        """
        if headers is None:
            headers = {}
        # Remember the last request details for debugging/inspection.
        self.last_method = method
        self.last_url = url
        self.last_data = data
        # TODO: Headers are not supported fully yet
        # instead we pass oauth parameters in querystring
        if method in ('PUT', 'DELETE'):
            post_data = self.auth.get_oauth_params(
                url, self.oauth_access_token,
                self.oauth_access_token_secret,
                {}, method)  # don't need parameters in url
        else:
            if data is None:
                data = {}
            post_data = self.auth.get_oauth_params(
                url, self.oauth_access_token,
                self.oauth_access_token_secret,
                data, method)
        if method == 'GET':
            # oAuth signature travels in the querystring
            url = '{0}?{1}'.format(url, post_data)
            return self.http.urlopen(method, url)
        elif method == 'POST':
            # oAuth signature travels in the form-encoded body
            return self.http.urlopen(
                method, url, body=post_data,
                headers={'Content-Type':
                         'application/x-www-form-urlencoded;charset=UTF-8'})
        elif method in ('PUT', 'DELETE'):
            # oAuth signature in the querystring; payload as a JSON body
            url = '{0}?{1}'.format(url, post_data)
            headers['Content-Type'] = 'application/json'
            if data is not None:
                data_json = json.dumps(data)
            else:
                data_json = ''
            return self.http.urlopen(
                method, url, body=data_json, headers=headers)
        else:
            raise Exception('Wrong http method: {0}. Supported'
                            'methods are: '
                            'GET, POST, PUT, DELETE'.format(method))

    def read(self, url, data=None, method='GET', fmt='json'):
        """
        Returns parsed Python object or raises an error.

        *Parameters:*
          :url:    Target url
          :data:   Dictionary with parameters
          :method: (optional, default ``GET``)
                   HTTP method, possible values:
                   * ``GET``
                   * ``POST``
                   * ``PUT``
                   * ``DELETE``
          :fmt:    (optional, default ``json``)
                   API response format.
                   Currently only ``'json'`` is supported
        """
        assert fmt == 'json', "Only JSON format is supported at the moment"
        # GDS (reports) endpoints take no format suffix; every other endpoint
        # gets the ``.json`` extension appended.
        if '/gds/' not in url:
            url = '{0}.{1}'.format(url, fmt)
        # NOTE(review): shadows the module-level ``logger`` with an identical
        # instance -- redundant but harmless.
        logger = logging.getLogger('python-upwork')
        logger.debug('Prepairing to make Upwork call')
        logger.debug('URL: {0}'.format(url))
        try:
            # Decimal values need a custom encoder for the debug dump
            logger.debug('Data: {0}'.format(
                json.dumps(data, default=decimal_default)))
        except TypeError:
            logger.debug('Data: {0}'.format(str(data)))
        logger.debug('Method: {0}'.format(method))
        response = self.urlopen(url, data, method)
        if response.status != 200:
            logger.debug('Error: {0}'.format(response))
            raise_http_error(url, response)
        result = response.data
        logger.debug('Response: {0}'.format(result))
        if fmt == 'json':
            try:
                result = json.loads(result)
            except ValueError:
                # Not a valid json string
                logger.debug('Response is not a valid json string')
                raise IncorrectJsonResponseError(
                    json.dumps({'status': response.status, 'body': result},
                               default=decimal_default)
                )
        return result
if __name__ == "__main__":
    # Run the module's doctests when executed directly.
    import doctest
    doctest.testmod()
|
{
"content_hash": "532e859709f5008e538f49c909ae76be",
"timestamp": "",
"source": "github",
"line_count": 304,
"max_line_length": 80,
"avg_line_length": 34.0328947368421,
"alnum_prop": 0.5026097042335202,
"repo_name": "kipanshi/python-upwork",
"id": "c6f6e30eb3a949a9a7cb2e86dc67ab33d9799d4c",
"size": "10430",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "upwork/client.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Makefile",
"bytes": "3684"
},
{
"name": "Python",
"bytes": "212706"
},
{
"name": "Shell",
"bytes": "699"
}
],
"symlink_target": ""
}
|
"""Django settings for performance_db_frontend project.
Generated by 'django-admin startproject' using Django 1.8.1.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.8/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.8/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '^3xfu!jsgq(9ld$-!bhy$y^#n0!jh4n39c)+ocr(o!i)x+f%&!'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
PERF_DB_HOSTNAME = '0.0.0.0'
PERF_DB_PORT = 50052
# Application definition
INSTALLED_APPS = (
'django.contrib.admin', 'django.contrib.auth',
'django.contrib.contenttypes', 'django.contrib.sessions',
'django.contrib.messages', 'django.contrib.staticfiles', 'app',)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',)
ROOT_URLCONF = 'performance_db_frontend.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'performance_db_frontend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.8/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.8/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
STATIC_URL = '/static/'
|
{
"content_hash": "5eac777cb414329f170ebf7c9dc1c7eb",
"timestamp": "",
"source": "github",
"line_count": 94,
"max_line_length": 71,
"avg_line_length": 29.01063829787234,
"alnum_prop": 0.6963696369636964,
"repo_name": "sidrakesh93/grpc-tools",
"id": "dcbc7fae946890462d8e07b7aecee36462202bb0",
"size": "4256",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "benchmarking/performance_db/performance_db_frontend/performance_db_frontend/settings.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C++",
"bytes": "15175"
},
{
"name": "HTML",
"bytes": "34528"
},
{
"name": "JavaScript",
"bytes": "88140"
},
{
"name": "Makefile",
"bytes": "4312"
},
{
"name": "Protocol Buffer",
"bytes": "10945"
},
{
"name": "Python",
"bytes": "49148"
},
{
"name": "Ruby",
"bytes": "2262"
},
{
"name": "Shell",
"bytes": "5328"
}
],
"symlink_target": ""
}
|
from ..Qt import QtCore, QtGui
from ..python2_3 import asUnicode
from .Parameter import Parameter, registerParameterType
from .ParameterItem import ParameterItem
from ..widgets.SpinBox import SpinBox
from ..widgets.ColorButton import ColorButton
from ..colormap import ColorMap
#from ..widgets.GradientWidget import GradientWidget ## creates import loop
from .. import pixmaps as pixmaps
from .. import functions as fn
import os, sys
from ..pgcollections import OrderedDict
class WidgetParameterItem(ParameterItem):
    """
    ParameterTree item with:

    * label in second column for displaying value
    * simple widget for editing value (displayed instead of label when item is selected)
    * button that resets value to default

    ========================== =============================================================
    **Registered Types:**
    int                        Displays a :class:`SpinBox <pyqtgraph.SpinBox>` in integer
                               mode.
    float                      Displays a :class:`SpinBox <pyqtgraph.SpinBox>`.
    bool                       Displays a QCheckBox
    str                        Displays a QLineEdit
    color                      Displays a :class:`ColorButton <pyqtgraph.ColorButton>`
    colormap                   Displays a :class:`GradientWidget <pyqtgraph.GradientWidget>`
    ========================== =============================================================

    This class can be subclassed by overriding makeWidget() to provide a custom widget.
    """
    def __init__(self, param, depth):
        ParameterItem.__init__(self, param, depth)
        self.hideWidget = True  ## hide edit widget, replace with label when not selected
                                ## set this to False to keep the editor widget always visible
        ## build widget into column 1 with a display label and default button.
        w = self.makeWidget()
        self.widget = w
        self.eventProxy = EventProxy(w, self.widgetEventFilter)
        opts = self.param.opts
        if 'tip' in opts:
            w.setToolTip(opts['tip'])
        ## Small button that resets the parameter to its default value.
        ## (removed unused local: modDir = os.path.dirname(__file__) -- the
        ## icon is loaded through pixmaps.getPixmap, not a file path.)
        self.defaultBtn = QtGui.QPushButton()
        self.defaultBtn.setFixedWidth(20)
        self.defaultBtn.setFixedHeight(20)
        self.defaultBtn.setIcon(QtGui.QIcon(pixmaps.getPixmap('default')))
        self.defaultBtn.clicked.connect(self.defaultClicked)
        self.displayLabel = QtGui.QLabel()
        layout = QtGui.QHBoxLayout()
        layout.setContentsMargins(0, 0, 0, 0)
        layout.setSpacing(2)
        layout.addWidget(w)
        layout.addWidget(self.displayLabel)
        layout.addWidget(self.defaultBtn)
        self.layoutWidget = QtGui.QWidget()
        self.layoutWidget.setLayout(layout)
        if w.sigChanged is not None:
            w.sigChanged.connect(self.widgetValueChanged)
        if hasattr(w, 'sigChanging'):
            w.sigChanging.connect(self.widgetValueChanging)
        ## update value shown in widget.
        if opts.get('value', None) is not None:
            self.valueChanged(self, opts['value'], force=True)
        else:
            ## no starting value was given; use whatever the widget has
            self.widgetValueChanged()
        self.updateDefaultBtn()
    def makeWidget(self):
        """
        Return a single widget that should be placed in the second tree column.
        The widget must be given three attributes:

        ==========  ============================================================
        sigChanged  a signal that is emitted when the widget's value is changed
        value       a function that returns the value
        setValue    a function that sets the value
        ==========  ============================================================

        This is a good function to override in subclasses.
        """
        opts = self.param.opts
        t = opts['type']
        if t in ('int', 'float'):
            defs = {
                'value': 0, 'min': None, 'max': None,
                'step': 1.0, 'dec': False,
                'siPrefix': False, 'suffix': '', 'decimals': 3,
            }
            if t == 'int':
                defs['int'] = True
                defs['minStep'] = 1.0
            ## options supplied by the parameter override the defaults above
            for k in defs:
                if k in opts:
                    defs[k] = opts[k]
            if 'limits' in opts:
                defs['min'], defs['max'] = opts['limits']
            w = SpinBox()
            w.setOpts(**defs)
            w.sigChanged = w.sigValueChanged
            w.sigChanging = w.sigValueChanging
        elif t == 'bool':
            w = QtGui.QCheckBox()
            w.sigChanged = w.toggled
            w.value = w.isChecked
            w.setValue = w.setChecked
            w.setEnabled(not opts.get('readonly', False))
            self.hideWidget = False
        elif t == 'str':
            w = QtGui.QLineEdit()
            w.setStyleSheet('border: 0px')
            w.sigChanged = w.editingFinished
            w.value = lambda: asUnicode(w.text())
            w.setValue = lambda v: w.setText(asUnicode(v))
            w.sigChanging = w.textChanged
        elif t == 'color':
            w = ColorButton()
            w.sigChanged = w.sigColorChanged
            w.sigChanging = w.sigColorChanging
            w.value = w.color
            w.setValue = w.setColor
            self.hideWidget = False
            w.setFlat(True)
            w.setEnabled(not opts.get('readonly', False))
        elif t == 'colormap':
            from ..widgets.GradientWidget import GradientWidget ## need this here to avoid import loop
            w = GradientWidget(orientation='bottom')
            w.sigChanged = w.sigGradientChangeFinished
            w.sigChanging = w.sigGradientChanged
            w.value = w.colorMap
            w.setValue = w.setColorMap
            self.hideWidget = False
        else:
            raise Exception("Unknown type '%s'" % asUnicode(t))
        return w
    def widgetEventFilter(self, obj, ev):
        ## filter widget's events
        ## catch TAB to change focus
        ## catch focusOut to hide editor
        if ev.type() == ev.KeyPress:
            if ev.key() == QtCore.Qt.Key_Tab:
                self.focusNext(forward=True)
                return True ## don't let anyone else see this event
            elif ev.key() == QtCore.Qt.Key_Backtab:
                self.focusNext(forward=False)
                return True ## don't let anyone else see this event
        #elif ev.type() == ev.FocusOut:
            #self.hideEditor()
        return False
    def setFocus(self):
        self.showEditor()
    def isFocusable(self):
        return self.param.writable()
    def valueChanged(self, param, val, force=False):
        ## called when the parameter's value has changed
        ParameterItem.valueChanged(self, param, val)
        ## disconnect so programmatically setting the widget does not re-enter
        ## widgetValueChanged; reconnect in `finally` even if setValue raises.
        self.widget.sigChanged.disconnect(self.widgetValueChanged)
        try:
            if force or val != self.widget.value():
                self.widget.setValue(val)
            self.updateDisplayLabel(val)  ## always make sure label is updated, even if values match!
        finally:
            self.widget.sigChanged.connect(self.widgetValueChanged)
        self.updateDefaultBtn()
    def updateDefaultBtn(self):
        ## enable/disable default btn
        self.defaultBtn.setEnabled(not self.param.valueIsDefault() and self.param.writable())
        # hide / show
        self.defaultBtn.setVisible(not self.param.readonly())
    def updateDisplayLabel(self, value=None):
        """Update the display label to reflect the value of the parameter."""
        if value is None:
            value = self.param.value()
        opts = self.param.opts
        ## Prefer the widget's own text so formatting (suffix, decimals)
        ## matches what the user sees in the editor.
        if isinstance(self.widget, QtGui.QAbstractSpinBox):
            text = asUnicode(self.widget.lineEdit().text())
        elif isinstance(self.widget, QtGui.QComboBox):
            text = self.widget.currentText()
        else:
            text = asUnicode(value)
        self.displayLabel.setText(text)
    def widgetValueChanged(self):
        ## called when the widget's value has been changed by the user
        val = self.widget.value()
        newVal = self.param.setValue(val)
    def widgetValueChanging(self, *args):
        """
        Called when the widget's value is changing, but not finalized.
        For example: editing text before pressing enter or changing focus.
        """
        # This is a bit sketchy: assume the last argument of each signal is
        # the value..
        self.param.sigValueChanging.emit(self.param, args[-1])
    def selected(self, sel):
        """Called when this item has been selected (sel=True) OR deselected (sel=False)"""
        ParameterItem.selected(self, sel)
        if self.widget is None:
            return
        if sel and self.param.writable():
            self.showEditor()
        elif self.hideWidget:
            self.hideEditor()
    def showEditor(self):
        self.widget.show()
        self.displayLabel.hide()
        self.widget.setFocus(QtCore.Qt.OtherFocusReason)
        if isinstance(self.widget, SpinBox):
            self.widget.selectNumber()  # select the numerical portion of the text for quick editing
    def hideEditor(self):
        self.widget.hide()
        self.displayLabel.show()
    def limitsChanged(self, param, limits):
        """Called when the parameter's limits have changed"""
        ParameterItem.limitsChanged(self, param, limits)
        t = self.param.opts['type']
        if t == 'int' or t == 'float':
            self.widget.setOpts(bounds=limits)
        else:
            return  ## don't know what to do with any other types..
    def defaultChanged(self, param, value):
        self.updateDefaultBtn()
    def treeWidgetChanged(self):
        """Called when this item is added or removed from a tree."""
        ParameterItem.treeWidgetChanged(self)
        ## add all widgets for this item into the tree
        if self.widget is not None:
            tree = self.treeWidget()
            if tree is None:
                return
            tree.setItemWidget(self, 1, self.layoutWidget)
            self.displayLabel.hide()
            self.selected(False)
    def defaultClicked(self):
        self.param.setToDefault()
    def optsChanged(self, param, opts):
        """Called when any options are changed that are not
        name, value, default, or limits"""
        #print "opts changed:", opts
        ParameterItem.optsChanged(self, param, opts)
        if 'readonly' in opts:
            self.updateDefaultBtn()
            if isinstance(self.widget, (QtGui.QCheckBox,ColorButton)):
                self.widget.setEnabled(not opts['readonly'])
        ## If widget is a SpinBox, pass options straight through
        if isinstance(self.widget, SpinBox):
            # send only options supported by spinbox
            sbOpts = {}
            if 'units' in opts and 'suffix' not in opts:
                sbOpts['suffix'] = opts['units']
            for k,v in opts.items():
                if k in self.widget.opts:
                    sbOpts[k] = v
            self.widget.setOpts(**sbOpts)
            self.updateDisplayLabel()
class EventProxy(QtCore.QObject):
    """Installs itself as an event filter on *qobj* and forwards every event
    to *callback*; callback(obj, event) should return True to consume the
    event (standard QObject.eventFilter contract)."""
    def __init__(self, qobj, callback):
        QtCore.QObject.__init__(self)
        self.callback = callback
        qobj.installEventFilter(self)
    def eventFilter(self, obj, ev):
        # Delegate entirely to the supplied callback.
        return self.callback(obj, ev)
class SimpleParameter(Parameter):
    """Parameter holding a single scalar-like value.

    Handles the built-in types int, float, bool, str, color, and colormap;
    incoming values are coerced via _interpretValue().
    """
    itemClass = WidgetParameterItem
    def __init__(self, *args, **kargs):
        Parameter.__init__(self, *args, **kargs)
        ## override a few methods for color parameters
        if self.opts['type'] == 'color':
            self.value = self.colorValue
            self.saveState = self.saveColorState
    def colorValue(self):
        # Normalize whatever is stored into a QColor.
        return fn.mkColor(Parameter.value(self))
    def saveColorState(self, *args, **kwds):
        # Serialize the color as an (r, g, b, a) tuple instead of a QColor.
        state = Parameter.saveState(self, *args, **kwds)
        state['value'] = fn.colorTuple(self.value())
        return state
    def _interpretValue(self, v):
        ## Coerce *v* to this parameter's declared type.
        ## NOTE: the dispatch dict was previously named `fn`, shadowing the
        ## module-level `functions as fn` import; renamed for clarity.
        interpret = {
            'int': int,
            'float': float,
            'bool': bool,
            'str': asUnicode,
            'color': self._interpColor,
            'colormap': self._interpColormap,
        }[self.opts['type']]
        return interpret(v)
    def _interpColor(self, v):
        return fn.mkColor(v)
    def _interpColormap(self, v):
        if not isinstance(v, ColorMap):
            raise TypeError("Cannot set colormap parameter from object %r" % v)
        return v
## Register the built-in scalar types so Parameter.create(type=...) finds them.
registerParameterType('int', SimpleParameter, override=True)
registerParameterType('float', SimpleParameter, override=True)
registerParameterType('bool', SimpleParameter, override=True)
registerParameterType('str', SimpleParameter, override=True)
registerParameterType('color', SimpleParameter, override=True)
registerParameterType('colormap', SimpleParameter, override=True)
class GroupParameterItem(ParameterItem):
    """
    Group parameters are used mainly as a generic parent item that holds (and groups!) a set
    of child parameters. It also provides a simple mechanism for displaying a button or combo
    that can be used to add new parameters to the group.
    """
    def __init__(self, param, depth):
        ParameterItem.__init__(self, param, depth)
        self.updateDepth(depth)
        ## Optional extra tree row holding an "add new child" button or combo;
        ## only created when the parameter declares 'addText'.
        self.addItem = None
        if 'addText' in param.opts:
            addText = param.opts['addText']
            if 'addList' in param.opts:
                ## combo box: user picks the type of child to add
                self.addWidget = QtGui.QComboBox()
                self.addWidget.setSizeAdjustPolicy(QtGui.QComboBox.AdjustToContents)
                self.updateAddList()
                self.addWidget.currentIndexChanged.connect(self.addChanged)
            else:
                ## plain push button: one kind of child to add
                self.addWidget = QtGui.QPushButton(addText)
                self.addWidget.clicked.connect(self.addClicked)
            w = QtGui.QWidget()
            l = QtGui.QHBoxLayout()
            l.setContentsMargins(0,0,0,0)
            w.setLayout(l)
            l.addWidget(self.addWidget)
            l.addStretch()
            #l.addItem(QtGui.QSpacerItem(200, 10, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum))
            self.addWidgetBox = w
            self.addItem = QtGui.QTreeWidgetItem([])
            self.addItem.setFlags(QtCore.Qt.ItemIsEnabled)
            ParameterItem.addChild(self, self.addItem)
    def updateDepth(self, depth):
        ## Change item's appearance based on its depth in the tree
        ## This allows highest-level groups to be displayed more prominently.
        if depth == 0:
            for c in [0,1]:
                self.setBackground(c, QtGui.QBrush(QtGui.QColor(100,100,100)))
                self.setForeground(c, QtGui.QBrush(QtGui.QColor(220,220,255)))
                font = self.font(c)
                font.setBold(True)
                font.setPointSize(font.pointSize()+1)
                self.setFont(c, font)
                self.setSizeHint(0, QtCore.QSize(0, 25))
        else:
            for c in [0,1]:
                self.setBackground(c, QtGui.QBrush(QtGui.QColor(220,220,220)))
                font = self.font(c)
                font.setBold(True)
                #font.setPointSize(font.pointSize()+1)
                self.setFont(c, font)
                self.setSizeHint(0, QtCore.QSize(0, 20))
    def addClicked(self):
        """Called when "add new" button is clicked
        The parameter MUST have an 'addNew' method defined.
        """
        self.param.addNew()
    def addChanged(self):
        """Called when "add new" combo is changed
        The parameter MUST have an 'addNew' method defined.
        """
        ## index 0 is the placeholder 'addText' entry, not a real type
        if self.addWidget.currentIndex() == 0:
            return
        typ = asUnicode(self.addWidget.currentText())
        self.param.addNew(typ)
        ## reset combo back to the placeholder
        self.addWidget.setCurrentIndex(0)
    def treeWidgetChanged(self):
        ## Called when this item is added/removed from a tree; install the
        ## add-button row widget into the tree.
        ParameterItem.treeWidgetChanged(self)
        self.treeWidget().setFirstItemColumnSpanned(self, True)
        if self.addItem is not None:
            self.treeWidget().setItemWidget(self.addItem, 0, self.addWidgetBox)
            self.treeWidget().setFirstItemColumnSpanned(self.addItem, True)
    def addChild(self, child):  ## make sure added childs are actually inserted before add btn
        if self.addItem is not None:
            ParameterItem.insertChild(self, self.childCount()-1, child)
        else:
            ParameterItem.addChild(self, child)
    def optsChanged(self, param, changed):
        ## Refresh the combo when the set of addable types changes.
        if 'addList' in changed:
            self.updateAddList()
    def updateAddList(self):
        ## Rebuild the combo contents; signals blocked so repopulating does
        ## not fire addChanged().
        self.addWidget.blockSignals(True)
        try:
            self.addWidget.clear()
            self.addWidget.addItem(self.param.opts['addText'])
            for t in self.param.opts['addList']:
                self.addWidget.addItem(t)
        finally:
            self.addWidget.blockSignals(False)
class GroupParameter(Parameter):
    """
    Group parameters are used mainly as a generic parent item that holds (and groups!) a set
    of child parameters.
    It also provides a simple mechanism for displaying a button or combo
    that can be used to add new parameters to the group. To enable this, the group
    must be initialized with the 'addText' option (the text will be displayed on
    a button which, when clicked, will cause addNew() to be called). If the 'addList'
    option is specified as well, then a dropdown-list of addable items will be displayed
    instead of a button.
    """
    itemClass = GroupParameterItem
    # Emitted as (self, typ) whenever the user requests a new child.
    sigAddNew = QtCore.Signal(object, object)  # self, type
    def addNew(self, typ=None):
        """
        This method is called when the user has requested to add a new item to the group.
        By default, it emits ``sigAddNew(self, typ)``.

        ``typ`` is the entry selected from the 'addList' combo, or None when
        the group uses a plain button.
        """
        self.sigAddNew.emit(self, typ)
    def setAddList(self, vals):
        """Change the list of options available for the user to add to the group."""
        self.setOpts(addList=vals)
## Make the 'group' type available to Parameter.create().
registerParameterType('group', GroupParameter, override=True)
class ListParameterItem(WidgetParameterItem):
    """
    WidgetParameterItem subclass providing comboBox that lets the user select from a list of options.
    """
    def __init__(self, param, depth):
        ## remember the last value requested via setValue() so that
        ## limitsChanged() can restore the selection after rebuilding the combo
        self.targetValue = None
        WidgetParameterItem.__init__(self, param, depth)
    def makeWidget(self):
        opts = self.param.opts
        t = opts['type']
        w = QtGui.QComboBox()
        w.setMaximumHeight(20)  ## set to match height of spin box and line edit
        w.sigChanged = w.currentIndexChanged
        w.value = self.value
        w.setValue = self.setValue
        self.widget = w  ## needs to be set before limits are changed
        self.limitsChanged(self.param, self.param.opts['limits'])
        if len(self.forward) > 0:
            self.setValue(self.param.value())
        return w
    def value(self):
        ## Map the displayed text back to the underlying value.
        key = asUnicode(self.widget.currentText())
        return self.forward.get(key, None)
    def setValue(self, val):
        self.targetValue = val
        if val not in self.reverse[0]:
            ## unknown value: fall back to the first entry
            self.widget.setCurrentIndex(0)
        else:
            key = self.reverse[1][self.reverse[0].index(val)]
            ind = self.widget.findText(key)
            self.widget.setCurrentIndex(ind)
    def limitsChanged(self, param, limits):
        # set up forward / reverse mappings for name:value
        if len(limits) == 0:
            limits = ['']  ## Can never have an empty list--there is always at least a single blank item.
        self.forward, self.reverse = ListParameter.mapping(limits)
        ## Rebuild the combo with signals blocked so repopulation does not
        ## emit spurious value changes; restore the previous selection.
        try:
            self.widget.blockSignals(True)
            val = self.targetValue  #asUnicode(self.widget.currentText())
            self.widget.clear()
            for k in self.forward:
                self.widget.addItem(k)
                if k == val:
                    self.widget.setCurrentIndex(self.widget.count()-1)
                    self.updateDisplayLabel()
        finally:
            self.widget.blockSignals(False)
class ListParameter(Parameter):
    """Parameter that is restricted to a fixed set of acceptable values.

    The allowed values are given via the 'values' (or 'limits') option,
    either as a list of values or as a {name: value} dict.
    """
    itemClass = ListParameterItem
    def __init__(self, **opts):
        self.forward = OrderedDict()  ## {name: value, ...}
        self.reverse = ([], [])       ## ([value, ...], [name, ...])
        ## Parameter uses 'limits' option to define the set of allowed values
        if 'values' in opts:
            opts['limits'] = opts['values']
        if opts.get('limits', None) is None:
            opts['limits'] = []
        Parameter.__init__(self, **opts)
        self.setLimits(opts['limits'])
    def setLimits(self, limits):
        """Replace the set of allowed values with *limits* (list or dict)."""
        self.forward, self.reverse = self.mapping(limits)
        Parameter.setLimits(self, limits)
        ## if the current value is no longer allowed, fall back to the first
        ## allowed value
        if len(self.reverse[0]) > 0 and self.value() not in self.reverse[0]:
            self.setValue(self.reverse[0][0])
    ## (removed a long block of commented-out addItem() code that had been
    ## dead for some time; use setLimits() to change the allowed values.)
    @staticmethod
    def mapping(limits):
        ## Return forward and reverse mapping objects given a limit specification
        forward = OrderedDict()  ## {name: value, ...}
        reverse = ([], [])       ## ([value, ...], [name, ...])
        if isinstance(limits, dict):
            for k, v in limits.items():
                forward[k] = v
                reverse[0].append(v)
                reverse[1].append(k)
        else:
            ## plain list: the display name is the stringified value
            for v in limits:
                n = asUnicode(v)
                forward[n] = v
                reverse[0].append(v)
                reverse[1].append(n)
        return forward, reverse
## Make the 'list' type available to Parameter.create().
registerParameterType('list', ListParameter, override=True)
class ActionParameterItem(ParameterItem):
    """Tree item displaying a single push button; clicking it activates the
    associated ActionParameter."""
    def __init__(self, param, depth):
        ParameterItem.__init__(self, param, depth)
        self.layoutWidget = QtGui.QWidget()
        self.layout = QtGui.QHBoxLayout()
        self.layout.setContentsMargins(0, 0, 0, 0)
        self.layoutWidget.setLayout(self.layout)
        ## button label mirrors the parameter name (kept in sync below)
        self.button = QtGui.QPushButton(param.name())
        #self.layout.addSpacing(100)
        self.layout.addWidget(self.button)
        self.layout.addStretch()
        self.button.clicked.connect(self.buttonClicked)
        param.sigNameChanged.connect(self.paramRenamed)
        ## clear column text; the button itself shows the name
        self.setText(0, '')
    def treeWidgetChanged(self):
        ParameterItem.treeWidgetChanged(self)
        tree = self.treeWidget()
        if tree is None:
            return
        ## span both columns and install the button widget
        tree.setFirstItemColumnSpanned(self, True)
        tree.setItemWidget(self, 0, self.layoutWidget)
    def paramRenamed(self, param, name):
        # Keep the button label in sync with the parameter name.
        self.button.setText(name)
    def buttonClicked(self):
        self.param.activate()
class ActionParameter(Parameter):
    """Used for displaying a button within the tree."""
    itemClass = ActionParameterItem
    # Emitted with (self) when the button is clicked.
    sigActivated = QtCore.Signal(object)
    def activate(self):
        """Emit sigActivated and record an 'activated' state change."""
        self.sigActivated.emit(self)
        self.emitStateChanged('activated', None)
## Make the 'action' (button) type available to Parameter.create().
registerParameterType('action', ActionParameter, override=True)
class TextParameterItem(WidgetParameterItem):
    """Tree item displaying a multi-line QTextEdit in a spanned sub-row."""
    def __init__(self, param, depth):
        WidgetParameterItem.__init__(self, param, depth)
        self.hideWidget = False
        ## extra child row that hosts the text box across both columns
        self.subItem = QtGui.QTreeWidgetItem()
        self.addChild(self.subItem)
    def treeWidgetChanged(self):
        ## TODO: fix so that superclass method can be called
        ## (WidgetParameter should just natively support this style)
        #WidgetParameterItem.treeWidgetChanged(self)
        self.treeWidget().setFirstItemColumnSpanned(self.subItem, True)
        self.treeWidget().setItemWidget(self.subItem, 0, self.textBox)
        # for now, these are copied from ParameterItem.treeWidgetChanged
        self.setHidden(not self.param.opts.get('visible', True))
        self.setExpanded(self.param.opts.get('expanded', True))
    def makeWidget(self):
        ## QTextEdit adapted to the sigChanged/value/setValue widget contract
        self.textBox = QtGui.QTextEdit()
        self.textBox.setMaximumHeight(100)
        self.textBox.setReadOnly(self.param.opts.get('readonly', False))
        self.textBox.value = lambda: str(self.textBox.toPlainText())
        self.textBox.setValue = self.textBox.setPlainText
        self.textBox.sigChanged = self.textBox.textChanged
        return self.textBox
class TextParameter(Parameter):
    """Editable string; displayed as large text box in the tree."""
    itemClass = TextParameterItem
## Make the multi-line 'text' type available to Parameter.create().
registerParameterType('text', TextParameter, override=True)
|
{
"content_hash": "42539443b0c747e4854c28ba98ca7fb3",
"timestamp": "",
"source": "github",
"line_count": 682,
"max_line_length": 106,
"avg_line_length": 37.68768328445748,
"alnum_prop": 0.5816052600863713,
"repo_name": "pmaunz/pyqtgraph",
"id": "d137410d68b9dace80ce338e49398dcd0c60880e",
"size": "25703",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pyqtgraph/parametertree/parameterTypes.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Matlab",
"bytes": "1752"
},
{
"name": "Python",
"bytes": "2125387"
}
],
"symlink_target": ""
}
|
from base64 import b64encode
from optparse import make_option
import json
import urllib
import urllib2
import argparse
import time
import os
import subprocess
def make_request(url, params, auth=None, data=None, contentType=None):
    """
    Prepares a request from a url, params, and optionally authentication.

    url -- base URL; params are urlencoded and appended directly.
    auth -- pre-encoded base64 'user:pass' string for HTTP Basic auth.
    data -- optional request body (a body makes urllib2 issue a POST).
    contentType -- explicit Content-type header; when absent but data is
                   given, defaults to 'text/xml' (GeoServer REST XML).
    Returns the open urllib2 response object.
    """
    # NOTE(review): prints the full request URL on every call -- presumably
    # intentional progress logging for this CLI tool.
    print url + urllib.urlencode(params)
    req = urllib2.Request(url + urllib.urlencode(params), data=data)
    if auth:
        req.add_header('AUTHORIZATION', 'Basic ' + auth)
    if contentType:
        req.add_header('Content-type', contentType)
    else:
        if data:
            req.add_header('Content-type', 'text/xml')
    return urllib2.urlopen(req)
def parse_url(url):
    """Normalize a base URL so it always ends with a trailing slash.

    Returns None when *url* is None or empty; otherwise returns *url*
    with exactly one '/' appended if it did not already end with one.
    """
    if not url:
        return None
    # endswith replaces the original rfind()/index arithmetic; same result,
    # clearer intent.
    if not url.endswith('/'):
        url += '/'
    return url
def buildPOSTDataLayer(name,nativeName,title):
    """Render the PostGIS-layer XML POST body from the on-disk template.

    Substitutes the {{name}}, {{nativeName}}, and {{title}} placeholders.
    Returns the rendered XML string (None only if the read fails silently,
    which open() would actually raise on).
    """
    # NOTE(review): template path is hard-coded to the deployment location
    # of cybergis-scripts; this function fails with IOError elsewhere.
    file_data ="/opt/cybergis-scripts.git/templates/post_postgis_layer.xml"
    data = None
    with open (file_data, "r") as f:
        data = f.read().replace('{{name}}', name).replace('{{nativeName}}', nativeName).replace('{{title}}', title)
    return data
def createLayer(verbose, geoserver, workspace, auth, datastore, layer, prefix):
    """POST a featuretype definition to GeoServer, publishing *layer*.

    verbose -- >0 enables progress printing.
    geoserver -- base GeoServer URL (trailing slash expected, see parse_url).
    workspace/datastore -- target GeoServer containers.
    layer -- native (PostGIS) table name; the published name is
             '<prefix>_<layer>' when prefix is truthy.
    Raises on HTTP failure or on any status other than 201 Created.
    """
    if verbose > 0:
        print('Creating GeoServer Layer for '+layer+".")
    params = {}
    if prefix:
        name = prefix+"_"+layer
    else:
        name = layer
    data = buildPOSTDataLayer(name,layer,name)
    url = geoserver+"rest/workspaces/"+workspace+"/datastores/"+datastore+"/featuretypes.xml"
    try:
        request = make_request(url=url+'?', params=params, auth=auth, data=data)
    except:
        # Log context before re-raising so the caller sees the full request.
        #raise Exception("Create layer failed with url="+url+", params="+str(params)+", data="+data)
        print "Create layer failed with url="+url+", params="+str(params)+", data="+data
        raise
    if request.getcode() != 201:
        raise Exception("Create layer failed: Status Code {0}".format(request.getcode()))
    if verbose > 0:
        print('Layer created.')
def parse_featuretypes(featureTypes):
    """Split a comma-separated feature-type string into a list.

    Returns None for a None/empty input or for anything that does not
    support .split() (matching the original silent-failure behavior,
    but with the bare except narrowed to AttributeError).
    """
    if not featureTypes:
        return None
    try:
        return featureTypes.split(",")
    except AttributeError:
        return None
def run(args):
    """Entry point: publish each requested PostGIS table as a GeoServer layer.

    args -- parsed CLI namespace with: verbose, featuretypes (comma-separated
    string), datastore, geoserver, workspace, prefix, username, password.
    Returns 1 when the featuretypes argument cannot be parsed; otherwise
    raises on the first layer-creation failure.
    """
    #print args
    #==#
    verbose = args.verbose
    #==#
    featureTypes = parse_featuretypes(args.featuretypes)
    datastore = args.datastore
    geoserver = parse_url(args.geoserver)
    workspace = args.workspace
    prefix = args.prefix
    #==#
    ## HTTP Basic auth header payload; only sent when both credentials given.
    auth = None
    if args.username and args.password:
        auth = b64encode('{0}:{1}'.format(args.username, args.password))
    #==#
    print "=================================="
    print "#==#"
    print "CyberGIS Script / cybergis-scripts-geoserver-publish-layers.py"
    print "Publish PostGIS Table as Layer"
    print "#==#"
    #==#
    if not featureTypes:
        print "Could not parse featuretypes correctly."
        return 1;
    #==#
    #Publish Feature Types as Layers
    for ft in featureTypes:
        try:
            createLayer(verbose, geoserver, workspace, auth, datastore, ft, prefix)
        except:
            ## print context, then propagate -- a single failure aborts the run
            print "Couldn't create layer from data store "+datastore+" for feature type "+ft+"."
            raise
    print "=================================="
|
{
"content_hash": "061545675eca6f4d4e889f3c75b8a383",
"timestamp": "",
"source": "github",
"line_count": 114,
"max_line_length": 115,
"avg_line_length": 29.54385964912281,
"alnum_prop": 0.6036223277909739,
"repo_name": "state-hiu/cybergis-scripts",
"id": "ce4f435b95d8e2d6db62b5e74546444595d6d2f5",
"size": "3368",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/cybergis/gs/_geoserver_publish_layers.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "PLpgSQL",
"bytes": "525"
},
{
"name": "Python",
"bytes": "99431"
},
{
"name": "Shell",
"bytes": "45614"
}
],
"symlink_target": ""
}
|
import dbus
import dbusmock
import bluefang
import subprocess
import pytest
# Shared fixture values for the mocked BlueZ adapter/device used below.
adapter_name = 'hci0'
system_name = 'my-device'
address = '11:22:33:44:55:66'
alias = 'My Device'
class ClientDBusTestCase(dbusmock.DBusTestCase):
    """Tests for the bluefang client against a python-dbusmock BlueZ 5 mock
    running on a private system bus."""
    @classmethod
    def setUpClass(klass):
        # One private system bus + mocked BlueZ daemon for the whole class.
        klass.start_system_bus()
        klass.dbus_con = klass.get_dbus(True)
        (klass.p_mock, klass.obj_bluez) = klass.spawn_server_template('bluez5', {}, stdout=subprocess.PIPE)
    def setUp(self):
        # Reset the mock between tests; Reset may not exist on older
        # dbusmock templates, in which case we proceed with stale state.
        try:
            self.obj_bluez.Reset()
        except:
            pass  # fuggedaboutit
        self.dbusmock = dbus.Interface(self.obj_bluez, dbusmock.MOCK_IFACE)
        self.dbusmock_bluez = dbus.Interface(self.obj_bluez, 'org.bluez.Mock')
    def test_info_without_device(self):
        with pytest.raises(Exception) as e:
            connection = bluefang.Bluefang()
            connection.info()
        assert str(e.value) == 'Unable to find Bluetooth device'
    def test_info(self):
        self.dbusmock_bluez.AddAdapter(adapter_name, system_name)
        self.dbusmock_bluez.AddDevice(adapter_name, address, alias)
        connection = bluefang.Bluefang()
        adapter = connection.info()
        assert(adapter['Name'] == system_name)
        assert(adapter['Discoverable'])
        assert(adapter['Class'] == 268)
    def test_scan_without_adapter_or_device(self):
        with pytest.raises(dbus.exceptions.DBusException) as e:
            connection = bluefang.Bluefang()
            connection.scan(timeout_in_ms=1)
        err_msg = 'Method "StartDiscovery" with signature "" on interface "org.bluez.Adapter1" doesn\'t exist'
        assert err_msg in str(e.value)
    def test_scan_without_device(self):
        self.dbusmock_bluez.AddAdapter(adapter_name, system_name)
        connection = bluefang.Bluefang()
        devices = connection.scan(timeout_in_ms=1)
        assert(len(devices) == 0)
    def test_scan(self):
        # Uses the module-level adapter_name/address/alias constants; the
        # locals previously re-declared here duplicated them verbatim.
        self.dbusmock_bluez.AddAdapter(adapter_name, system_name)
        self.dbusmock_bluez.AddDevice(adapter_name, address, alias)
        connection = bluefang.Bluefang()
        devices = connection.scan(timeout_in_ms=1)
        assert(len(devices) == 1)
        assert(devices == [
            bluefang.BluetoothDevice(
                name=alias,
                alias=alias,
                address=address,
                bluetooth_class='Unknown',
                is_connected=False,
                is_paired=False,
                path='/org/bluez/%s/dev_%s' % (adapter_name, address.replace(":", "_"))
            )
        ])
    def test_connect_to_unconnected_device(self):
        with pytest.raises(Exception) as e:
            connection = bluefang.Bluefang()
            connection.connect('0E:0E:0E:0E:0E')
        assert str(e.value) == "Unable to find device 0E:0E:0E:0E:0E. Try scanning first."
    def test_trust_device(self):
        # Deliberately uses its own adapter/device values (distinct from the
        # module constants) so this test is isolated from the shared fixture.
        adapter_name = 'hci9'
        address = '55:22:33:44:66:77'
        alias = 'My Device'
        self.dbusmock_bluez.AddAdapter(adapter_name, system_name)
        self.dbusmock_bluez.AddDevice(adapter_name, address, alias)
        connection = bluefang.Bluefang()
        connection.agent.trust_device('/org/bluez/hci9/dev_55_22_33_44_66_77')
        adapter = dbus.Interface(dbus.SystemBus().get_object("org.bluez", '/org/bluez/hci9/dev_55_22_33_44_66_77'), "org.freedesktop.DBus.Properties")
        assert(adapter.Get("org.bluez.Device1", "Trusted") == True)
    def test_agent_without_adapter(self):
        connection = bluefang.Bluefang()
        with pytest.raises(Exception) as e:
            connection.agent.start()
            try:
                connection.pair(timeout_in_ms=1)
            finally:
                connection.agent.stop()
    def test_agent(self):
        connection = bluefang.Bluefang()
        connection.agent.start()
        # (redundant local adapter_name = 'hci0' removed; identical to the
        # module-level constant.)
        self.dbusmock_bluez.AddAdapter(adapter_name, system_name)
        connection.pair(timeout_in_ms=1)
        connection.agent.stop()
        adapter = dbus.Interface(dbus.SystemBus().get_object("org.bluez", "/org/bluez/hci0"), "org.freedesktop.DBus.Properties")
        assert(adapter.Get("org.bluez.Adapter1", "Discoverable") == True)
    def test_register_profile_invalid_path(self):
        with pytest.raises(ValueError) as e:
            connection = bluefang.Bluefang()
            connection.register_profile('somepath')
        err_msg = "Invalid object path 'somepath': does not start with '/'"
        assert err_msg in str(e.value)
|
{
"content_hash": "677a8a8b64d5478ecd86e305ed3dcd7b",
"timestamp": "",
"source": "github",
"line_count": 141,
"max_line_length": 150,
"avg_line_length": 33.04255319148936,
"alnum_prop": 0.6160120197467268,
"repo_name": "tmcneal/bluefang",
"id": "a0aa15f6ecb6623da55c7db41bf8660f74fb4287",
"size": "4684",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_bluefang.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "53494"
},
{
"name": "Shell",
"bytes": "1392"
}
],
"symlink_target": ""
}
|
import os
import sys
import argparse
from django.core.management import execute_from_command_line
# Point Django at the package's demo-site settings before any test runs.
os.environ["DJANGO_SETTINGS_MODULE"] = "wagtailaltgenerator.tests.demosite.settings"
def runtests():
    """Run the Django test suite, forwarding any extra CLI arguments.

    parse_known_args() strips arguments this script does not define; the
    parsed namespace itself was never used, so it is discarded explicitly.
    """
    _, rest = argparse.ArgumentParser().parse_known_args()
    argv = [sys.argv[0], "test"] + rest
    execute_from_command_line(argv)
# Allow invoking as a script: `python runtests.py [test args...]`.
if __name__ == "__main__":
    runtests()
|
{
"content_hash": "4963ab9b54be73b54605d08e4e145848",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 84,
"avg_line_length": 21.444444444444443,
"alnum_prop": 0.7020725388601037,
"repo_name": "marteinn/wagtail-alt-generator",
"id": "278a80ccd00bbd613ccf59ba3cee2f3d964b2eb4",
"size": "386",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "runtests.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Dockerfile",
"bytes": "546"
},
{
"name": "HTML",
"bytes": "4306"
},
{
"name": "Makefile",
"bytes": "131"
},
{
"name": "Python",
"bytes": "41741"
},
{
"name": "Shell",
"bytes": "2372"
}
],
"symlink_target": ""
}
|
import mock
from oslo_log import log as logging
from oslo_reports import guru_meditation_report as gmr
from nova.cmd import baseproxy
from nova import config
from nova.console import websocketproxy
from nova import test
from nova import version
@mock.patch.object(config, 'parse_args', new=lambda *args, **kwargs: None)
class BaseProxyTestCase(test.NoDBTestCase):
    """Unit tests for nova.cmd.baseproxy.proxy startup/error paths."""
    @mock.patch('os.path.exists', return_value=False)
    # NOTE(mriedem): sys.exit raises TestingException so we can actually exit
    # the test normally.
    @mock.patch('sys.exit', side_effect=test.TestingException)
    def test_proxy_ssl_without_cert(self, mock_exit, mock_exists):
        self.flags(ssl_only=True)
        self.assertRaises(test.TestingException, baseproxy.proxy,
                          '0.0.0.0', '6080')
        mock_exit.assert_called_once_with(-1)
    @mock.patch('os.path.exists', return_value=False)
    @mock.patch('sys.exit', side_effect=test.TestingException)
    def test_proxy_web_dir_does_not_exist(self, mock_exit, mock_exists):
        self.flags(web='/my/fake/webserver/')
        self.assertRaises(test.TestingException, baseproxy.proxy,
                          '0.0.0.0', '6080')
        mock_exit.assert_called_once_with(-1)
    @mock.patch('os.path.exists', return_value=True)
    @mock.patch.object(logging, 'setup')
    @mock.patch.object(gmr.TextGuruMeditation, 'setup_autorun')
    @mock.patch('nova.console.websocketproxy.NovaWebSocketProxy.__init__',
                return_value=None)
    @mock.patch('nova.console.websocketproxy.NovaWebSocketProxy.start_server')
    def test_proxy(self, mock_start, mock_init, mock_gmr, mock_log,
                   mock_exists):
        # Force verbose=False so something else testing nova.cmd.baseproxy
        # doesn't impact the call to mocked NovaWebSocketProxy.__init__.
        self.flags(verbose=False)
        baseproxy.proxy('0.0.0.0', '6080')
        mock_log.assert_called_once_with(baseproxy.CONF, 'nova')
        # Fixed: 'mock_assert_called_once_with' is not a Mock assertion
        # method -- calling it just auto-creates an attribute and verifies
        # nothing. Use the real assertion so the setup_autorun(version) call
        # is actually checked.
        mock_gmr.assert_called_once_with(version)
        mock_init.assert_called_once_with(
            listen_host='0.0.0.0', listen_port='6080', source_is_ipv6=False,
            verbose=False, cert='self.pem', key=None, ssl_only=False,
            daemon=False, record=None, traffic=False,
            web='/usr/share/spice-html5', file_only=True,
            RequestHandlerClass=websocketproxy.NovaProxyRequestHandler)
        mock_start.assert_called_once_with()
    @mock.patch('sys.stderr.write')
    @mock.patch('os.path.exists', return_value=False)
    @mock.patch('sys.exit', side_effect=test.TestingException)
    def test_proxy_exit_with_error(self, mock_exit, mock_exists, mock_stderr):
        self.flags(ssl_only=True)
        self.assertRaises(test.TestingException, baseproxy.proxy,
                          '0.0.0.0', '6080')
        mock_stderr.assert_called_once_with(
            'SSL only and self.pem not found\n')
        mock_exit.assert_called_once_with(-1)
|
{
"content_hash": "f0ffa6076e857d8702613d8ef0b933b5",
"timestamp": "",
"source": "github",
"line_count": 64,
"max_line_length": 78,
"avg_line_length": 46.84375,
"alnum_prop": 0.6604402935290193,
"repo_name": "cyx1231st/nova",
"id": "8c89f8257ec053e033b3eec112712923e09749d8",
"size": "3572",
"binary": false,
"copies": "6",
"ref": "refs/heads/eventually-consistent-host-state-mitaka",
"path": "nova/tests/unit/cmd/test_baseproxy.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "17388661"
},
{
"name": "Shell",
"bytes": "36658"
},
{
"name": "Smarty",
"bytes": "291947"
}
],
"symlink_target": ""
}
|
"""Sample that implements a text client for the Google Assistant Service."""
import os
import logging
import json
import click
import google.auth.transport.grpc
import google.auth.transport.requests
import google.oauth2.credentials
from google.assistant.embedded.v1alpha2 import (
embedded_assistant_pb2,
embedded_assistant_pb2_grpc
)
try:
from . import (
assistant_helpers,
browser_helpers,
)
except (SystemError, ImportError):
import assistant_helpers
import browser_helpers
ASSISTANT_API_ENDPOINT = 'embeddedassistant.googleapis.com'
DEFAULT_GRPC_DEADLINE = 60 * 3 + 5
PLAYING = embedded_assistant_pb2.ScreenOutConfig.PLAYING
class SampleTextAssistant(object):
    """Sample Assistant that supports text based conversations.
    Args:
        language_code: language for the conversation.
        device_model_id: identifier of the device model.
        device_id: identifier of the registered device instance.
        display: enable visual display of assistant response.
        channel: authorized gRPC channel for connection to the
            Google Assistant API.
        deadline_sec: gRPC deadline in seconds for Google Assistant API call.
    """
    def __init__(self, language_code, device_model_id, device_id,
                 display, channel, deadline_sec):
        self.language_code = language_code
        self.device_model_id = device_model_id
        self.device_id = device_id
        # Opaque server-provided conversation state; None means no prior turn.
        self.conversation_state = None
        # Force reset of first conversation.
        self.is_new_conversation = True
        self.display = display
        self.assistant = embedded_assistant_pb2_grpc.EmbeddedAssistantStub(
            channel
        )
        self.deadline = deadline_sec
    def __enter__(self):
        return self
    def __exit__(self, etype, e, traceback):
        # Never suppress exceptions raised inside the 'with' block.
        if e:
            return False
    def assist(self, text_query):
        """Send a text request to the Assistant and playback the response.

        Returns a (text_response, html_response) tuple; either element
        is None when the server sends no supplemental text / screen data.
        """
        def iter_assist_requests():
            # The Assist RPC takes a request stream; here a single request
            # carrying the whole text query is yielded.
            config = embedded_assistant_pb2.AssistConfig(
                audio_out_config=embedded_assistant_pb2.AudioOutConfig(
                    encoding='LINEAR16',
                    sample_rate_hertz=16000,
                    volume_percentage=0,
                ),
                dialog_state_in=embedded_assistant_pb2.DialogStateIn(
                    language_code=self.language_code,
                    conversation_state=self.conversation_state,
                    is_new_conversation=self.is_new_conversation,
                ),
                device_config=embedded_assistant_pb2.DeviceConfig(
                    device_id=self.device_id,
                    device_model_id=self.device_model_id,
                ),
                text_query=text_query,
            )
            # Continue current conversation with later requests.
            self.is_new_conversation = False
            if self.display:
                config.screen_out_config.screen_mode = PLAYING
            req = embedded_assistant_pb2.AssistRequest(config=config)
            assistant_helpers.log_assist_request_without_audio(req)
            yield req
        text_response = None
        html_response = None
        # self.deadline is passed as the call deadline (seconds, per the
        # class docstring) for the streaming Assist RPC.
        for resp in self.assistant.Assist(iter_assist_requests(),
                                          self.deadline):
            assistant_helpers.log_assist_response_without_audio(resp)
            if resp.screen_out.data:
                html_response = resp.screen_out.data
            if resp.dialog_state_out.conversation_state:
                # Carry the server's state forward into the next turn.
                conversation_state = resp.dialog_state_out.conversation_state
                self.conversation_state = conversation_state
            if resp.dialog_state_out.supplemental_display_text:
                text_response = resp.dialog_state_out.supplemental_display_text
        return text_response, html_response
@click.command()
@click.option('--api-endpoint', default=ASSISTANT_API_ENDPOINT,
              metavar='<api endpoint>', show_default=True,
              help='Address of Google Assistant API service.')
@click.option('--credentials',
              metavar='<credentials>', show_default=True,
              default=os.path.join(click.get_app_dir('google-oauthlib-tool'),
                                   'credentials.json'),
              help='Path to read OAuth2 credentials.')
@click.option('--device-model-id',
              metavar='<device model id>',
              required=True,
              help=(('Unique device model identifier, '
                     'if not specified, it is read from --device-config')))
@click.option('--device-id',
              metavar='<device id>',
              required=True,
              help=(('Unique registered device instance identifier, '
                     'if not specified, it is read from --device-config, '
                     'if no device_config found: a new device is registered '
                     'using a unique id and a new device config is saved')))
@click.option('--lang', show_default=True,
              metavar='<language code>',
              default='en-US',
              help='Language code of the Assistant')
@click.option('--display', is_flag=True, default=False,
              help='Enable visual display of Assistant responses in HTML.')
@click.option('--verbose', '-v', is_flag=True, default=False,
              help='Verbose logging.')
@click.option('--grpc-deadline', default=DEFAULT_GRPC_DEADLINE,
              metavar='<grpc deadline>', show_default=True,
              help='gRPC deadline in seconds')
def main(api_endpoint, credentials,
         device_model_id, device_id, lang, display, verbose,
         grpc_deadline, *args, **kwargs):
    """Run an interactive text REPL against the Google Assistant API.

    Loads and refreshes OAuth2 credentials, opens an authorized gRPC
    channel, then loops forever prompting for a query and echoing the
    Assistant's text (and optionally HTML) response. Returns early,
    after logging, if credentials cannot be loaded.
    """
    # Setup logging.
    logging.basicConfig(level=logging.DEBUG if verbose else logging.INFO)
    # Load OAuth 2.0 credentials.
    try:
        with open(credentials, 'r') as f:
            credentials = google.oauth2.credentials.Credentials(token=None,
                                                                **json.load(f))
        http_request = google.auth.transport.requests.Request()
        credentials.refresh(http_request)
    except Exception as e:
        logging.error('Error loading credentials: %s', e)
        logging.error('Run google-oauthlib-tool to initialize '
                      'new OAuth 2.0 credentials.')
        return
    # Create an authorized gRPC channel.
    grpc_channel = google.auth.transport.grpc.secure_authorized_channel(
        credentials, http_request, api_endpoint)
    logging.info('Connecting to %s', api_endpoint)
    with SampleTextAssistant(lang, device_model_id, device_id, display,
                             grpc_channel, grpc_deadline) as assistant:
        # REPL: prompt, send the query, render text (and optional HTML).
        while True:
            query = click.prompt('')
            click.echo('<you> %s' % query)
            response_text, response_html = assistant.assist(text_query=query)
            if display and response_html:
                system_browser = browser_helpers.system_browser
                system_browser.display(response_html)
            if response_text:
                click.echo('<@assistant> %s' % response_text)
if __name__ == '__main__':
    main()
|
{
"content_hash": "70e03825cb8b2728b34ca22368c5fb31",
"timestamp": "",
"source": "github",
"line_count": 180,
"max_line_length": 79,
"avg_line_length": 40.144444444444446,
"alnum_prop": 0.6046221976197066,
"repo_name": "javier-ruiz-b/docker-rasppi-images",
"id": "c1ef4a3f76f8bec35d5c94a9b9a70f08b57fad40",
"size": "7806",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "raspberry-google-home/env/lib/python3.7/site-packages/googlesamples/assistant/grpc/textinput.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "15254"
},
{
"name": "PHP",
"bytes": "1132"
},
{
"name": "Shell",
"bytes": "17522"
}
],
"symlink_target": ""
}
|
"""
=============
Frangi filter
=============
The Frangi and hybrid Hessian filters can be used to detect continuous
edges, such as vessels, wrinkles, and rivers.
"""
from skimage.data import camera
from skimage.filters import frangi, hessian
import matplotlib.pyplot as plt
image = camera()
fig, ax = plt.subplots(ncols=3, subplot_kw={'adjustable': 'box-forced'})
ax[0].imshow(image, cmap=plt.cm.gray)
ax[0].set_title('Original image')
ax[1].imshow(frangi(image), cmap=plt.cm.gray)
ax[1].set_title('Frangi filter result')
ax[2].imshow(hessian(image), cmap=plt.cm.gray)
ax[2].set_title('Hybrid Hessian filter result')
for a in ax:
a.axis('off')
plt.tight_layout()
|
{
"content_hash": "8b0f934e33f54fc1ae52eac06e9ce5c6",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 72,
"avg_line_length": 21.870967741935484,
"alnum_prop": 0.6946902654867256,
"repo_name": "paalge/scikit-image",
"id": "8c41ce91e4d5d066de72cfa969c06f5edc40fd92",
"size": "678",
"binary": false,
"copies": "7",
"ref": "refs/heads/master",
"path": "doc/examples/filters/plot_frangi.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "235642"
},
{
"name": "C++",
"bytes": "44817"
},
{
"name": "Makefile",
"bytes": "567"
},
{
"name": "Python",
"bytes": "2532932"
}
],
"symlink_target": ""
}
|
import csv
import datetime
import zipfile
from collections import defaultdict
from datetime import timedelta
from io import StringIO
from celery import shared_task
from dateutil import relativedelta
from django.conf import settings
from django.contrib.auth.models import User
from django.core.files.storage import get_storage_class
from django.core.management import call_command
from django.utils import timezone
from onadata.celery import app
from .models.daily_xform_submission_counter import DailyXFormSubmissionCounter
from .models import Instance, XForm
@app.task()
def delete_daily_counters():
    """Purge daily XForm submission counters older than the retention window.

    The window length comes from ``settings.DAILY_COUNTERS_MAX_DAYS``;
    counters dated on or before today minus that many days are deleted.
    """
    cutoff = timezone.now().date() - timedelta(
        days=settings.DAILY_COUNTERS_MAX_DAYS
    )
    DailyXFormSubmissionCounter.objects.filter(date__lte=cutoff).delete()
# ## ISSUE 242 TEMPORARY FIX ##
# See https://github.com/kobotoolbox/kobocat/issues/242
@shared_task(soft_time_limit=600, time_limit=900)
def fix_root_node_names(**kwargs):
    """Run the ``fix_root_node_names`` management command as a Celery task.

    All keyword arguments are forwarded verbatim to the command.
    """
    call_command('fix_root_node_names', **kwargs)
# #### END ISSUE 242 FIX ######
@shared_task(soft_time_limit=3600, time_limit=3630)
def generate_stats_zip(output_filename):
    """Build a ZIP of month-by-month growth CSVs and save it to storage.

    One CSV per model (Instance, XForm, User). Each row holds the year,
    month, new-object count, running cumulative count and the last primary
    key seen that month, restricted to objects created since the start of
    the previous month. The archive is written to `output_filename`
    through the default storage backend.
    """
    # Limit to last month and this month
    now = datetime.datetime.now()
    start_of_last_month = datetime.datetime(
        year=now.year, month=now.month, day=1
    ) - relativedelta.relativedelta(months=1)
    REPORTS = {
        'instances (since {:%Y-%m-%d}).csv'.format(start_of_last_month): {
            'model': Instance,
            'date_field': 'date_created'
        },
        'xforms (since {:%Y-%m-%d}).csv'.format(start_of_last_month): {
            'model': XForm,
            'date_field': 'date_created'
        },
        'users (since {:%Y-%m-%d}).csv'.format(start_of_last_month): {
            'model': User,
            'date_field': 'date_joined'
        }
    }
    def list_created_by_month(model, date_field):
        """Return (year, month, new, cumulative, last_pk) tuples per month."""
        queryset = model.objects.filter(
            **{date_field + '__gte': start_of_last_month}
        )
        # Make a single, huge query to the database
        data_dump = list(queryset.values_list('pk', date_field))
        # Sort by date
        data_dump = sorted(data_dump, key=lambda x: x[1])
        year_month_counts = defaultdict(lambda: defaultdict(lambda: 0))
        last_pks = defaultdict(lambda: defaultdict(lambda: 0))
        for pk, date in data_dump:
            year_month_counts[date.year][date.month] += 1
            last_pks[date.year][date.month] = pk
        results = []
        cumulative = 0
        for year in sorted(year_month_counts.keys()):
            for month in sorted(year_month_counts[year].keys()):
                cumulative += year_month_counts[year][month]
                results.append((
                    year, month, year_month_counts[year][month],
                    cumulative, last_pks[year][month]
                ))
        return results
    default_storage = get_storage_class()()
    with default_storage.open(output_filename, 'wb') as output_file:
        # Use ZipFile as a context manager so the archive is closed (and
        # its central directory written) even if a report raises mid-way;
        # the original only closed it on the success path.
        with zipfile.ZipFile(output_file, 'w',
                             zipfile.ZIP_DEFLATED) as zip_file:
            for filename, report_settings in REPORTS.items():
                model_name_plural = report_settings[
                    'model']._meta.verbose_name_plural
                fieldnames = [
                    'Year',
                    'Month',
                    f'New {model_name_plural.capitalize()}',
                    f'Cumulative {model_name_plural.capitalize()}',
                    'Last Primary Key (possible clue about deleted objects)',
                ]
                data = list_created_by_month(
                    report_settings['model'], report_settings['date_field'])
                csv_io = StringIO()
                writer = csv.DictWriter(csv_io, fieldnames=fieldnames)
                writer.writeheader()
                for row in data:
                    writer.writerow(dict(zip(fieldnames, row)))
                zip_file.writestr(filename, csv_io.getvalue())
                csv_io.close()
|
{
"content_hash": "58ef54da310d70e018b416b355d3ac7e",
"timestamp": "",
"source": "github",
"line_count": 119,
"max_line_length": 78,
"avg_line_length": 34.226890756302524,
"alnum_prop": 0.6076602013258041,
"repo_name": "kobotoolbox/kobocat",
"id": "e12275fdd5479b18ae5a0a84ff5035d9556ce5e2",
"size": "4089",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "onadata/apps/logger/tasks.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "CSS",
"bytes": "146326"
},
{
"name": "Dockerfile",
"bytes": "3965"
},
{
"name": "HTML",
"bytes": "136962"
},
{
"name": "JavaScript",
"bytes": "734122"
},
{
"name": "Less",
"bytes": "19821"
},
{
"name": "Makefile",
"bytes": "2286"
},
{
"name": "Python",
"bytes": "1264157"
},
{
"name": "Shell",
"bytes": "9858"
}
],
"symlink_target": ""
}
|
import sys, getopt, argparse
from seldon.microservice import Microservices
if __name__ == "__main__":
import logging
logger = logging.getLogger()
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(name)s : %(message)s', level=logging.DEBUG)
logger.setLevel(logging.INFO)
parser = argparse.ArgumentParser(prog='microservice')
parser.add_argument('--model_name', help='name of model', required=True)
parser.add_argument('--pipeline', help='location of prediction pipeline', required=True)
parser.add_argument('--aws_key', help='aws key', required=False)
parser.add_argument('--aws_secret', help='aws secret', required=False)
args = parser.parse_args()
opts = vars(args)
m = Microservices(aws_key=args.aws_key,aws_secret=args.aws_secret)
app = m.create_prediction_microservice(args.pipeline,args.model_name)
app.run(host="0.0.0.0", debug=False)
|
{
"content_hash": "1a35908bae35463fd27f94bb6b49946f",
"timestamp": "",
"source": "github",
"line_count": 24,
"max_line_length": 107,
"avg_line_length": 38.083333333333336,
"alnum_prop": 0.6936542669584245,
"repo_name": "SeldonIO/seldon-server",
"id": "d8020ce740e40babbce36c4291810192a9869911",
"size": "914",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "docker/pyseldon/scripts/start_prediction_microservice.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Java",
"bytes": "1926357"
},
{
"name": "JavaScript",
"bytes": "48430"
},
{
"name": "Jupyter Notebook",
"bytes": "112349"
},
{
"name": "Makefile",
"bytes": "56033"
},
{
"name": "Python",
"bytes": "595373"
},
{
"name": "Ruby",
"bytes": "423"
},
{
"name": "Scala",
"bytes": "378790"
},
{
"name": "Shell",
"bytes": "122552"
}
],
"symlink_target": ""
}
|
from neutron_lib.api import extensions
from neutron_lib import constants as consts
from neutron_lib.plugins import constants as plugin_constants
from neutron_lib.plugins import directory
from oslo_log import log as logging
import oslo_messaging
LOG = logging.getLogger(__name__)
class MeteringRpcCallbacks(object):
    """RPC endpoint serving metering sync data to metering agents."""

    target = oslo_messaging.Target(version='1.0')

    def __init__(self, meter_plugin):
        self.meter_plugin = meter_plugin

    def get_sync_data_metering(self, context, **kwargs):
        """Return metering data, optionally scoped to one agent host.

        Without a 'host' kwarg, or when the L3 agent scheduler extension
        is unsupported, the full data set is returned. Otherwise only
        routers scheduled on that host's L3 agents are included.
        """
        l3_plugin = directory.get_plugin(plugin_constants.L3)
        if not l3_plugin:
            return
        metering_data = self.meter_plugin.get_sync_data_metering(context)
        host = kwargs.get('host')
        # Check extension support before the host value, matching the
        # original evaluation order.
        scheduler_supported = extensions.is_extension_supported(
            l3_plugin, consts.L3_AGENT_SCHEDULER_EXT_ALIAS)
        if not scheduler_supported or not host:
            return metering_data
        agents = l3_plugin.get_l3_agents(context, filters={'host': [host]})
        if not agents:
            LOG.error('Unable to find agent on host %s.', host)
            return
        hosted_router_ids = []
        for agent in agents:
            listing = l3_plugin.list_routers_on_l3_agent(context, agent.id)
            hosted_router_ids.extend(
                router['id'] for router in listing['routers'])
        if not hosted_router_ids:
            return
        return [router for router in metering_data
                if router['id'] in hosted_router_ids]
|
{
"content_hash": "7c922284897ebb370c2de1d307336db6",
"timestamp": "",
"source": "github",
"line_count": 45,
"max_line_length": 79,
"avg_line_length": 34.044444444444444,
"alnum_prop": 0.5966057441253264,
"repo_name": "huntxu/neutron",
"id": "07a2477ea0d5030c15c601726d313f5cfc11eff7",
"size": "2139",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "neutron/db/metering/metering_rpc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Mako",
"bytes": "1047"
},
{
"name": "Python",
"bytes": "11111676"
},
{
"name": "Shell",
"bytes": "37514"
}
],
"symlink_target": ""
}
|
from django.shortcuts import get_object_or_404, render_to_response
from django.template import RequestContext
from moocng.categories.models import Category
def category(request, categories):
    """Render the courses common to every category in the slug path.

    `categories` is a '/'-separated string of category slugs; any
    unknown slug raises Http404 via get_object_or_404.
    """
    slugs = categories.split('/')
    cat_list = [get_object_or_404(Category, slug=slug) for slug in slugs]
    # Intersect: start from the first category's courses, then keep only
    # the ones also present in each remaining category (iterating the new
    # category's queryset preserves the original result ordering).
    courses = cat_list[0].courses.all()
    for other in cat_list[1:]:
        courses = [course for course in other.courses.all()
                   if course in courses]
    context = {
        'first_category': cat_list[0],
        'other_categories': cat_list[1:],
        'courses': courses,
    }
    return render_to_response('categories/category.html', context,
                              context_instance=RequestContext(request))
|
{
"content_hash": "3c2e97b8e13e0f54f96f96284e49262e",
"timestamp": "",
"source": "github",
"line_count": 21,
"max_line_length": 66,
"avg_line_length": 34.23809523809524,
"alnum_prop": 0.6815020862308763,
"repo_name": "GeographicaGS/moocng",
"id": "5476564d1bad6830e2309a343e864409548f2306",
"size": "719",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "moocng/categories/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "162701"
},
{
"name": "HTML",
"bytes": "362912"
},
{
"name": "JavaScript",
"bytes": "1911286"
},
{
"name": "Python",
"bytes": "2723710"
},
{
"name": "Shell",
"bytes": "24842"
}
],
"symlink_target": ""
}
|
"""Calculate the molecular weight given a molecular formula
Parse the formula using parglare.
This example is based on the example from
PLY compared with pyparsing and ANTLR by Andrew Dalke
http://www.dalkescientific.com/writings/diary/archive/2007/11/03/antlr_java.html
"""
from parglare import Grammar, Parser
grammar = r"""
mw: EMPTY | formula;
formula: species | formula species;
species: ATOM DIGITS | ATOM;
terminals
DIGITS: /\d+/;
"""
mw_table = {
'H': 1.00794,
'C': 12.001,
'Cl': 35.453,
'O': 15.999,
'S': 32.06,
}
atom_names = sorted(
mw_table.keys(),
key=lambda symbol: (symbol[0], -len(symbol), symbol))
# Creates a pattern like: Cl|C|H|O|S
atom_pattern = "|".join(atom_names)
# Extend grammar definition with the ATOM rule
grammar += '\nATOM: /{}/;'.format(atom_pattern)
actions = {
'mw': [lambda _, __: 0.0,
lambda _, nodes: nodes[0]],
'formula': [lambda _, nodes: nodes[0],
lambda _, nodes: nodes[0] + nodes[1]],
'species': [lambda _, nodes: nodes[0] * nodes[1],
lambda _, nodes: nodes[0]],
'ATOM': lambda _, value: mw_table[value],
'DIGITS': lambda _, value: int(value)
}
parser = Parser(Grammar.from_string(grammar), actions=actions)
def calculate_mw(formula):
return parser.parse(formula)
|
{
"content_hash": "8de720c9dc2b763412e3e43fada4401c",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 80,
"avg_line_length": 25.25,
"alnum_prop": 0.6336633663366337,
"repo_name": "igordejanovic/parglare",
"id": "9805b4667bbeecd1ea84edfca827759c5c8b239b",
"size": "1313",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "examples/molecular_formulas/parglare_mw.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "2131"
},
{
"name": "Python",
"bytes": "402620"
},
{
"name": "Shell",
"bytes": "684"
}
],
"symlink_target": ""
}
|
"""Control tasks execution order"""
import fnmatch
from collections import deque
from collections import OrderedDict
import re
import six
from .exceptions import InvalidTask, InvalidCommand, InvalidDodoFile
from .cmdparse import TaskParse, CmdOption
from .task import Task, DelayedLoaded
from .loader import generate_tasks
class RegexGroup(object):
    '''Track the delayed tasks whose target regexp matches one target
    name given on the command line.

    `found` is flipped once the task that actually builds the target has
    been located, so the remaining delayed tasks in the group can be
    skipped (see TaskDispatcher._add_task).
    '''
    def __init__(self, target, tasks):
        # target name specified in command line
        self.target = target
        # set of delayed-tasks names (string)
        self.tasks = tasks
        # keep track if the target was already found
        self.found = False
class TaskControl(object):
    """Manages tasks inter-relationship
    There are 3 phases
    1) the constructor gets a list of tasks and does initialization
    2) 'process' the command line options for tasks are processed
    3) 'task_dispatcher' dispatches tasks to the runner
    Process dependencies and targets to find out the order tasks
    should be executed. Also apply filter to exclude tasks from
    execution. And parse task cmd line options.
    @ivar tasks: (dict) Key: task name ([taskgen.]name)
                 Value: L{Task} instance
    @ivar targets: (dict) Key: fileName
                   Value: task_name
    """
    def __init__(self, task_list, auto_delayed_regex=False):
        self.tasks = OrderedDict()
        self.targets = {}
        self.auto_delayed_regex = auto_delayed_regex
        # name of task in order to be executed
        # this is the order as in the dodo file. the real execution
        # order might be different if the dependencies require so.
        self._def_order = []
        # list of tasks selected to be executed
        self.selected_tasks = None
        # sanity check and create tasks dict
        for task in task_list:
            # task must be a Task
            if not isinstance(task, Task):
                msg = "Task must an instance of Task class. %s"
                raise InvalidTask(msg % (task.__class__))
            # task name must be unique
            if task.name in self.tasks:
                msg = "Task names must be unique. %s"
                raise InvalidDodoFile(msg % task.name)
            self.tasks[task.name] = task
            self._def_order.append(task.name)
        # expand wild-card task-dependencies
        for task in six.itervalues(self.tasks):
            for pattern in task.wild_dep:
                task.task_dep.extend(self._get_wild_tasks(pattern))
        self._check_dep_names()
        self.set_implicit_deps(self.targets, task_list)
    def _check_dep_names(self):
        """check if user input a task_dep or setup_task that doesn't exist"""
        # check task-dependencies exist.
        for task in six.itervalues(self.tasks):
            for dep in task.task_dep:
                if dep not in self.tasks:
                    msg = "%s. Task dependency '%s' does not exist."
                    raise InvalidTask(msg% (task.name, dep))
            for setup_task in task.setup_tasks:
                if setup_task not in self.tasks:
                    msg = "Task '%s': invalid setup task '%s'."
                    raise InvalidTask(msg % (task.name, setup_task))
    @staticmethod
    def set_implicit_deps(targets, task_list):
        """set/add task_dep based on file_dep on a target from another task
        @param targets: (dict) fileName -> task_name
        @param task_list: (list - Task) task with newly added file_dep
        """
        # 1) create a dictionary associating every target->task. where the task
        # builds that target.
        for task in task_list:
            for target in task.targets:
                if target in targets:
                    msg = ("Two different tasks can't have a common target." +
                           "'%s' is a target for %s and %s.")
                    raise InvalidTask(msg % (target, task.name,
                                             targets[target]))
                targets[target] = task.name
        # 2) now go through all dependencies and check if they are target from
        # another task.
        # FIXME - when used with delayed tasks needs to check if
        # any new target matches any old file_dep.
        for task in task_list:
            TaskControl.add_implicit_task_dep(targets, task, task.file_dep)
    @staticmethod
    def add_implicit_task_dep(targets, task, deps_list):
        """add implicit task_dep for `task` for newly added `file_dep`
        A file_dep that is another task's target implies depending on
        that task.
        @param targets: (dict) fileName -> task_name
        @param task: (Task) task with newly added file_dep
        @param deps_list: (list - str): list of file_dep for task
        """
        for dep in deps_list:
            if (dep in targets and targets[dep] not in task.task_dep):
                task.task_dep.append(targets[dep])
    def _get_wild_tasks(self, pattern):
        """get list of task names that match `pattern` (fnmatch glob)"""
        wild_list = []
        for t_name in self._def_order:
            if fnmatch.fnmatch(t_name, pattern):
                wild_list.append(t_name)
        return wild_list
    def _process_filter(self, task_selection):
        """process cmd line task options
        [task_name [-task_opt [opt_value]] ...] ...
        @param task_selection: list of strings with task names/params or target
        @return list of task names. Expanding glob and removed params
        """
        filter_list = []
        def add_filtered_task(seq, f_name):
            """add task to list `filter_list` and set task.options from params
            @return list - str: remaining elements of `seq` not consumed
            """
            filter_list.append(f_name)
            # only tasks specified by name can contain parameters
            if f_name in self.tasks:
                # parse task_selection
                the_task = self.tasks[f_name]
                # remaining items are other tasks not positional options
                taskcmd = TaskParse([CmdOption(opt) for opt in the_task.params])
                the_task.options, seq = taskcmd.parse(seq)
                # if task takes positional parameters set all as pos_arg_val
                if the_task.pos_arg is not None:
                    the_task.pos_arg_val = seq
                    seq = []
            return seq
        # process...
        seq = task_selection[:]
        # process cmd_opts until nothing left
        while seq:
            f_name = seq.pop(0) # always start with a task/target name
            # select tasks by task-name pattern
            if '*' in f_name:
                # glob patterns never consume options, hence the empty seq
                for task_name in self._get_wild_tasks(f_name):
                    add_filtered_task((), task_name)
            else:
                seq = add_filtered_task(seq, f_name)
        return filter_list
    def _filter_tasks(self, task_selection):
        """Select tasks specified by filter.
        Tried in order: exact task name, target file name, sub-task of a
        delayed (loader) task, and finally delayed-task target regexes.
        @param task_selection: list of strings with task names/params or target
        @return (list) of string. where elements are task name.
        """
        selected_task = []
        filter_list = self._process_filter(task_selection)
        for filter_ in filter_list:
            # by task name
            if filter_ in self.tasks:
                selected_task.append(filter_)
                continue
            # by target
            if filter_ in self.targets:
                selected_task.append(self.targets[filter_])
                continue
            # if can not find name check if it is a sub-task of a delayed
            basename = filter_.split(':', 1)[0]
            if basename in self.tasks:
                loader = self.tasks[basename].loader
                if not loader:
                    raise InvalidCommand(not_found=filter_)
                loader.basename = basename
                # placeholder task; real one is created when loader runs
                self.tasks[filter_] = Task(filter_, None, loader=loader)
                selected_task.append(filter_)
                continue
            # check if target matches any regex
            delayed_matched = [] # list of Task
            for task in list(self.tasks.values()):
                if not task.loader:
                    continue
                if task.name.startswith('_regex_target'):
                    continue
                if task.loader.target_regex:
                    if re.match(task.loader.target_regex, filter_):
                        delayed_matched.append(task)
                elif self.auto_delayed_regex:
                    # no regex given: with auto_delayed_regex every delayed
                    # task is a candidate
                    delayed_matched.append(task)
            delayed_matched_names = [t.name for t in delayed_matched]
            regex_group = RegexGroup(filter_, set(delayed_matched_names))
            # create extra tasks to load delayed tasks matched by regex
            for task in delayed_matched:
                loader = task.loader
                loader.basename = task.name
                name = '{}_{}:{}'.format('_regex_target', filter_, task.name)
                loader.regex_groups[name] = regex_group
                self.tasks[name] = Task(name, None,
                                        loader=loader,
                                        file_dep=[filter_])
                selected_task.append(name)
            if not delayed_matched:
                # not found
                raise InvalidCommand(not_found=filter_)
        return selected_task
    def process(self, task_selection):
        """Define `selected_tasks` from the command-line selection.
        @param task_selection: list of strings with task names/params
        @return (list - string) each element is the name of a task
        """
        # execute only tasks in the filter in the order specified by filter
        if task_selection is not None:
            self.selected_tasks = self._filter_tasks(task_selection)
        else:
            # if no filter is defined execute all tasks
            # in the order they were defined.
            self.selected_tasks = self._def_order
    def task_dispatcher(self):
        """return a TaskDispatcher generator
        """
        assert self.selected_tasks is not None, \
               "must call 'process' before this"
        return TaskDispatcher(self.tasks, self.targets, self.selected_tasks)
class ExecNode(object):
    """Each task will have an instance of this
    This is used to keep track of waiting events and the generator for
    dep nodes
    @ivar run_status (str): contains the result of Dependency.get_status().status
            modified by runner, value can be:
           - None: not processed yet
           - run: task is selected to be executed (it might be running or
                   waiting for setup)
           - ignore: task wont be executed (user forced deselect)
           - up-to-date: task wont be executed (no need)
           - done: task finished its execution
    """
    def __init__(self, task, parent):
        self.task = task
        # list of dependencies not processed by _add_task yet
        self.task_dep = task.task_dep[:]
        self.calc_dep = task.calc_dep.copy()
        # ancestors are used to detect cyclic references.
        # it does not contain a list of tasks that depends on this node
        # for that check the attribute waiting_me
        self.ancestors = []
        if parent:
            self.ancestors.extend(parent.ancestors)
        self.ancestors.append(task.name)
        # Wait for a task to be selected to its execution
        # checking if it is up-to-date
        self.wait_select = False
        # Wait for a task to finish its execution
        self.wait_run = set() # task names
        self.wait_run_calc = set() # task names
        self.waiting_me = set() # ExecNode
        self.run_status = None
        # all ancestors that failed
        self.bad_deps = []
        self.ignored_deps = []
        # generator from TaskDispatcher._add_task
        self.generator = None
    def reset_task(self, task, generator):
        """reset task & generator after task is created by its own `loader`"""
        self.task = task
        self.task_dep = task.task_dep[:]
        self.calc_dep = task.calc_dep.copy()
        self.generator = generator
    def parent_status(self, parent_node):
        """record a finished parent's failure/ignore status on this node"""
        if parent_node.run_status == 'failure':
            self.bad_deps.append(parent_node)
        elif parent_node.run_status == 'ignore':
            self.ignored_deps.append(parent_node)
    def __repr__(self):
        return "%s(%s)" % (self.__class__.__name__, self.task.name)
    def step(self):
        """get node's next step (None when the generator is exhausted)"""
        try:
            return next(self.generator)
        except StopIteration:
            return None
def no_none(decorated):
    """Decorator for generator functions: drop every ``None`` yielded."""
    def wrapper(*args, **kwargs):
        """Re-yield the wrapped generator's values, skipping ``None``."""
        yield from (value for value in decorated(*args, **kwargs)
                    if value is not None)
    return wrapper
class TaskDispatcher(object):
    """Dispatch another task to be selected/executed, mostly handle with MP
    Note that a dispatched task might not be ready to be executed.

    Tasks flow through two queues: `waiting` (blocked on dependencies) and
    `ready` (can be advanced).  `generator` yields ExecNode objects for the
    runner and receives back the node that was just processed.
    """
    def __init__(self, tasks, targets, selected_tasks):
        # tasks: dict task-name -> Task; targets: dict target -> task-name
        # selected_tasks: list of task names requested on the command line
        self.tasks = tasks
        self.targets = targets
        self.nodes = {}  # key task-name, value: ExecNode
        # queues
        self.waiting = set()  # of ExecNode
        self.ready = deque()  # of ExecNode
        self.generator = self._dispatcher_generator(selected_tasks)
    def _gen_node(self, parent, task_name):
        """return ExecNode for task_name if not created yet

        Returns None when the node already exists (and is not cyclic),
        so callers can yield the result directly (see @no_none).
        """
        node = self.nodes.get(task_name, None)
        # first time, create node
        if node is None:
            node = ExecNode(self.tasks[task_name], parent)
            node.generator = self._add_task(node)
            self.nodes[task_name] = node
            return node
        # detect cyclic/recursive dependencies
        if parent and task_name in parent.ancestors:
            msg = "Cyclic/recursive dependencies for task %s: [%s]"
            cycle = " -> ".join(parent.ancestors + [task_name])
            raise InvalidDodoFile(msg % (task_name, cycle))
    def _node_add_wait_run(self, node, task_list, calc=False):
        """updates node.wait_run
        @param node (ExecNode)
        @param task_list (list - str) tasks that node should wait for
        @param calc (bool) task_list is for calc_dep
        """
        # wait_for: contains tasks that `node` needs to wait for and
        # were not executed yet.
        wait_for = set()
        for name in task_list:
            dep_node = self.nodes[name]
            # run_status None/'run' means the dependency did not finish yet
            if (not dep_node) or dep_node.run_status in (None, 'run'):
                wait_for.add(name)
            else:
                # if dep task was already executed:
                # a) set parent status
                node.parent_status(dep_node)
                # b) update dependencies from calc_dep results
                if calc:
                    self._process_calc_dep_results(dep_node, node)
        # update ExecNode setting parent/dependent relationship
        for name in wait_for:
            self.nodes[name].waiting_me.add(node)
        if calc:
            node.wait_run_calc.update(wait_for)
        else:
            node.wait_run.update(wait_for)
    @no_none
    def _add_task(self, node):
        """@return a generator that produces:
             - ExecNode for task dependencies
             - 'wait' to wait for an event (i.e. a dep task run)
             - Task when ready to be dispatched to runner (run or be selected)
             - None values are of no interest and are filtered out
               by the decorator no_none
        note that after a 'wait' is sent it is the reponsability of the
        caller to ensure the current ExecNode cleared all its waiting
        before calling `next()` again on this generator
        """
        this_task = node.task
        # skip this task if task belongs to a regex_group that already
        # executed the task used to build the given target
        if this_task.loader:
            regex_group = this_task.loader.regex_groups.get(this_task.name, None)
            if regex_group and regex_group.found:
                return
        # add calc_dep & task_dep until all processed
        # calc_dep may add more deps so need to loop until nothing left
        while True:
            calc_dep_list = list(node.calc_dep)
            node.calc_dep.clear()
            task_dep_list = node.task_dep[:]
            node.task_dep = []
            for calc_dep in calc_dep_list:
                yield self._gen_node(node, calc_dep)
            self._node_add_wait_run(node, calc_dep_list, calc=True)
            # add task_dep
            for task_dep in task_dep_list:
                yield self._gen_node(node, task_dep)
            self._node_add_wait_run(node, task_dep_list)
            # do not wait until all possible task_dep are created
            if (node.calc_dep or node.task_dep):
                continue # pragma: no cover # coverage cant catch this #198
            elif (node.wait_run or node.wait_run_calc):
                yield 'wait'
            else:
                break
        # generate tasks from a DelayedLoader
        if this_task.loader:
            ref = this_task.loader.creator
            to_load = this_task.loader.basename or this_task.name
            this_loader = self.tasks[to_load].loader
            if this_loader and not this_loader.created:
                # execute the creator callable and register the real tasks
                new_tasks = generate_tasks(to_load, ref(), ref.__doc__)
                TaskControl.set_implicit_deps(self.targets, new_tasks)
                for nt in new_tasks:
                    if not nt.loader:
                        nt.loader = DelayedLoaded
                    self.tasks[nt.name] = nt
            # check itself for implicit dep (used by regex_target)
            TaskControl.add_implicit_task_dep(
                self.targets, this_task, this_task.file_dep)
            # remove file_dep since generated tasks are not required
            # to really create the target (support multiple matches)
            if regex_group:
                this_task.file_dep = {}
                if regex_group.target in self.targets:
                    regex_group.found = True
                else:
                    regex_group.tasks.remove(this_task.loader.basename)
                    if len(regex_group.tasks) == 0:
                        # In case no task is left, we cannot find a task
                        # generating this target. Print an error message!
                        raise InvalidCommand(not_found=regex_group.target)
            # mark this loader to not be executed again
            this_task.loader.created = True
            this_task.loader = DelayedLoaded
            # this task was placeholder to execute the loader
            # now it needs to be re-processed with the real task
            yield "reset generator"
            assert False, "This generator can not be used again"
        # add itself
        yield this_task
        # tasks that contain setup-tasks need to be yielded twice
        if this_task.setup_tasks:
            # run_status None means task is waiting for other tasks
            # in order to check if up-to-date. so it needs to wait
            # before scheduling its setup-tasks.
            if node.run_status is None:
                node.wait_select = True
                yield "wait"
            # if this task should run, so schedule setup-tasks before itself
            if node.run_status == 'run':
                for setup_task in this_task.setup_tasks:
                    yield self._gen_node(node, setup_task)
                self._node_add_wait_run(node, this_task.setup_tasks)
                if node.wait_run:
                    yield 'wait'
                # re-send this task after setup_tasks are sent
                yield this_task
    def _get_next_node(self, ready, tasks_to_run):
        """get ExecNode from (in order):
            .1 ready
            .2 tasks_to_run (list in reverse order)
        """
        if ready:
            return ready.popleft()
        # get task group from tasks_to_run
        while tasks_to_run:
            task_name = tasks_to_run.pop()
            # _gen_node returns None for nodes created earlier; skip those
            node = self._gen_node(None, task_name)
            if node:
                return node
    def _update_waiting(self, processed):
        """updates 'ready' and 'waiting' queues after processed
        @param processed (ExecNode) or None
        """
        # no task processed, just ignore
        if processed is None:
            return
        node = processed
        # if node was waiting select must only receive select event
        if node.wait_select:
            self.ready.append(node)
            self.waiting.remove(node)
            node.wait_select = False
            # status == run means this was not just select completed
            if node.run_status == 'run':
                return
        for waiting_node in node.waiting_me:
            waiting_node.parent_status(node)
            # is_ready indicates if node.generator can be invoked again
            task_name = node.task.name
            # node wait_run will be ready if there are nothing left to wait
            if task_name in waiting_node.wait_run:
                waiting_node.wait_run.remove(task_name)
                is_ready = not (waiting_node.wait_run or
                                waiting_node.wait_run_calc)
            # node wait_run_calc
            else:
                assert task_name in waiting_node.wait_run_calc
                waiting_node.wait_run_calc.remove(task_name)
                # calc_dep might add new deps that can be run without
                # waiting for the completion of the remaining deps
                is_ready = True
                self._process_calc_dep_results(node, waiting_node)
            # this node can be further processed
            if is_ready and (waiting_node in self.waiting):
                self.ready.append(waiting_node)
                self.waiting.remove(waiting_node)
    def _process_calc_dep_results(self, node, waiting_node):
        # refresh this task dependencies with values got from calc_dep
        values = node.task.values
        len_task_deps = len(waiting_node.task.task_dep)
        old_calc_dep = waiting_node.task.calc_dep.copy()
        waiting_node.task.update_deps(values)
        TaskControl.add_implicit_task_dep(
            self.targets, waiting_node.task,
            values.get('file_dep', []))
        # update node's list of non-processed dependencies
        new_task_dep = waiting_node.task.task_dep[len_task_deps:]
        waiting_node.task_dep.extend(new_task_dep)
        new_calc_dep = waiting_node.task.calc_dep - old_calc_dep
        waiting_node.calc_dep.update(new_calc_dep)
    def _dispatcher_generator(self, selected_tasks):
        """return generator dispatching tasks"""
        # each selected task will create a tree (from dependencies) of
        # tasks to be processed
        tasks_to_run = list(reversed(selected_tasks))
        node = None  # current active ExecNode
        while True:
            # get current node
            if not node:
                node = self._get_next_node(self.ready, tasks_to_run)
                if not node:
                    if self.waiting:
                        # all tasks are waiting, hold on
                        processed = (yield "hold on")
                        self._update_waiting(processed)
                        continue
                    # we are done!
                    return
            # get next step from current node
            next_step = node.step()
            # got None, nothing left for this generator
            if next_step is None:
                node = None
                continue
            # got a task, send ExecNode to runner
            if isinstance(next_step, Task):
                processed = (yield self.nodes[next_step.name])
                self._update_waiting(processed)
            # got new ExecNode, add to ready_queue
            elif isinstance(next_step, ExecNode):
                self.ready.append(next_step)
            # node just performed a delayed creation of tasks, restart
            elif next_step == "reset generator":
                node.reset_task(self.tasks[node.task.name],
                                self._add_task(node))
            # got 'wait', add ExecNode to waiting queue
            else:
                assert next_step == "wait"
                self.waiting.add(node)
                node = None
|
{
"content_hash": "338d0b051e374245c5a2052028221858",
"timestamp": "",
"source": "github",
"line_count": 650,
"max_line_length": 81,
"avg_line_length": 38.04461538461538,
"alnum_prop": 0.5649642120587165,
"repo_name": "gh0std4ncer/doit",
"id": "eb4d35f2bcfd3d754b634e745613073926ab9bc3",
"size": "24729",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "doit/control.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "491553"
}
],
"symlink_target": ""
}
|
from __future__ import division
from collections import defaultdict
import datetime
import re
import sys
import time
# A log timestamp looks like '2014-05-01 12:34:56,789' (comma + milliseconds).
timestamp_pattern = r'(?P<timestamp>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2},\d{3})'
message_pattern = r'(?P<message>.*)'
# Full log line: '<timestamp>: <LEVEL>: <message>'
log_re = re.compile(r'^%s: \w+: %s\s*$' % (timestamp_pattern, message_pattern))
# A task reference inside a message: '<dataset>, <task-number>'
task_pattern = r'(?P<dataset>\w+), (?P<task>\d+)'
# Start/stop markers emitted by the master and the slaves respectively.
start_re = re.compile(r'^Assigning.*: %s\s*$' % task_pattern)
stop_re = re.compile(r'^Slave \d+ report.*: %s\s*$' % task_pattern)
class Interval(object):
    """Track the first and most recent timestamp strings seen for a task."""

    def __init__(self, first):
        """Open the interval: both endpoints start at `first`."""
        self.first = first
        self.last = first

    def update(self, last):
        """Move the closing endpoint of the interval forward to `last`."""
        self.last = last

    def seconds(self):
        """Elapsed seconds between the first and last timestamps."""
        start = parse_timestamp(self.first)
        end = parse_timestamp(self.last)
        return (end - start).total_seconds()
def parse_timestamp(timestamp):
    """Convert a 'YYYY-mm-dd HH:MM:SS,mmm' string to a datetime object."""
    date_part, ms_part = timestamp.split(',')
    struct = time.strptime(date_part, '%Y-%m-%d %H:%M:%S')
    # Milliseconds from the log become microseconds for datetime.
    return datetime.datetime(*(struct[:6] + (int(ms_part) * 1000,)))
def parse_start(string):
    """Extract (dataset, task) from a task-assignment line, or (None, None)."""
    match = start_re.search(string)
    if not match:
        return None, None
    return match.group('dataset'), match.group('task')
def parse_stop(string):
    """Extract (dataset, task) from a slave-report line, or (None, None)."""
    match = stop_re.search(string)
    if not match:
        return None, None
    return match.group('dataset'), match.group('task')
def load():
    """Parse a task log from stdin; return [(dataset, average_seconds), ...].

    Start lines open an Interval per (dataset, task); stop lines close it.
    Datasets are reported in order of first appearance.
    """
    datasets = []  # dataset names in first-seen order
    task_map = defaultdict(dict)  # dataset -> {task: Interval}
    for line in sys.stdin:
        mo = log_re.search(line)
        if mo:
            timestamp = mo.group('timestamp')
            message = mo.group('message')
            dataset, task = parse_start(message)
            if dataset:
                interval = Interval(timestamp)
                task_map[dataset][task] = interval
                if dataset not in datasets:
                    datasets.append(dataset)
            else:
                dataset, task = parse_stop(message)
                if dataset:
                    # NOTE(review): a stop line with no matching start line
                    # raises KeyError here -- confirm logs always pair them.
                    task_map[dataset][task].update(timestamp)
        else:
            print('Malformed input:')
            print('"%s"' % line)
    averages = []
    for dataset in datasets:
        intervals = task_map[dataset].values()
        # True division is guaranteed by `from __future__ import division`.
        average = sum(x.seconds() for x in intervals) / len(intervals)
        averages.append((dataset, average))
    return averages
def timing():
    """Print each dataset's name alongside its average task duration."""
    for dataset, average in load():
        # NOTE(review): under Python 2 (no print_function import) this would
        # print a 3-tuple; presumably the script is run with Python 3.
        print(dataset, ' ', average)
if __name__ == '__main__':
    timing()
# vim: et sw=4 sts=4
|
{
"content_hash": "10cd98ad264a03afd4a88877163caa91",
"timestamp": "",
"source": "github",
"line_count": 88,
"max_line_length": 79,
"avg_line_length": 28.420454545454547,
"alnum_prop": 0.5737704918032787,
"repo_name": "WillChilds-Klein/mistress-mapreduce",
"id": "d588ff184c56319f9db9b429036dae23502788e8",
"size": "3124",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "util/task_times.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "1554"
},
{
"name": "Java",
"bytes": "56524"
},
{
"name": "Makefile",
"bytes": "125"
},
{
"name": "Python",
"bytes": "614350"
},
{
"name": "Shell",
"bytes": "8464"
}
],
"symlink_target": ""
}
|
"""Statistical Language Processing tools. (Chapter 22)
We define Unigram and Ngram text models, use them to generate random text,
and show the Viterbi algorithm for segmentatioon of letters into words.
Then we show a very simple Information Retrieval system, and an example
working on a tiny sample of Unix manual pages."""
from utils import argmin
from learning import CountingProbDist
import search
from math import log, exp
from collections import defaultdict
import heapq
import re
import os
class UnigramTextModel(CountingProbDist):
    """A discrete probability distribution over individual words.

    Behaves exactly like CountingProbDist (add, sample, P[word]) and can
    additionally generate a random text n words long via P.samples(n).
    """

    def samples(self, n):
        """Return a string of n words, random according to the model."""
        drawn = [self.sample() for _ in range(n)]
        return ' '.join(drawn)
class NgramTextModel(CountingProbDist):
    """This is a discrete probability distribution over n-tuples of words.
    You can add, sample or get P[(word1, ..., wordn)]. The method P.samples(n)
    builds up an n-word sequence; P.add and P.add_sequence add data."""

    def __init__(self, n, observation_sequence=None, default=0):
        """Create an n-gram model.

        n: tuple length of each n-gram.
        observation_sequence: optional list of words used to seed the model
            (default changed from a shared mutable [] to None; behavior for
            callers is identical since the sequence was only iterated).
        default: count for unseen n-grams (see CountingProbDist).
        """
        # In addition to the dictionary of n-tuples, cond_prob is a
        # mapping from (w1, ..., wn-1) to P(wn | w1, ... wn-1)
        CountingProbDist.__init__(self, default=default)
        self.n = n
        self.cond_prob = defaultdict()
        self.add_sequence(observation_sequence if observation_sequence is not None else [])

    # __getitem__, top, sample inherited from CountingProbDist
    # Note they deal with tuples, not strings, as inputs

    def add(self, ngram):
        """Count 1 for P[(w1, ..., wn)] and for P(wn | (w1, ..., wn-1)"""
        CountingProbDist.add(self, ngram)
        if ngram[:-1] not in self.cond_prob:
            self.cond_prob[ngram[:-1]] = CountingProbDist()
        self.cond_prob[ngram[:-1]].add(ngram[-1])

    def add_empty(self, words, n):
        """Return words prefixed with n-1 empty-word sentinels."""
        return [''] * (n - 1) + words

    def add_sequence(self, words):
        """Add each of the tuple words[i:i+n], using a sliding window.
        Prefix some copies of the empty word, '', to make the start work."""
        n = self.n
        words = self.add_empty(words, n)
        # BUG FIX: the upper bound must be len(words) - n + 1; the previous
        # `range(len(words) - n)` silently dropped the final n-gram, so the
        # last word of every sequence was never counted.
        for i in range(len(words) - n + 1):
            self.add(tuple(words[i:i + n]))

    def samples(self, nwords):
        """Build up a random sample of text nwords words long, using
        the conditional probability given the n-1 preceding words."""
        n = self.n
        nminus1gram = ('',) * (n - 1)
        output = []
        for i in range(nwords):
            if nminus1gram not in self.cond_prob:
                nminus1gram = ('',) * (n - 1)  # Cannot continue, so restart.
            wn = self.cond_prob[nminus1gram].sample()
            output.append(wn)
            nminus1gram = nminus1gram[1:] + (wn,)
        return ' '.join(output)
class NgramCharModel(NgramTextModel):
    """Variant of NgramTextModel that works on characters instead of words."""

    def add_empty(self, words, n):
        """Prefix n-1 spaces so n-grams at the start of a string count."""
        padding = ' ' * (n - 1)
        return padding + words

    def add_sequence(self, words):
        """Feed each word's characters through the n-gram machinery."""
        for token in words:
            super().add_sequence(token)
# ______________________________________________________________________________
def viterbi_segment(text, P):
    """Find the best segmentation of the string of characters, given the
    UnigramTextModel P."""
    # best[i]: probability of the best segmentation of text[0:i]
    # words[i]: the last word of that best segmentation (ending at i)
    size = len(text)
    words = [''] + list(text)
    best = [1.0] + [0.0] * size
    # Dynamic programming over all end positions and word start positions.
    for end in range(size + 1):
        for start in range(end):
            candidate = text[start:end]
            score = P[candidate] * best[end - len(candidate)]
            if score >= best[end]:
                best[end] = score
                words[end] = candidate
    # Walk backwards through `words` to recover the winning sequence.
    sequence = []
    pos = len(words) - 1
    while pos > 0:
        sequence.insert(0, words[pos])
        pos -= len(words[pos])
    return sequence, best[-1]
# ______________________________________________________________________________
# TODO(tmrts): Expose raw index
class IRSystem:
    """A very simple Information Retrieval System, as discussed in Sect. 23.2.
    The constructor s = IRSystem('the a') builds an empty system with two
    stopwords. Next, index several documents with s.index_document(text, url).
    Then ask queries with s.query('query words', n) to retrieve the top n
    matching documents. Queries are literal words from the document,
    except that stopwords are ignored, and there is one special syntax:
    The query "learn: man cat", for example, runs "man cat" and indexes it."""
    def __init__(self, stopwords='the a of'):
        """Create an IR System. Optionally specify stopwords."""
        # index is a map of {word: {docid: count}}, where docid is an int,
        # indicating the index into the documents list.
        self.index = defaultdict(lambda: defaultdict(int))
        self.stopwords = set(words(stopwords))
        self.documents = []
    def index_collection(self, filenames):
        "Index a whole collection of files."
        # NOTE(review): files opened here are never explicitly closed;
        # relies on the garbage collector (CPython refcounting).
        prefix = os.path.dirname(__file__)
        for filename in filenames:
            self.index_document(open(filename).read(),
                                os.path.relpath(filename, prefix))
    def index_document(self, text, url):
        "Index the text of a document."
        # For now, use first line for title
        # (raises ValueError if the text contains no newline at all)
        title = text[:text.index('\n')].strip()
        docwords = words(text)
        docid = len(self.documents)
        self.documents.append(Document(title, url, len(docwords)))
        for word in docwords:
            if word not in self.stopwords:
                self.index[word][docid] += 1
    def query(self, query_text, n=10):
        """Return a list of n (score, docid) pairs for the best matches.
        Also handle the special syntax for 'learn: command'."""
        if query_text.startswith("learn:"):
            # NOTE(review): os.popen executes the remainder of the query as a
            # shell command -- only safe with trusted input.
            doctext = os.popen(query_text[len("learn:"):], 'r').read()
            self.index_document(doctext, query_text)
            return []
        qwords = [w for w in words(query_text) if w not in self.stopwords]
        # Only documents containing the rarest query word can rank; they are
        # the smallest candidate set to score.
        shortest = argmin(qwords, key=lambda w: len(self.index[w]))
        docids = self.index[shortest]
        return heapq.nlargest(n, ((self.total_score(qwords, docid), docid) for docid in docids))
    def score(self, word, docid):
        """Compute a score for this word on the document with this docid."""
        # There are many options; here we take a very simple approach
        # (log term frequency normalized by log document length)
        return (log(1 + self.index[word][docid]) /
                log(1 + self.documents[docid].nwords))
    def total_score(self, words, docid):
        """Compute the sum of the scores of these words on the document with this docid."""
        # NOTE(review): parameter `words` shadows the module-level words()
        # helper within this method.
        return sum(self.score(word, docid) for word in words)
    def present(self, results):
        """Present the results as a list."""
        for (score, docid) in results:
            doc = self.documents[docid]
            print(
                ("{:5.2}|{:25} | {}".format(100 * score, doc.url,
                                            doc.title[:45].expandtabs())))
    def present_results(self, query_text, n=10):
        """Get results for the query and present them."""
        self.present(self.query(query_text, n))
class UnixConsultant(IRSystem):
    """A trivial IR system over a small collection of Unix man pages."""

    def __init__(self):
        """Index every .txt man page found under aima-data/MAN/."""
        IRSystem.__init__(self, stopwords="how do i the a of")
        # Removed a redundant function-local `import os`: the module already
        # imports os at the top level.
        aima_root = os.path.dirname(__file__)
        mandir = os.path.join(aima_root, 'aima-data/MAN/')
        man_files = [mandir + f for f in os.listdir(mandir)
                     if f.endswith('.txt')]
        self.index_collection(man_files)
class Document:
    """Metadata for a document: title and url; maybe add others later."""

    def __init__(self, title, url, nwords):
        """Record the document's title, location and word count."""
        self.title, self.url, self.nwords = title, url, nwords
def words(text, reg=re.compile('[a-z0-9]+')):
    """Return a list of the words in text, ignoring punctuation and
    converting everything to lowercase (to canonicalize).
    >>> words("``EGAD!'' Edgar cried.")
    ['egad', 'edgar', 'cried']
    """
    lowered = text.lower()
    return reg.findall(lowered)
def canonicalize(text):
    """Return a canonical text: only lowercase letters and blanks.
    >>> canonicalize("``EGAD!'' Edgar cried.")
    'egad edgar cried'
    """
    tokens = words(text)
    return ' '.join(tokens)
# ______________________________________________________________________________
# Example application (not in book): decode a cipher.
# A cipher is a code that substitutes one character for another.
# A shift cipher is a rotation of the letters in the alphabet,
# such as the famous rot13, which maps A to N, B to M, etc.

# The lowercase plaintext alphabet used by all cipher helpers below.
alphabet = 'abcdefghijklmnopqrstuvwxyz'
# Encoding
def shift_encode(plaintext, n):
    """Encode text with a shift cipher that moves each letter up by n letters.
    >>> shift_encode('abc z', 1)
    'bcd a'
    """
    rotated = alphabet[n:] + alphabet[:n]
    return encode(plaintext, rotated)
def rot13(plaintext):
    """Encode text by rotating letters by 13 spaces in the alphabet.
    >>> rot13('hello')
    'uryyb'
    >>> rot13(rot13('hello'))
    'hello'
    """
    # 13 is half the alphabet length, which makes rot13 its own inverse.
    half_alphabet = 13
    return shift_encode(plaintext, half_alphabet)
def translate(plaintext, function):
    """Return plaintext with `function` applied to each of its characters.

    The original built the result by repeated string concatenation, which
    is quadratic in the worst case; ''.join over a generator is linear.
    """
    return ''.join(function(char) for char in plaintext)
def maketrans(from_, to_):
    """Build a char-to-char mapping and return a lookup function.

    Characters not present in `from_` are returned unchanged.
    """
    table = {char: to_[index] for index, char in enumerate(from_)}
    return lambda char: table.get(char, char)
def encode(plaintext, code):
    """Encodes text, using a code which is a permutation of the alphabet."""
    # Extend the mapping to cover both lowercase and uppercase letters.
    full_source = alphabet + alphabet.upper()
    full_target = code + code.upper()
    return translate(plaintext, maketrans(full_source, full_target))
def bigrams(text):
    """Return a list of pairs in text (a sequence of letters or words).
    >>> bigrams('this')
    ['th', 'hi', 'is']
    >>> bigrams(['this', 'is', 'a', 'test'])
    [['this', 'is'], ['is', 'a'], ['a', 'test']]
    """
    pairs = []
    for start in range(len(text) - 1):
        pairs.append(text[start:start + 2])
    return pairs
# Decoding a Shift (or Caesar) Cipher
class ShiftDecoder:
    """There are only 26 possible encodings, so we can try all of them,
    and return the one with the highest probability, according to a
    bigram probability distribution."""
    def __init__(self, training_text):
        # Learn bigram counts from the training corpus; default=1 gives
        # add-one smoothing so unseen bigrams never zero out a score.
        training_text = canonicalize(training_text)
        self.P2 = CountingProbDist(bigrams(training_text), default=1)
    def score(self, plaintext):
        """Return a score for text based on how common letters pairs are."""
        # Product of bigram probabilities; higher means more English-like.
        s = 1.0
        for bi in bigrams(plaintext):
            s = s * self.P2[bi]
        return s
    def decode(self, ciphertext):
        """Return the shift decoding of text with the best score."""
        # Score every one of the 26 candidate decodings and keep the best.
        list_ = [(self.score(shift), shift)
                 for shift in all_shifts(ciphertext)]
        return max(list_, key=lambda elm: elm[0])[1]
def all_shifts(text):
    """Generate all 26 possible encodings of text by a shift cipher."""
    for shift_amount in range(len(alphabet)):
        yield shift_encode(text, shift_amount)
# Decoding a General Permutation Cipher
class PermutationDecoder:
    """This is a much harder problem than the shift decoder. There are 26!
    permutations, so we can't try them all. Instead we have to search.
    We want to search well, but there are many things to consider:
    Unigram probabilities (E is the most common letter); Bigram probabilities
    (TH is the most common bigram); word probabilities (I and A are the most
    common one-letter words, etc.); etc.
    We could represent a search state as a permutation of the 26 letters,
    and alter the solution through hill climbing. With an initial guess
    based on unigram probabilities, this would probably fare well. However,
    I chose instead to have an incremental representation. A state is
    represented as a letter-to-letter map; for example {'z': 'e'} to
    represent that 'z' will be translated to 'e'.
    """
    def __init__(self, training_text, ciphertext=None):
        # NOTE(review): the `ciphertext` parameter is accepted but never
        # used here; decode() receives the ciphertext instead. Confirm
        # whether the parameter can be dropped.
        self.Pwords = UnigramTextModel(words(training_text))
        self.P1 = UnigramTextModel(training_text)  # By letter
        self.P2 = NgramTextModel(2, training_text)  # By letter pair
    def decode(self, ciphertext):
        """Search for a decoding of the ciphertext."""
        self.ciphertext = ciphertext
        problem = PermutationDecoderProblem(decoder=self)
        return search.best_first_tree_search(
            problem, lambda node: self.score(node.state))
    def score(self, code):
        """Score is product of word scores, unigram scores, and bigram scores.
        This can get very small, so we use logs and exp."""
        # TODO: Implement the permutation_decode function
        # NOTE(review): permutation_decode is not defined anywhere in this
        # module (hence the noqa); calling score() currently raises NameError.
        text = permutation_decode(self.ciphertext, code)  # noqa
        logP = (sum([log(self.Pwords[word]) for word in words(text)]) +
                sum([log(self.P1[c]) for c in text]) +
                sum([log(self.P2[b]) for b in bigrams(text)]))
        return exp(logP)
class PermutationDecoderProblem(search.Problem):
    """Search-problem wrapper for PermutationDecoder (unfinished scaffold)."""
    def __init__(self, initial=None, goal=None, decoder=None):
        # initial state: a (possibly empty) letter-to-letter mapping
        self.initial = initial or {}
        self.decoder = decoder
    def actions(self, state):
        # Find the best
        # (i.e. pick the most probable not-yet-assigned plaintext letter)
        p, plainchar = max([(self.decoder.P1[c], c)
                            for c in alphabet if c not in state])
        # NOTE(review): unfinished, as the original '????' marker indicates:
        # `extend` and `cipherchar` are undefined, and `succs` is never
        # returned, so this method currently returns None.
        succs = [extend(state, plainchar, cipherchar)] # ???? # noqa
    def goal_test(self, state):
        """We're done when we get all 26 letters assigned."""
        return len(state) >= 26
|
{
"content_hash": "d74f008bb96f3d2ee26e7f59d6af4eb8",
"timestamp": "",
"source": "github",
"line_count": 389,
"max_line_length": 96,
"avg_line_length": 35.804627249357324,
"alnum_prop": 0.6097070649052269,
"repo_name": "sofmonk/aima-python",
"id": "991c764d9ca11ff97d125d6772df21802002d4f2",
"size": "13928",
"binary": false,
"copies": "1",
"ref": "refs/heads/branch",
"path": "text.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "9816"
},
{
"name": "Jupyter Notebook",
"bytes": "1545410"
},
{
"name": "Makefile",
"bytes": "562"
},
{
"name": "Python",
"bytes": "363169"
}
],
"symlink_target": ""
}
|
def describe_duration(battle_data):
    """Render the battle duration zero-padded; '+' suffix while ongoing."""
    if 'duration' not in battle_data:
        return "???"
    suffix = "+" if battle_data['battle_still_ongoing'] else ""
    return "{0:03d}{1}".format(battle_data['duration'], suffix)
def describe_defender(battle_data):
    """Describe the defending room owner, or '' when the room is unowned."""
    if 'owner' not in battle_data:
        return ""
    return ", {} defending".format(battle_data['owner'])
def describe_creeps(battle_data):
    """Summarize every player's creeps; the room owner is listed first."""
    owner = battle_data.get('owner')
    # (False, name) sorts before (True, name), so the owner always leads.
    ordered = sorted(battle_data['player_creep_counts'].items(),
                     key=lambda item: (item[0] != owner, item[0]))
    descriptions = ["{}'s {}".format(name, describe_player_creep_list(parts))
                    for name, parts in ordered]
    return " vs ".join(descriptions)
def describe_player_creep_list(creeps):
    """Join creep descriptions with commas and a final 'and'."""
    ordered = sorted(creeps.items(), key=lambda item: item[0])
    parts = [describe_creep(role, count) for role, count in ordered]
    if len(parts) < 2:
        return ", ".join(parts)
    return "{} and {}".format(", ".join(parts[:-1]), parts[-1])
def describe_creep(role, count):
    """Render '<count> <role>', naively pluralized with 's' when count > 1."""
    plural = "s" if count > 1 else ""
    return "{} {}{}".format(count, role, plural)
|
{
"content_hash": "3d454aa87b4a600030c875cace3a17f8",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 111,
"avg_line_length": 34.707317073170735,
"alnum_prop": 0.5888966971187631,
"repo_name": "LeagueOfAutomatedNations/LeagueBot",
"id": "b91db3b1086120e5b0deacf9fee194858f1f891e",
"size": "1423",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "leaguebot/services/battle_description.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "637"
},
{
"name": "Python",
"bytes": "60407"
},
{
"name": "Shell",
"bytes": "1016"
}
],
"symlink_target": ""
}
|
import inspect
import unittest
from unittest import mock
from unittest.mock import MagicMock, Mock, call
import pytest
from google.api_core.exceptions import AlreadyExists, NotFound
from google.api_core.retry import Retry
from airflow import AirflowException
from airflow.models import DAG, DagBag
from airflow.providers.google.cloud.operators.dataproc import (
ClusterGenerator,
DataprocClusterLink,
DataprocCreateBatchOperator,
DataprocCreateClusterOperator,
DataprocCreateWorkflowTemplateOperator,
DataprocDeleteBatchOperator,
DataprocDeleteClusterOperator,
DataprocGetBatchOperator,
DataprocInstantiateInlineWorkflowTemplateOperator,
DataprocInstantiateWorkflowTemplateOperator,
DataprocJobLink,
DataprocListBatchesOperator,
DataprocScaleClusterOperator,
DataprocSubmitHadoopJobOperator,
DataprocSubmitHiveJobOperator,
DataprocSubmitJobOperator,
DataprocSubmitPigJobOperator,
DataprocSubmitPySparkJobOperator,
DataprocSubmitSparkJobOperator,
DataprocSubmitSparkSqlJobOperator,
DataprocUpdateClusterOperator,
)
from airflow.serialization.serialized_objects import SerializedDAG
from airflow.utils.timezone import datetime
from airflow.version import version as airflow_version
from tests.test_utils.db import clear_db_runs, clear_db_xcom
cluster_params = inspect.signature(ClusterGenerator.__init__).parameters
AIRFLOW_VERSION = "v" + airflow_version.replace(".", "-").replace("+", "-")
DATAPROC_PATH = "airflow.providers.google.cloud.operators.dataproc.{}"
TASK_ID = "task-id"
GCP_PROJECT = "test-project"
GCP_LOCATION = "test-location"
GCP_CONN_ID = "test-conn"
IMPERSONATION_CHAIN = ["ACCOUNT_1", "ACCOUNT_2", "ACCOUNT_3"]
CLUSTER_NAME = "cluster_name"
CONFIG = {
"gce_cluster_config": {
"zone_uri": "https://www.googleapis.com/compute/v1/projects/project_id/zones/zone",
"metadata": {"metadata": "data"},
"network_uri": "network_uri",
"subnetwork_uri": "subnetwork_uri",
"internal_ip_only": True,
"tags": ["tags"],
"service_account": "service_account",
"service_account_scopes": ["service_account_scopes"],
},
"master_config": {
"num_instances": 2,
"machine_type_uri": "projects/project_id/zones/zone/machineTypes/master_machine_type",
"disk_config": {"boot_disk_type": "master_disk_type", "boot_disk_size_gb": 128},
"image_uri": "https://www.googleapis.com/compute/beta/projects/"
"custom_image_project_id/global/images/custom_image",
},
"worker_config": {
"num_instances": 2,
"machine_type_uri": "projects/project_id/zones/zone/machineTypes/worker_machine_type",
"disk_config": {"boot_disk_type": "worker_disk_type", "boot_disk_size_gb": 256},
"image_uri": "https://www.googleapis.com/compute/beta/projects/"
"custom_image_project_id/global/images/custom_image",
},
"secondary_worker_config": {
"num_instances": 4,
"machine_type_uri": "projects/project_id/zones/zone/machineTypes/worker_machine_type",
"disk_config": {"boot_disk_type": "worker_disk_type", "boot_disk_size_gb": 256},
"is_preemptible": True,
},
"software_config": {"properties": {"properties": "data"}, "optional_components": ["optional_components"]},
"lifecycle_config": {
"idle_delete_ttl": {'seconds': 60},
"auto_delete_time": "2019-09-12T00:00:00.000000Z",
},
"encryption_config": {"gce_pd_kms_key_name": "customer_managed_key"},
"autoscaling_config": {"policy_uri": "autoscaling_policy"},
"config_bucket": "storage_bucket",
"initialization_actions": [
{"executable_file": "init_actions_uris", "execution_timeout": {'seconds': 600}}
],
}
CONFIG_WITH_CUSTOM_IMAGE_FAMILY = {
"gce_cluster_config": {
"zone_uri": "https://www.googleapis.com/compute/v1/projects/project_id/zones/zone",
"metadata": {"metadata": "data"},
"network_uri": "network_uri",
"subnetwork_uri": "subnetwork_uri",
"internal_ip_only": True,
"tags": ["tags"],
"service_account": "service_account",
"service_account_scopes": ["service_account_scopes"],
},
"master_config": {
"num_instances": 2,
"machine_type_uri": "projects/project_id/zones/zone/machineTypes/master_machine_type",
"disk_config": {"boot_disk_type": "master_disk_type", "boot_disk_size_gb": 128},
"image_uri": "https://www.googleapis.com/compute/beta/projects/"
"custom_image_project_id/global/images/family/custom_image_family",
},
"worker_config": {
"num_instances": 2,
"machine_type_uri": "projects/project_id/zones/zone/machineTypes/worker_machine_type",
"disk_config": {"boot_disk_type": "worker_disk_type", "boot_disk_size_gb": 256},
"image_uri": "https://www.googleapis.com/compute/beta/projects/"
"custom_image_project_id/global/images/family/custom_image_family",
},
"secondary_worker_config": {
"num_instances": 4,
"machine_type_uri": "projects/project_id/zones/zone/machineTypes/worker_machine_type",
"disk_config": {"boot_disk_type": "worker_disk_type", "boot_disk_size_gb": 256},
"is_preemptible": True,
},
"software_config": {"properties": {"properties": "data"}, "optional_components": ["optional_components"]},
"lifecycle_config": {
"idle_delete_ttl": {'seconds': 60},
"auto_delete_time": "2019-09-12T00:00:00.000000Z",
},
"encryption_config": {"gce_pd_kms_key_name": "customer_managed_key"},
"autoscaling_config": {"policy_uri": "autoscaling_policy"},
"config_bucket": "storage_bucket",
"initialization_actions": [
{"executable_file": "init_actions_uris", "execution_timeout": {'seconds': 600}}
],
}
LABELS = {"labels": "data", "airflow-version": AIRFLOW_VERSION}
# NOTE(review): this update recomputes the exact value AIRFLOW_VERSION
# already holds, so it is a no-op -- presumably left over from a refactor.
LABELS.update({'airflow-version': 'v' + airflow_version.replace('.', '-').replace('+', '-')})
CLUSTER = {"project_id": "project_id", "cluster_name": CLUSTER_NAME, "config": CONFIG, "labels": LABELS}
UPDATE_MASK = {
"paths": ["config.worker_config.num_instances", "config.secondary_worker_config.num_instances"]
}
TIMEOUT = 120
RETRY = mock.MagicMock(Retry)
METADATA = [("key", "value")]
REQUEST_ID = "request_id_uuid"
WORKFLOW_NAME = "airflow-dataproc-test"
WORKFLOW_TEMPLATE = {
"id": WORKFLOW_NAME,
"placement": {
"managed_cluster": {
"cluster_name": CLUSTER_NAME,
"config": CLUSTER,
}
},
"jobs": [{"step_id": "pig_job_1", "pig_job": {}}],
}
TEST_DAG_ID = 'test-dataproc-operators'
DEFAULT_DATE = datetime(2020, 1, 1)
TEST_JOB_ID = "test-job"
DATAPROC_JOB_LINK_EXPECTED = (
f"https://console.cloud.google.com/dataproc/jobs/{TEST_JOB_ID}?"
f"region={GCP_LOCATION}&project={GCP_PROJECT}"
)
DATAPROC_CLUSTER_LINK_EXPECTED = (
f"https://console.cloud.google.com/dataproc/clusters/{CLUSTER_NAME}/monitoring?"
f"region={GCP_LOCATION}&project={GCP_PROJECT}"
)
DATAPROC_JOB_CONF_EXPECTED = {
"job_id": TEST_JOB_ID,
"region": GCP_LOCATION,
"project_id": GCP_PROJECT,
}
DATAPROC_CLUSTER_CONF_EXPECTED = {
"cluster_name": CLUSTER_NAME,
"region": GCP_LOCATION,
"project_id": GCP_PROJECT,
}
BATCH_ID = "test-batch-id"
BATCH = {
"spark_batch": {
"jar_file_uris": ["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
"main_class": "org.apache.spark.examples.SparkPi",
},
}
def assert_warning(msg: str, warnings):
    """Assert that at least one captured warning message mentions ``msg``."""
    matching = [w for w in warnings if msg in str(w)]
    assert matching
class DataprocTestBase(unittest.TestCase):
    # Shared scaffolding for Dataproc operator tests: a throwaway DAG plus a
    # mocked TaskInstance/context used to record extra-link xcom pushes.
    @classmethod
    def setUpClass(cls):
        cls.dagbag = DagBag(dag_folder="/dev/null", include_examples=False)
        cls.dag = DAG(TEST_DAG_ID, default_args={"owner": "airflow", "start_date": DEFAULT_DATE})
    def setUp(self):
        self.mock_ti = MagicMock()
        self.mock_context = {"ti": self.mock_ti}
        self.extra_links_manager_mock = Mock()
        self.extra_links_manager_mock.attach_mock(self.mock_ti, 'ti')
    def tearDown(self):
        # NOTE(review): identical to setUp -- recreating the mocks here drops
        # any state recorded during the test; presumably deliberate isolation,
        # but setUp alone would achieve the same effect. Confirm and simplify.
        self.mock_ti = MagicMock()
        self.mock_context = {"ti": self.mock_ti}
        self.extra_links_manager_mock = Mock()
        self.extra_links_manager_mock.attach_mock(self.mock_ti, 'ti')
    @classmethod
    def tearDownClass(cls):
        # Clean DB artifacts (dag runs, xcom) shared across the test session.
        clear_db_runs()
        clear_db_xcom()
class DataprocJobTestBase(DataprocTestBase):
    """Base for job tests: records the expected ordered calls — the job-link
    XCom push, then the job polling call."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        push_link_call = call.ti.xcom_push(
            execution_date=None, key='job_conf', value=DATAPROC_JOB_CONF_EXPECTED
        )
        poll_job_call = call.hook().wait_for_job(
            job_id=TEST_JOB_ID, region=GCP_LOCATION, project_id=GCP_PROJECT
        )
        cls.extra_links_expected_calls = [push_link_call, poll_job_call]
class DataprocClusterTestBase(DataprocTestBase):
    """Base for cluster tests: records the expected cluster-link XCom push."""

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        push_link_call = call.ti.xcom_push(
            execution_date=None, key='cluster_conf', value=DATAPROC_CLUSTER_CONF_EXPECTED
        )
        cls.extra_links_expected_calls_base = [push_link_call]
class TestsClusterGenerator(unittest.TestCase):
    """Argument-validation and config-building tests for ClusterGenerator."""

    @staticmethod
    def _generator_kwargs(**overrides):
        # Baseline keyword arguments shared by the build tests; the image
        # source (custom_image vs custom_image_family) is supplied per test.
        kwargs = dict(
            project_id="project_id",
            num_workers=2,
            zone="zone",
            network_uri="network_uri",
            subnetwork_uri="subnetwork_uri",
            internal_ip_only=True,
            tags=["tags"],
            storage_bucket="storage_bucket",
            init_actions_uris=["init_actions_uris"],
            init_action_timeout="10m",
            metadata={"metadata": "data"},
            custom_image_project_id="custom_image_project_id",
            autoscaling_policy="autoscaling_policy",
            properties={"properties": "data"},
            optional_components=["optional_components"],
            num_masters=2,
            master_machine_type="master_machine_type",
            master_disk_type="master_disk_type",
            master_disk_size=128,
            worker_machine_type="worker_machine_type",
            worker_disk_type="worker_disk_type",
            worker_disk_size=256,
            num_preemptible_workers=4,
            region="region",
            service_account="service_account",
            service_account_scopes=["service_account_scopes"],
            idle_delete_ttl=60,
            auto_delete_time=datetime(2019, 9, 12),
            auto_delete_ttl=250,
            customer_managed_key="customer_managed_key",
        )
        kwargs.update(overrides)
        return kwargs

    def test_image_version(self):
        # custom_image and image_version are mutually exclusive.
        with pytest.raises(ValueError) as ctx:
            ClusterGenerator(
                custom_image="custom_image",
                image_version="image_version",
                project_id=GCP_PROJECT,
                cluster_name=CLUSTER_NAME,
            )
        assert "custom_image and image_version" in str(ctx.value)

    def test_custom_image_family_error_with_image_version(self):
        # image_version and custom_image_family are mutually exclusive.
        with pytest.raises(ValueError) as ctx:
            ClusterGenerator(
                image_version="image_version",
                custom_image_family="custom_image_family",
                project_id=GCP_PROJECT,
                cluster_name=CLUSTER_NAME,
            )
        assert "image_version and custom_image_family" in str(ctx.value)

    def test_custom_image_family_error_with_custom_image(self):
        # custom_image and custom_image_family are mutually exclusive.
        with pytest.raises(ValueError) as ctx:
            ClusterGenerator(
                custom_image="custom_image",
                custom_image_family="custom_image_family",
                project_id=GCP_PROJECT,
                cluster_name=CLUSTER_NAME,
            )
        assert "custom_image and custom_image_family" in str(ctx.value)

    def test_nodes_number(self):
        # Zero workers of both kinds is rejected (single-node mode caveat).
        with pytest.raises(AssertionError) as ctx:
            ClusterGenerator(
                num_workers=0, num_preemptible_workers=0, project_id=GCP_PROJECT, cluster_name=CLUSTER_NAME
            )
        assert "num_workers == 0 means single" in str(ctx.value)

    def test_build(self):
        # Full kwargs with custom_image must build exactly CONFIG.
        generator = ClusterGenerator(**self._generator_kwargs(custom_image="custom_image"))
        assert CONFIG == generator.make()

    def test_build_with_custom_image_family(self):
        # Same kwargs but with custom_image_family as the image source.
        generator = ClusterGenerator(
            **self._generator_kwargs(custom_image_family="custom_image_family")
        )
        assert CONFIG_WITH_CUSTOM_IMAGE_FAMILY == generator.make()
class TestDataprocClusterCreateOperator(DataprocClusterTestBase):
    """Tests for DataprocCreateClusterOperator."""

    def test_deprecation_warning(self):
        # Passing cluster fields by keyword (instead of cluster_config) is
        # deprecated but must still produce a usable generated config.
        with pytest.warns(DeprecationWarning) as warnings:
            op = DataprocCreateClusterOperator(
                task_id=TASK_ID,
                region=GCP_LOCATION,
                project_id=GCP_PROJECT,
                cluster_name="cluster_name",
                num_workers=2,
                zone="zone",
            )
        assert_warning("Passing cluster parameters by keywords", warnings)
        assert op.project_id == GCP_PROJECT
        assert op.cluster_name == "cluster_name"
        assert op.cluster_config['worker_config']['num_instances'] == 2
        assert "zones/zone" in op.cluster_config['master_config']["machine_type_uri"]
        # Omitting `region` is also deprecated and falls back to 'global'.
        with pytest.warns(DeprecationWarning) as warnings:
            op_default_region = DataprocCreateClusterOperator(
                task_id=TASK_ID,
                project_id=GCP_PROJECT,
                cluster_name="cluster_name",
                cluster_config=op.cluster_config,
            )
        assert_warning("Default region value", warnings)
        assert op_default_region.region == 'global'

    @mock.patch(DATAPROC_PATH.format("Cluster.to_dict"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, to_dict_mock):
        # Record hook calls on the shared manager mock so the relative order
        # of the cluster-link xcom push vs create_cluster can be asserted.
        self.extra_links_manager_mock.attach_mock(mock_hook, 'hook')
        mock_hook.return_value.create_cluster.result.return_value = None
        create_cluster_args = {
            'region': GCP_LOCATION,
            'project_id': GCP_PROJECT,
            'cluster_name': CLUSTER_NAME,
            'request_id': REQUEST_ID,
            'retry': RETRY,
            'timeout': TIMEOUT,
            'metadata': METADATA,
            'cluster_config': CONFIG,
            'labels': LABELS,
        }
        expected_calls = self.extra_links_expected_calls_base + [
            call.hook().create_cluster(**create_cluster_args),
        ]
        op = DataprocCreateClusterOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            labels=LABELS,
            cluster_name=CLUSTER_NAME,
            project_id=GCP_PROJECT,
            cluster_config=CONFIG,
            request_id=REQUEST_ID,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=self.mock_context)
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.create_cluster.assert_called_once_with(**create_cluster_args)
        # Test whether xcom push occurs before create cluster is called
        self.extra_links_manager_mock.assert_has_calls(expected_calls, any_order=False)
        to_dict_mock.assert_called_once_with(mock_hook().create_cluster().result())
        self.mock_ti.xcom_push.assert_called_once_with(
            key="cluster_conf",
            value=DATAPROC_CLUSTER_CONF_EXPECTED,
            execution_date=None,
        )

    @mock.patch(DATAPROC_PATH.format("Cluster.to_dict"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute_if_cluster_exists(self, mock_hook, to_dict_mock):
        # AlreadyExists from create_cluster is tolerated by default: the
        # operator falls back to fetching the existing cluster.
        mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")]
        mock_hook.return_value.get_cluster.return_value.status.state = 0
        op = DataprocCreateClusterOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster_config=CONFIG,
            labels=LABELS,
            cluster_name=CLUSTER_NAME,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            request_id=REQUEST_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=self.mock_context)
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.create_cluster.assert_called_once_with(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster_config=CONFIG,
            labels=LABELS,
            cluster_name=CLUSTER_NAME,
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        mock_hook.return_value.get_cluster.assert_called_once_with(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster_name=CLUSTER_NAME,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        # The returned/xcom'd cluster is the fetched (pre-existing) one.
        to_dict_mock.assert_called_once_with(mock_hook.return_value.get_cluster.return_value)

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute_if_cluster_exists_do_not_use(self, mock_hook):
        # With use_if_exists=False the AlreadyExists error propagates.
        mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")]
        mock_hook.return_value.get_cluster.return_value.status.state = 0
        op = DataprocCreateClusterOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster_config=CONFIG,
            labels=LABELS,
            cluster_name=CLUSTER_NAME,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            request_id=REQUEST_ID,
            use_if_exists=False,
        )
        with pytest.raises(AlreadyExists):
            op.execute(context=self.mock_context)

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute_if_cluster_exists_in_error_state(self, mock_hook):
        # An existing cluster whose state equals ERROR is diagnosed and, with
        # delete_on_error=True, deleted before the operator fails.
        mock_hook.return_value.create_cluster.side_effect = [AlreadyExists("test")]
        cluster_status = mock_hook.return_value.get_cluster.return_value.status
        cluster_status.state = 0
        cluster_status.State.ERROR = 0
        op = DataprocCreateClusterOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster_config=CONFIG,
            labels=LABELS,
            cluster_name=CLUSTER_NAME,
            delete_on_error=True,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            request_id=REQUEST_ID,
        )
        with pytest.raises(AirflowException):
            op.execute(context=self.mock_context)
        mock_hook.return_value.diagnose_cluster.assert_called_once_with(
            region=GCP_LOCATION, project_id=GCP_PROJECT, cluster_name=CLUSTER_NAME
        )
        mock_hook.return_value.delete_cluster.assert_called_once_with(
            region=GCP_LOCATION, project_id=GCP_PROJECT, cluster_name=CLUSTER_NAME
        )

    @mock.patch(DATAPROC_PATH.format("exponential_sleep_generator"))
    @mock.patch(DATAPROC_PATH.format("DataprocCreateClusterOperator._create_cluster"))
    @mock.patch(DATAPROC_PATH.format("DataprocCreateClusterOperator._get_cluster"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute_if_cluster_exists_in_deleting_state(
        self, mock_hook, mock_get_cluster, mock_create_cluster, mock_generator
    ):
        # First attempt finds a cluster in DELETING state; the operator waits
        # (sleep generator mocked), retries creation, and the retry ends in
        # ERROR, so the operator diagnoses and fails.
        cluster = mock.MagicMock()
        cluster.status.state = 0
        cluster.status.State.DELETING = 0
        cluster2 = mock.MagicMock()
        cluster2.status.state = 0
        cluster2.status.State.ERROR = 0
        mock_create_cluster.side_effect = [AlreadyExists("test"), cluster2]
        mock_generator.return_value = [0]
        mock_get_cluster.side_effect = [cluster, NotFound("test")]
        op = DataprocCreateClusterOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster_config=CONFIG,
            labels=LABELS,
            cluster_name=CLUSTER_NAME,
            delete_on_error=True,
            gcp_conn_id=GCP_CONN_ID,
        )
        with pytest.raises(AirflowException):
            op.execute(context=self.mock_context)
        calls = [mock.call(mock_hook.return_value), mock.call(mock_hook.return_value)]
        mock_get_cluster.assert_has_calls(calls)
        mock_create_cluster.assert_has_calls(calls)
        mock_hook.return_value.diagnose_cluster.assert_called_once_with(
            region=GCP_LOCATION, project_id=GCP_PROJECT, cluster_name=CLUSTER_NAME
        )
@pytest.mark.need_serialized_dag
def test_create_cluster_operator_extra_links(dag_maker, create_task_instance_of_operator):
    """DataprocClusterLink must survive DAG (de)serialization and resolve to a
    URL only after the cluster_conf XCom push."""
    ti = create_task_instance_of_operator(
        DataprocCreateClusterOperator,
        dag_id=TEST_DAG_ID,
        execution_date=DEFAULT_DATE,
        task_id=TASK_ID,
        region=GCP_LOCATION,
        project_id=GCP_PROJECT,
        cluster_name=CLUSTER_NAME,
        delete_on_error=True,
        gcp_conn_id=GCP_CONN_ID,
    )
    dag_json = dag_maker.get_serialized_data()
    task_from_json = SerializedDAG.from_dict(dag_json).task_dict[TASK_ID]

    # The serialized representation records the extra link class.
    expected_links = [{"airflow.providers.google.cloud.operators.dataproc.DataprocClusterLink": {}}]
    assert dag_json["dag"]["tasks"][0]["_operator_extra_links"] == expected_links

    # Deserialization restores the concrete link type.
    assert isinstance(task_from_json.operator_extra_links[0], DataprocClusterLink)

    # Before any XCom push the link resolves to an empty URL on both copies.
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == ""
    assert task_from_json.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == ""

    ti.xcom_push(key="cluster_conf", value=DATAPROC_CLUSTER_CONF_EXPECTED)

    # After the push both copies render the console URL.
    assert task_from_json.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == DATAPROC_CLUSTER_LINK_EXPECTED
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == DATAPROC_CLUSTER_LINK_EXPECTED
class TestDataprocClusterScaleOperator(DataprocClusterTestBase):
    """Tests for the deprecated DataprocScaleClusterOperator."""

    def test_deprecation_warning(self):
        # The operator warns that DataprocUpdateClusterOperator replaces it.
        with pytest.warns(DeprecationWarning) as warnings:
            DataprocScaleClusterOperator(task_id=TASK_ID, cluster_name=CLUSTER_NAME, project_id=GCP_PROJECT)
        assert_warning("DataprocUpdateClusterOperator", warnings)

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        # Record hook calls on the shared manager mock so the relative order
        # of the cluster-link xcom push vs update_cluster can be asserted.
        self.extra_links_manager_mock.attach_mock(mock_hook, 'hook')
        mock_hook.return_value.update_cluster.result.return_value = None
        # num_workers / num_preemptible_workers translate into a cluster
        # update over the worker-count field mask (UPDATE_MASK).
        cluster_update = {
            "config": {"worker_config": {"num_instances": 3}, "secondary_worker_config": {"num_instances": 4}}
        }
        update_cluster_args = {
            'project_id': GCP_PROJECT,
            'region': GCP_LOCATION,
            'cluster_name': CLUSTER_NAME,
            'cluster': cluster_update,
            # "10m" is converted to a 600-second graceful decommission timeout.
            'graceful_decommission_timeout': {"seconds": 600},
            'update_mask': UPDATE_MASK,
        }
        expected_calls = self.extra_links_expected_calls_base + [
            call.hook().update_cluster(**update_cluster_args)
        ]
        op = DataprocScaleClusterOperator(
            task_id=TASK_ID,
            cluster_name=CLUSTER_NAME,
            project_id=GCP_PROJECT,
            region=GCP_LOCATION,
            num_workers=3,
            num_preemptible_workers=4,
            graceful_decommission_timeout="10m",
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=self.mock_context)
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.update_cluster.assert_called_once_with(**update_cluster_args)
        # Test whether xcom push occurs before cluster is updated
        self.extra_links_manager_mock.assert_has_calls(expected_calls, any_order=False)
        self.mock_ti.xcom_push.assert_called_once_with(
            key="cluster_conf",
            value=DATAPROC_CLUSTER_CONF_EXPECTED,
            execution_date=None,
        )
@pytest.mark.need_serialized_dag
def test_scale_cluster_operator_extra_links(dag_maker, create_task_instance_of_operator):
    """DataprocClusterLink on the scale operator must survive DAG
    (de)serialization and resolve only after the cluster_conf XCom push."""
    ti = create_task_instance_of_operator(
        DataprocScaleClusterOperator,
        dag_id=TEST_DAG_ID,
        execution_date=DEFAULT_DATE,
        task_id=TASK_ID,
        cluster_name=CLUSTER_NAME,
        project_id=GCP_PROJECT,
        region=GCP_LOCATION,
        num_workers=3,
        num_preemptible_workers=2,
        graceful_decommission_timeout="2m",
        gcp_conn_id=GCP_CONN_ID,
    )
    dag_json = dag_maker.get_serialized_data()
    task_from_json = SerializedDAG.from_dict(dag_json).task_dict[TASK_ID]

    # The serialized representation records the extra link class.
    expected_links = [{"airflow.providers.google.cloud.operators.dataproc.DataprocClusterLink": {}}]
    assert dag_json["dag"]["tasks"][0]["_operator_extra_links"] == expected_links

    # Deserialization restores the concrete link type.
    assert isinstance(task_from_json.operator_extra_links[0], DataprocClusterLink)

    # Before any XCom push the link resolves to an empty URL on both copies.
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == ""
    assert task_from_json.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == ""

    ti.xcom_push(key="cluster_conf", value=DATAPROC_CLUSTER_CONF_EXPECTED)

    # After the push both copies render the console URL.
    assert task_from_json.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == DATAPROC_CLUSTER_LINK_EXPECTED
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == DATAPROC_CLUSTER_LINK_EXPECTED
class TestDataprocClusterDeleteOperator(unittest.TestCase):
    """Tests for DataprocDeleteClusterOperator."""

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        """All arguments are forwarded verbatim to DataprocHook.delete_cluster."""
        delete_kwargs = dict(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            cluster_name=CLUSTER_NAME,
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        op = DataprocDeleteClusterOperator(
            task_id=TASK_ID,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            **delete_kwargs,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        # delete_cluster additionally receives cluster_uuid=None (operator default).
        mock_hook.return_value.delete_cluster.assert_called_once_with(cluster_uuid=None, **delete_kwargs)
class TestDataprocSubmitJobOperator(DataprocJobTestBase):
    """Tests for DataprocSubmitJobOperator."""

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        # Expected ordered calls: the job-link XCom push must be recorded
        # before the operator starts polling (wait_for_job).
        xcom_push_call = call.ti.xcom_push(
            execution_date=None, key='job_conf', value=DATAPROC_JOB_CONF_EXPECTED
        )
        wait_for_job_call = call.hook().wait_for_job(
            job_id=TEST_JOB_ID, region=GCP_LOCATION, project_id=GCP_PROJECT, timeout=None
        )
        job = {}
        mock_hook.return_value.wait_for_job.return_value = None
        mock_hook.return_value.submit_job.return_value.reference.job_id = TEST_JOB_ID
        self.extra_links_manager_mock.attach_mock(mock_hook, 'hook')
        op = DataprocSubmitJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            job=job,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            request_id=REQUEST_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=self.mock_context)
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        # Test whether xcom push occurs before polling for job
        self.assertLess(
            self.extra_links_manager_mock.mock_calls.index(xcom_push_call),
            self.extra_links_manager_mock.mock_calls.index(wait_for_job_call),
            msg='Xcom push for Job Link has to be done before polling for job status',
        )
        mock_hook.return_value.submit_job.assert_called_once_with(
            project_id=GCP_PROJECT,
            region=GCP_LOCATION,
            job=job,
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        mock_hook.return_value.wait_for_job.assert_called_once_with(
            job_id=TEST_JOB_ID, project_id=GCP_PROJECT, region=GCP_LOCATION, timeout=None
        )
        self.mock_ti.xcom_push.assert_called_once_with(
            key="job_conf", value=DATAPROC_JOB_CONF_EXPECTED, execution_date=None
        )

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute_async(self, mock_hook):
        # asynchronous=True submits the job but must not wait for completion.
        job = {}
        mock_hook.return_value.wait_for_job.return_value = None
        mock_hook.return_value.submit_job.return_value.reference.job_id = TEST_JOB_ID
        op = DataprocSubmitJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            job=job,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            asynchronous=True,
            timeout=TIMEOUT,
            metadata=METADATA,
            request_id=REQUEST_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=self.mock_context)
        mock_hook.assert_called_once_with(
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        mock_hook.return_value.submit_job.assert_called_once_with(
            project_id=GCP_PROJECT,
            region=GCP_LOCATION,
            job=job,
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        mock_hook.return_value.wait_for_job.assert_not_called()
        self.mock_ti.xcom_push.assert_called_once_with(
            key="job_conf", value=DATAPROC_JOB_CONF_EXPECTED, execution_date=None
        )

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_on_kill(self, mock_hook):
        # cancel_on_kill controls whether on_kill cancels the submitted job.
        job = {}
        job_id = "job_id"
        mock_hook.return_value.wait_for_job.return_value = None
        mock_hook.return_value.submit_job.return_value.reference.job_id = job_id
        op = DataprocSubmitJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            job=job,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            request_id=REQUEST_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            cancel_on_kill=False,
        )
        op.execute(context=self.mock_context)
        op.on_kill()
        mock_hook.return_value.cancel_job.assert_not_called()
        op.cancel_on_kill = True
        op.on_kill()
        mock_hook.return_value.cancel_job.assert_called_once_with(
            project_id=GCP_PROJECT, region=GCP_LOCATION, job_id=job_id
        )

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_location_deprecation_warning(self, mock_hook):
        # Passing `location` instead of `region` is deprecated but must behave
        # exactly like test_execute; omitting both raises TypeError.
        xcom_push_call = call.ti.xcom_push(
            execution_date=None, key='job_conf', value=DATAPROC_JOB_CONF_EXPECTED
        )
        wait_for_job_call = call.hook().wait_for_job(
            job_id=TEST_JOB_ID, region=GCP_LOCATION, project_id=GCP_PROJECT, timeout=None
        )
        job = {}
        mock_hook.return_value.wait_for_job.return_value = None
        mock_hook.return_value.submit_job.return_value.reference.job_id = TEST_JOB_ID
        self.extra_links_manager_mock.attach_mock(mock_hook, 'hook')
        warning_message = (
            "Parameter `location` will be deprecated. "
            "Please provide value through `region` parameter instead."
        )
        with pytest.warns(DeprecationWarning) as warnings:
            op = DataprocSubmitJobOperator(
                task_id=TASK_ID,
                location=GCP_LOCATION,
                project_id=GCP_PROJECT,
                job=job,
                gcp_conn_id=GCP_CONN_ID,
                retry=RETRY,
                timeout=TIMEOUT,
                metadata=METADATA,
                request_id=REQUEST_ID,
                impersonation_chain=IMPERSONATION_CHAIN,
            )
            op.execute(context=self.mock_context)
            mock_hook.assert_called_once_with(
                gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN
            )
            # Test whether xcom push occurs before polling for job
            self.assertLess(
                self.extra_links_manager_mock.mock_calls.index(xcom_push_call),
                self.extra_links_manager_mock.mock_calls.index(wait_for_job_call),
                msg='Xcom push for Job Link has to be done before polling for job status',
            )
            mock_hook.return_value.submit_job.assert_called_once_with(
                project_id=GCP_PROJECT,
                region=GCP_LOCATION,
                job=job,
                request_id=REQUEST_ID,
                retry=RETRY,
                timeout=TIMEOUT,
                metadata=METADATA,
            )
            mock_hook.return_value.wait_for_job.assert_called_once_with(
                job_id=TEST_JOB_ID, project_id=GCP_PROJECT, region=GCP_LOCATION, timeout=None
            )
            self.mock_ti.xcom_push.assert_called_once_with(
                key="job_conf", value=DATAPROC_JOB_CONF_EXPECTED, execution_date=None
            )
            assert warning_message == str(warnings[0].message)
        # Neither `region` nor `location` given: constructor/execute fails.
        with pytest.raises(TypeError):
            op = DataprocSubmitJobOperator(
                task_id=TASK_ID,
                project_id=GCP_PROJECT,
                job=job,
                gcp_conn_id=GCP_CONN_ID,
                retry=RETRY,
                timeout=TIMEOUT,
                metadata=METADATA,
                request_id=REQUEST_ID,
                impersonation_chain=IMPERSONATION_CHAIN,
            )
            op.execute(context=self.mock_context)
@pytest.mark.need_serialized_dag
@mock.patch(DATAPROC_PATH.format("DataprocHook"))
def test_submit_job_operator_extra_links(mock_hook, dag_maker, create_task_instance_of_operator):
    """DataprocJobLink must survive DAG (de)serialization and resolve to a URL
    only after the job_conf XCom push."""
    mock_hook.return_value.project_id = GCP_PROJECT
    ti = create_task_instance_of_operator(
        DataprocSubmitJobOperator,
        dag_id=TEST_DAG_ID,
        execution_date=DEFAULT_DATE,
        task_id=TASK_ID,
        region=GCP_LOCATION,
        project_id=GCP_PROJECT,
        job={},
        gcp_conn_id=GCP_CONN_ID,
    )
    dag_json = dag_maker.get_serialized_data()
    task_from_json = SerializedDAG.from_dict(dag_json).task_dict[TASK_ID]

    # The serialized representation records the extra link class.
    expected_links = [{"airflow.providers.google.cloud.operators.dataproc.DataprocJobLink": {}}]
    assert dag_json["dag"]["tasks"][0]["_operator_extra_links"] == expected_links

    # Deserialization restores the concrete link type.
    assert isinstance(task_from_json.operator_extra_links[0], DataprocJobLink)

    # Before any XCom push the link resolves to an empty URL on both copies.
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocJobLink.name) == ""
    assert task_from_json.get_extra_links(DEFAULT_DATE, DataprocJobLink.name) == ""

    ti.xcom_push(key="job_conf", value=DATAPROC_JOB_CONF_EXPECTED)

    # After the push both copies render the console URL.
    assert task_from_json.get_extra_links(DEFAULT_DATE, DataprocJobLink.name) == DATAPROC_JOB_LINK_EXPECTED
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocJobLink.name) == DATAPROC_JOB_LINK_EXPECTED
class TestDataprocUpdateClusterOperator(DataprocClusterTestBase):
    """Tests for DataprocUpdateClusterOperator."""

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        # Record hook calls on the shared manager mock so the relative order
        # of the cluster-link xcom push vs update_cluster can be asserted.
        self.extra_links_manager_mock.attach_mock(mock_hook, 'hook')
        mock_hook.return_value.update_cluster.result.return_value = None
        cluster_decommission_timeout = {"graceful_decommission_timeout": "600s"}
        update_cluster_args = {
            'region': GCP_LOCATION,
            'project_id': GCP_PROJECT,
            'cluster_name': CLUSTER_NAME,
            'cluster': CLUSTER,
            'update_mask': UPDATE_MASK,
            'graceful_decommission_timeout': cluster_decommission_timeout,
            'request_id': REQUEST_ID,
            'retry': RETRY,
            'timeout': TIMEOUT,
            'metadata': METADATA,
        }
        expected_calls = self.extra_links_expected_calls_base + [
            call.hook().update_cluster(**update_cluster_args)
        ]
        op = DataprocUpdateClusterOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            cluster_name=CLUSTER_NAME,
            cluster=CLUSTER,
            update_mask=UPDATE_MASK,
            request_id=REQUEST_ID,
            graceful_decommission_timeout=cluster_decommission_timeout,
            project_id=GCP_PROJECT,
            gcp_conn_id=GCP_CONN_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=self.mock_context)
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.update_cluster.assert_called_once_with(**update_cluster_args)
        # Test whether the xcom push happens before updating the cluster
        self.extra_links_manager_mock.assert_has_calls(expected_calls, any_order=False)
        self.mock_ti.xcom_push.assert_called_once_with(
            key="cluster_conf",
            value=DATAPROC_CLUSTER_CONF_EXPECTED,
            execution_date=None,
        )

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_location_deprecation_warning(self, mock_hook):
        # Passing `location` instead of `region` is deprecated but must keep
        # working identically; omitting both raises TypeError.
        self.extra_links_manager_mock.attach_mock(mock_hook, 'hook')
        mock_hook.return_value.update_cluster.result.return_value = None
        cluster_decommission_timeout = {"graceful_decommission_timeout": "600s"}
        update_cluster_args = {
            'region': GCP_LOCATION,
            'project_id': GCP_PROJECT,
            'cluster_name': CLUSTER_NAME,
            'cluster': CLUSTER,
            'update_mask': UPDATE_MASK,
            'graceful_decommission_timeout': cluster_decommission_timeout,
            'request_id': REQUEST_ID,
            'retry': RETRY,
            'timeout': TIMEOUT,
            'metadata': METADATA,
        }
        expected_calls = self.extra_links_expected_calls_base + [
            call.hook().update_cluster(**update_cluster_args)
        ]
        warning_message = (
            "Parameter `location` will be deprecated. "
            "Please provide value through `region` parameter instead."
        )
        with pytest.warns(DeprecationWarning) as warnings:
            op = DataprocUpdateClusterOperator(
                task_id=TASK_ID,
                location=GCP_LOCATION,
                cluster_name=CLUSTER_NAME,
                cluster=CLUSTER,
                update_mask=UPDATE_MASK,
                request_id=REQUEST_ID,
                graceful_decommission_timeout=cluster_decommission_timeout,
                project_id=GCP_PROJECT,
                gcp_conn_id=GCP_CONN_ID,
                retry=RETRY,
                timeout=TIMEOUT,
                metadata=METADATA,
                impersonation_chain=IMPERSONATION_CHAIN,
            )
            op.execute(context=self.mock_context)
            mock_hook.assert_called_once_with(
                gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN
            )
            mock_hook.return_value.update_cluster.assert_called_once_with(**update_cluster_args)
            assert warning_message == str(warnings[0].message)
        # Test whether the xcom push happens before updating the cluster
        self.extra_links_manager_mock.assert_has_calls(expected_calls, any_order=False)
        self.mock_ti.xcom_push.assert_called_once_with(
            key="cluster_conf",
            value=DATAPROC_CLUSTER_CONF_EXPECTED,
            execution_date=None,
        )
        # Neither `region` nor `location` given: constructor/execute fails.
        with pytest.raises(TypeError):
            op = DataprocUpdateClusterOperator(
                task_id=TASK_ID,
                cluster_name=CLUSTER_NAME,
                cluster=CLUSTER,
                update_mask=UPDATE_MASK,
                request_id=REQUEST_ID,
                graceful_decommission_timeout=cluster_decommission_timeout,
                project_id=GCP_PROJECT,
                gcp_conn_id=GCP_CONN_ID,
                retry=RETRY,
                timeout=TIMEOUT,
                metadata=METADATA,
                impersonation_chain=IMPERSONATION_CHAIN,
            )
            op.execute(context=self.mock_context)
@pytest.mark.need_serialized_dag
def test_update_cluster_operator_extra_links(dag_maker, create_task_instance_of_operator):
    """DataprocClusterLink on the update operator must survive DAG
    (de)serialization and resolve only after the cluster_conf XCom push."""
    ti = create_task_instance_of_operator(
        DataprocUpdateClusterOperator,
        dag_id=TEST_DAG_ID,
        execution_date=DEFAULT_DATE,
        task_id=TASK_ID,
        region=GCP_LOCATION,
        cluster_name=CLUSTER_NAME,
        cluster=CLUSTER,
        update_mask=UPDATE_MASK,
        graceful_decommission_timeout={"graceful_decommission_timeout": "600s"},
        project_id=GCP_PROJECT,
        gcp_conn_id=GCP_CONN_ID,
    )
    dag_json = dag_maker.get_serialized_data()
    task_from_json = SerializedDAG.from_dict(dag_json).task_dict[TASK_ID]

    # The serialized representation records the extra link class.
    expected_links = [{"airflow.providers.google.cloud.operators.dataproc.DataprocClusterLink": {}}]
    assert dag_json["dag"]["tasks"][0]["_operator_extra_links"] == expected_links

    # Deserialization restores the concrete link type.
    assert isinstance(task_from_json.operator_extra_links[0], DataprocClusterLink)

    # Before any XCom push the link resolves to an empty URL on both copies.
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == ""
    assert task_from_json.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == ""

    ti.xcom_push(key="cluster_conf", value=DATAPROC_CLUSTER_CONF_EXPECTED)

    # After the push both copies render the console URL.
    assert task_from_json.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == DATAPROC_CLUSTER_LINK_EXPECTED
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocClusterLink.name) == DATAPROC_CLUSTER_LINK_EXPECTED
class TestDataprocWorkflowTemplateInstantiateOperator(unittest.TestCase):
    """Tests for DataprocInstantiateWorkflowTemplateOperator."""

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        """All arguments are forwarded to DataprocHook.instantiate_workflow_template."""
        shared_kwargs = dict(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            version=6,
            parameters={},
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        op = DataprocInstantiateWorkflowTemplateOperator(
            task_id=TASK_ID,
            template_id="template_id",
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            **shared_kwargs,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        # The operator's `template_id` becomes the hook's `template_name`.
        mock_hook.return_value.instantiate_workflow_template.assert_called_once_with(
            template_name="template_id", **shared_kwargs
        )
class TestDataprocWorkflowTemplateInstantiateInlineOperator(unittest.TestCase):
    """Tests for DataprocInstantiateInlineWorkflowTemplateOperator."""

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        """The inline template and request options are forwarded to the hook verbatim."""
        shared_kwargs = dict(
            template={},
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        op = DataprocInstantiateInlineWorkflowTemplateOperator(
            task_id=TASK_ID,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            **shared_kwargs,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.instantiate_inline_workflow_template.assert_called_once_with(**shared_kwargs)
class TestDataProcHiveOperator(unittest.TestCase):
    """Tests for the deprecated DataprocSubmitHiveJobOperator."""

    query = "define sin HiveUDF('sin');"
    variables = {"key": "value"}
    job_id = "uuid_id"
    # Job payload the operator is expected to generate from the inputs above.
    job = {
        "reference": {"project_id": GCP_PROJECT, "job_id": "{{task.task_id}}_{{ds_nodash}}_" + job_id},
        "placement": {"cluster_name": "cluster-1"},
        "labels": {"airflow-version": AIRFLOW_VERSION},
        "hive_job": {"query_list": {"queries": [query]}, "script_variables": variables},
    }

    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_deprecation_warning(self, mock_hook):
        """Instantiation warns and points at DataprocSubmitJobOperator."""
        with pytest.warns(DeprecationWarning) as warnings:
            DataprocSubmitHiveJobOperator(task_id=TASK_ID, region=GCP_LOCATION, query="query")
        assert_warning("DataprocSubmitJobOperator", warnings)

    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, mock_uuid):
        """The generated Hive job is submitted and then polled to completion."""
        hook = mock_hook.return_value
        mock_uuid.return_value = self.job_id
        hook.project_id = GCP_PROJECT
        hook.wait_for_job.return_value = None
        hook.submit_job.return_value.reference.job_id = self.job_id
        op = DataprocSubmitHiveJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            query=self.query,
            variables=self.variables,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=MagicMock())
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        hook.submit_job.assert_called_once_with(project_id=GCP_PROJECT, job=self.job, region=GCP_LOCATION)
        hook.wait_for_job.assert_called_once_with(
            job_id=self.job_id, region=GCP_LOCATION, project_id=GCP_PROJECT
        )

    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_builder(self, mock_hook, mock_uuid):
        """generate_job builds the expected payload without submitting it."""
        mock_uuid.return_value = self.job_id
        mock_hook.return_value.project_id = GCP_PROJECT
        op = DataprocSubmitHiveJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            query=self.query,
            variables=self.variables,
        )
        assert op.generate_job() == self.job
class TestDataProcPigOperator(unittest.TestCase):
    """Tests for the deprecated DataprocSubmitPigJobOperator."""
    query = "define sin HiveUDF('sin');"
    variables = {"key": "value"}
    job_id = "uuid_id"
    # Expected Dataproc job payload; the job_id suffix comes from the
    # mocked uuid4().
    job = {
        "reference": {"project_id": GCP_PROJECT, "job_id": "{{task.task_id}}_{{ds_nodash}}_" + job_id},
        "placement": {"cluster_name": "cluster-1"},
        "labels": {"airflow-version": AIRFLOW_VERSION},
        "pig_job": {"query_list": {"queries": [query]}, "script_variables": variables},
    }
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_deprecation_warning(self, mock_hook):
        """Instantiating the operator must emit a DeprecationWarning."""
        with pytest.warns(DeprecationWarning) as warnings:
            DataprocSubmitPigJobOperator(task_id=TASK_ID, region=GCP_LOCATION, query="query")
        assert_warning("DataprocSubmitJobOperator", warnings)
    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, mock_uuid):
        """execute() should submit the expected Pig job and wait for it."""
        # Pin uuid4() so the generated job_id matches self.job.
        mock_uuid.return_value = self.job_id
        mock_hook.return_value.project_id = GCP_PROJECT
        mock_hook.return_value.wait_for_job.return_value = None
        mock_hook.return_value.submit_job.return_value.reference.job_id = self.job_id
        op = DataprocSubmitPigJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            query=self.query,
            variables=self.variables,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=MagicMock())
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.submit_job.assert_called_once_with(
            project_id=GCP_PROJECT, job=self.job, region=GCP_LOCATION
        )
        mock_hook.return_value.wait_for_job.assert_called_once_with(
            job_id=self.job_id, region=GCP_LOCATION, project_id=GCP_PROJECT
        )
    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_builder(self, mock_hook, mock_uuid):
        """generate_job() should build exactly the expected job dict."""
        mock_hook.return_value.project_id = GCP_PROJECT
        mock_uuid.return_value = self.job_id
        op = DataprocSubmitPigJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            query=self.query,
            variables=self.variables,
        )
        job = op.generate_job()
        assert self.job == job
class TestDataProcSparkSqlOperator(unittest.TestCase):
    """Tests for the deprecated DataprocSubmitSparkSqlJobOperator."""
    query = "SHOW DATABASES;"
    variables = {"key": "value"}
    job_id = "uuid_id"
    # Expected job payload when the project id comes from the hook.
    job = {
        "reference": {"project_id": GCP_PROJECT, "job_id": "{{task.task_id}}_{{ds_nodash}}_" + job_id},
        "placement": {"cluster_name": "cluster-1"},
        "labels": {"airflow-version": AIRFLOW_VERSION},
        "spark_sql_job": {"query_list": {"queries": [query]}, "script_variables": variables},
    }
    # Expected payload when the operator overrides the project id.
    other_project_job = {
        "reference": {"project_id": "other-project", "job_id": "{{task.task_id}}_{{ds_nodash}}_" + job_id},
        "placement": {"cluster_name": "cluster-1"},
        "labels": {"airflow-version": AIRFLOW_VERSION},
        "spark_sql_job": {"query_list": {"queries": [query]}, "script_variables": variables},
    }
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_deprecation_warning(self, mock_hook):
        """Instantiating the operator must emit a DeprecationWarning."""
        with pytest.warns(DeprecationWarning) as warnings:
            DataprocSubmitSparkSqlJobOperator(task_id=TASK_ID, region=GCP_LOCATION, query="query")
        assert_warning("DataprocSubmitJobOperator", warnings)
    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, mock_uuid):
        """execute() should submit the expected job and wait for it."""
        # Pin uuid4() so the generated job_id matches self.job.
        mock_uuid.return_value = self.job_id
        mock_hook.return_value.project_id = GCP_PROJECT
        mock_hook.return_value.wait_for_job.return_value = None
        mock_hook.return_value.submit_job.return_value.reference.job_id = self.job_id
        op = DataprocSubmitSparkSqlJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            query=self.query,
            variables=self.variables,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=MagicMock())
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.submit_job.assert_called_once_with(
            project_id=GCP_PROJECT, job=self.job, region=GCP_LOCATION
        )
        mock_hook.return_value.wait_for_job.assert_called_once_with(
            job_id=self.job_id, region=GCP_LOCATION, project_id=GCP_PROJECT
        )
    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute_override_project_id(self, mock_hook, mock_uuid):
        """An explicit project_id must win over the hook's project id."""
        mock_uuid.return_value = self.job_id
        mock_hook.return_value.project_id = GCP_PROJECT
        mock_hook.return_value.wait_for_job.return_value = None
        mock_hook.return_value.submit_job.return_value.reference.job_id = self.job_id
        op = DataprocSubmitSparkSqlJobOperator(
            project_id="other-project",
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            query=self.query,
            variables=self.variables,
            impersonation_chain=IMPERSONATION_CHAIN,
        )
        op.execute(context=MagicMock())
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.submit_job.assert_called_once_with(
            project_id="other-project", job=self.other_project_job, region=GCP_LOCATION
        )
        mock_hook.return_value.wait_for_job.assert_called_once_with(
            job_id=self.job_id, region=GCP_LOCATION, project_id="other-project"
        )
    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_builder(self, mock_hook, mock_uuid):
        """generate_job() should build exactly the expected job dict."""
        mock_hook.return_value.project_id = GCP_PROJECT
        mock_uuid.return_value = self.job_id
        op = DataprocSubmitSparkSqlJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            query=self.query,
            variables=self.variables,
        )
        job = op.generate_job()
        assert self.job == job
class TestDataProcSparkOperator(DataprocJobTestBase):
    """Tests for the deprecated DataprocSubmitSparkJobOperator."""
    main_class = "org.apache.spark.examples.SparkPi"
    jars = ["file:///usr/lib/spark/examples/jars/spark-examples.jar"]
    # Expected Dataproc job payload built by the operator from the class
    # attributes above; the job_id suffix comes from the mocked uuid4().
    job = {
        "reference": {
            "project_id": GCP_PROJECT,
            "job_id": "{{task.task_id}}_{{ds_nodash}}_" + TEST_JOB_ID,
        },
        "placement": {"cluster_name": "cluster-1"},
        "labels": {"airflow-version": AIRFLOW_VERSION},
        "spark_job": {"jar_file_uris": jars, "main_class": main_class},
    }
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_deprecation_warning(self, mock_hook):
        """Instantiating the operator must emit a DeprecationWarning."""
        with pytest.warns(DeprecationWarning) as warnings:
            DataprocSubmitSparkJobOperator(
                task_id=TASK_ID, region=GCP_LOCATION, main_class=self.main_class, dataproc_jars=self.jars
            )
        assert_warning("DataprocSubmitJobOperator", warnings)
    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, mock_uuid):
        """execute() should build the job, push it to XCom, then poll it."""
        # Pin uuid4() so the generated job_id matches self.job.
        # (The original test assigned mock_uuid.return_value twice; the
        # duplicate assignment has been removed.)
        mock_uuid.return_value = TEST_JOB_ID
        mock_hook.return_value.project_id = GCP_PROJECT
        mock_hook.return_value.submit_job.return_value.reference.job_id = TEST_JOB_ID
        self.extra_links_manager_mock.attach_mock(mock_hook, 'hook')
        op = DataprocSubmitSparkJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            main_class=self.main_class,
            dataproc_jars=self.jars,
        )
        job = op.generate_job()
        assert self.job == job
        op.execute(context=self.mock_context)
        self.mock_ti.xcom_push.assert_called_once_with(
            key="job_conf", value=DATAPROC_JOB_CONF_EXPECTED, execution_date=None
        )
        # Test whether xcom push occurs before polling for job
        self.extra_links_manager_mock.assert_has_calls(self.extra_links_expected_calls, any_order=False)
@pytest.mark.need_serialized_dag
@mock.patch(DATAPROC_PATH.format("DataprocHook"))
def test_submit_spark_job_operator_extra_links(mock_hook, dag_maker, create_task_instance_of_operator):
    """Extra links must survive DAG (de)serialization and resolve from XCom."""
    mock_hook.return_value.project_id = GCP_PROJECT
    ti = create_task_instance_of_operator(
        DataprocSubmitSparkJobOperator,
        dag_id=TEST_DAG_ID,
        execution_date=DEFAULT_DATE,
        task_id=TASK_ID,
        region=GCP_LOCATION,
        gcp_conn_id=GCP_CONN_ID,
        main_class="org.apache.spark.examples.SparkPi",
        dataproc_jars=["file:///usr/lib/spark/examples/jars/spark-examples.jar"],
    )
    serialized_dag = dag_maker.get_serialized_data()
    deserialized_dag = SerializedDAG.from_dict(serialized_dag)
    deserialized_task = deserialized_dag.task_dict[TASK_ID]
    # Assert operator links for serialized DAG
    assert serialized_dag["dag"]["tasks"][0]["_operator_extra_links"] == [
        {"airflow.providers.google.cloud.operators.dataproc.DataprocJobLink": {}}
    ]
    # Assert operator link types are preserved during deserialization
    assert isinstance(deserialized_task.operator_extra_links[0], DataprocJobLink)
    # Assert operator link is empty when no XCom push occurred
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocJobLink.name) == ""
    # Assert operator link is empty for deserialized task when no XCom push occurred
    assert deserialized_task.get_extra_links(DEFAULT_DATE, DataprocJobLink.name) == ""
    ti.xcom_push(key="job_conf", value=DATAPROC_JOB_CONF_EXPECTED)
    # Assert operator links after task execution
    assert ti.task.get_extra_links(DEFAULT_DATE, DataprocJobLink.name) == DATAPROC_JOB_LINK_EXPECTED
    # Assert operator links are preserved in deserialized tasks
    link = deserialized_task.get_extra_links(DEFAULT_DATE, DataprocJobLink.name)
    assert link == DATAPROC_JOB_LINK_EXPECTED
class TestDataProcHadoopOperator(unittest.TestCase):
    """Tests for the deprecated DataprocSubmitHadoopJobOperator."""
    args = ["wordcount", "gs://pub/shakespeare/rose.txt"]
    jar = "file:///usr/lib/spark/examples/jars/spark-examples.jar"
    job_id = "uuid_id"
    # Expected Dataproc job payload; the job_id suffix comes from the
    # mocked uuid4().
    job = {
        "reference": {"project_id": GCP_PROJECT, "job_id": "{{task.task_id}}_{{ds_nodash}}_" + job_id},
        "placement": {"cluster_name": "cluster-1"},
        "labels": {"airflow-version": AIRFLOW_VERSION},
        "hadoop_job": {"main_jar_file_uri": jar, "args": args},
    }
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_deprecation_warning(self, mock_hook):
        """Instantiating the operator must emit a DeprecationWarning."""
        with pytest.warns(DeprecationWarning) as warnings:
            DataprocSubmitHadoopJobOperator(
                task_id=TASK_ID, region=GCP_LOCATION, main_jar=self.jar, arguments=self.args
            )
        assert_warning("DataprocSubmitJobOperator", warnings)
    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, mock_uuid):
        """generate_job() should build exactly the expected job dict."""
        # Pin uuid4() so the generated job_id matches self.job.
        # (The original test assigned mock_uuid.return_value twice; the
        # duplicate assignment has been removed.)
        mock_uuid.return_value = self.job_id
        mock_hook.return_value.project_id = GCP_PROJECT
        op = DataprocSubmitHadoopJobOperator(
            task_id=TASK_ID,
            region=GCP_LOCATION,
            gcp_conn_id=GCP_CONN_ID,
            main_jar=self.jar,
            arguments=self.args,
        )
        job = op.generate_job()
        assert self.job == job
class TestDataProcPySparkOperator(unittest.TestCase):
    """Tests for the deprecated DataprocSubmitPySparkJobOperator."""
    # NOTE(review): the URI template is used as-is (never .format()-ed)
    # in these tests; the operator only passes it through.
    uri = "gs://{}/{}"
    job_id = "uuid_id"
    # Expected Dataproc job payload; the job_id suffix comes from the
    # mocked uuid4().
    job = {
        "reference": {"project_id": GCP_PROJECT, "job_id": "{{task.task_id}}_{{ds_nodash}}_" + job_id},
        "placement": {"cluster_name": "cluster-1"},
        "labels": {"airflow-version": AIRFLOW_VERSION},
        "pyspark_job": {"main_python_file_uri": uri},
    }
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_deprecation_warning(self, mock_hook):
        """Instantiating the operator must emit a DeprecationWarning."""
        with pytest.warns(DeprecationWarning) as warnings:
            DataprocSubmitPySparkJobOperator(task_id=TASK_ID, region=GCP_LOCATION, main=self.uri)
        assert_warning("DataprocSubmitJobOperator", warnings)
    @mock.patch(DATAPROC_PATH.format("uuid.uuid4"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, mock_uuid):
        """generate_job() should build exactly the expected job dict."""
        mock_hook.return_value.project_id = GCP_PROJECT
        mock_uuid.return_value = self.job_id
        op = DataprocSubmitPySparkJobOperator(
            task_id=TASK_ID, region=GCP_LOCATION, gcp_conn_id=GCP_CONN_ID, main=self.uri
        )
        job = op.generate_job()
        assert self.job == job
class TestDataprocCreateWorkflowTemplateOperator:
    """Tests for DataprocCreateWorkflowTemplateOperator."""
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        """execute() should forward all arguments to the hook unchanged."""
        op = DataprocCreateWorkflowTemplateOperator(
            task_id=TASK_ID,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            template=WORKFLOW_TEMPLATE,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.create_workflow_template.assert_called_once_with(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
            template=WORKFLOW_TEMPLATE,
        )
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_location_deprecation_warning(self, mock_hook):
        """`location` must warn about deprecation yet still act as `region`;
        omitting both must raise TypeError."""
        with pytest.warns(DeprecationWarning) as warnings:
            warning_message = (
                "Parameter `location` will be deprecated. "
                "Please provide value through `region` parameter instead."
            )
            op = DataprocCreateWorkflowTemplateOperator(
                task_id=TASK_ID,
                gcp_conn_id=GCP_CONN_ID,
                impersonation_chain=IMPERSONATION_CHAIN,
                location=GCP_LOCATION,
                project_id=GCP_PROJECT,
                retry=RETRY,
                timeout=TIMEOUT,
                metadata=METADATA,
                template=WORKFLOW_TEMPLATE,
            )
            op.execute(context={})
            # The hook call must use the value passed as `location`.
            mock_hook.assert_called_once_with(
                gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN
            )
            mock_hook.return_value.create_workflow_template.assert_called_once_with(
                region=GCP_LOCATION,
                project_id=GCP_PROJECT,
                retry=RETRY,
                timeout=TIMEOUT,
                metadata=METADATA,
                template=WORKFLOW_TEMPLATE,
            )
            assert warning_message == str(warnings[0].message)
        # Without `region` or `location` construction must fail, so the
        # execute() below is expected to be unreachable.
        with pytest.raises(TypeError):
            op = DataprocCreateWorkflowTemplateOperator(
                task_id=TASK_ID,
                gcp_conn_id=GCP_CONN_ID,
                impersonation_chain=IMPERSONATION_CHAIN,
                project_id=GCP_PROJECT,
                retry=RETRY,
                timeout=TIMEOUT,
                metadata=METADATA,
                template=WORKFLOW_TEMPLATE,
            )
            op.execute(context={})
class TestDataprocCreateBatchOperator:
    """Tests for DataprocCreateBatchOperator."""
    @mock.patch(DATAPROC_PATH.format("Batch.to_dict"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, to_dict_mock):
        """execute() should forward all arguments to hook.create_batch()."""
        op = DataprocCreateBatchOperator(
            task_id=TASK_ID,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            batch=BATCH,
            batch_id=BATCH_ID,
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.create_batch.assert_called_once_with(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            batch=BATCH,
            batch_id=BATCH_ID,
            request_id=REQUEST_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
class TestDataprocDeleteBatchOperator:
    """Tests for DataprocDeleteBatchOperator."""
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        """execute() should forward all arguments to hook.delete_batch()."""
        op = DataprocDeleteBatchOperator(
            task_id=TASK_ID,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            project_id=GCP_PROJECT,
            region=GCP_LOCATION,
            batch_id=BATCH_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.delete_batch.assert_called_once_with(
            project_id=GCP_PROJECT,
            region=GCP_LOCATION,
            batch_id=BATCH_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
class TestDataprocGetBatchOperator:
    """Tests for DataprocGetBatchOperator."""
    @mock.patch(DATAPROC_PATH.format("Batch.to_dict"))
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook, to_dict_mock):
        """execute() should forward all arguments to hook.get_batch()."""
        op = DataprocGetBatchOperator(
            task_id=TASK_ID,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            project_id=GCP_PROJECT,
            region=GCP_LOCATION,
            batch_id=BATCH_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.get_batch.assert_called_once_with(
            project_id=GCP_PROJECT,
            region=GCP_LOCATION,
            batch_id=BATCH_ID,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
class TestDataprocListBatchesOperator:
    """Tests for DataprocListBatchesOperator."""
    @mock.patch(DATAPROC_PATH.format("DataprocHook"))
    def test_execute(self, mock_hook):
        """execute() should forward paging args to hook.list_batches()."""
        page_token = "page_token"
        page_size = 42
        op = DataprocListBatchesOperator(
            task_id=TASK_ID,
            gcp_conn_id=GCP_CONN_ID,
            impersonation_chain=IMPERSONATION_CHAIN,
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            page_size=page_size,
            page_token=page_token,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
        op.execute(context={})
        mock_hook.assert_called_once_with(gcp_conn_id=GCP_CONN_ID, impersonation_chain=IMPERSONATION_CHAIN)
        mock_hook.return_value.list_batches.assert_called_once_with(
            region=GCP_LOCATION,
            project_id=GCP_PROJECT,
            page_size=page_size,
            page_token=page_token,
            retry=RETRY,
            timeout=TIMEOUT,
            metadata=METADATA,
        )
|
{
"content_hash": "2badcd8ca4d9b650bf5d5ab2822deb37",
"timestamp": "",
"source": "github",
"line_count": 1772,
"max_line_length": 110,
"avg_line_length": 39.49379232505643,
"alnum_prop": 0.6222082505751397,
"repo_name": "mistercrunch/airflow",
"id": "34e63537f0c555202e3fe2b16771bc1f94526998",
"size": "70769",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/providers/google/cloud/operators/test_dataproc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "36341"
},
{
"name": "HTML",
"bytes": "99243"
},
{
"name": "JavaScript",
"bytes": "891460"
},
{
"name": "Mako",
"bytes": "494"
},
{
"name": "Python",
"bytes": "773270"
},
{
"name": "Shell",
"bytes": "5659"
}
],
"symlink_target": ""
}
|
import collections
import copy
import uuid
from keystoneauth1.fixture import V2Token
from keystoneauth1 import loading as ks_loading
import mock
from mox3 import mox
from neutronclient.common import exceptions
from neutronclient.v2_0 import client
from oslo_config import cfg
from oslo_config import fixture as config_fixture
from oslo_policy import policy as oslo_policy
from oslo_serialization import jsonutils
from oslo_utils import timeutils
import requests_mock
import six
from six.moves import range
from nova.compute import flavors
from nova import context
from nova import exception
from nova.network import model
from nova.network.neutronv2 import api as neutronapi
from nova.network.neutronv2 import constants
from nova import objects
from nova.pci import manager as pci_manager
from nova.pci import whitelist as pci_whitelist
from nova import policy
from nova import test
from nova.tests.unit import fake_instance
from nova.tests import uuidsentinel as uuids
# Global oslo.config handle used by tests to read/override options.
CONF = cfg.CONF
# NOTE: Neutron client raises Exception which is discouraged by HACKING.
# We set this variable here and use it for assertions below to avoid
# the hacking checks until we can make neutron client throw a custom
# exception class instead.
NEUTRON_CLIENT_EXCEPTION = Exception
# Minimal instance_info_cache DB record used as a stub return value in
# several tests; timestamps are unset and network_info is an empty
# JSON-serialized list.
fake_info_cache = {
    'created_at': None,
    'updated_at': None,
    'deleted_at': None,
    'deleted': False,
    'instance_uuid': 'fake-uuid',
    'network_info': '[]',
}
class MyComparator(mox.Comparator):
    """Recursive mox comparator.

    Considers dicts equal when they have the same size and every lhs
    entry recursively matches the corresponding rhs entry; lists and
    tuples are compared by size plus membership (order-insensitive);
    everything else falls back to ``==``.
    """
    def __init__(self, lhs):
        self.lhs = lhs
    def _com_dict(self, lhs, rhs):
        # Same number of keys, and each lhs entry must recursively match.
        if len(lhs) != len(rhs):
            return False
        return all(key in rhs and self._com(value, rhs[key])
                   for key, value in six.iteritems(lhs))
    def _com_list(self, lhs, rhs):
        # Order-insensitive: same length and every lhs element present
        # somewhere in rhs.
        if len(lhs) != len(rhs):
            return False
        return all(item in rhs for item in lhs)
    def _com(self, lhs, rhs):
        if lhs is None:
            return rhs is None
        if isinstance(lhs, dict):
            return isinstance(rhs, dict) and self._com_dict(lhs, rhs)
        if isinstance(lhs, list):
            return isinstance(rhs, list) and self._com_list(lhs, rhs)
        if isinstance(lhs, tuple):
            # Tuples reuse the list comparison once the type check passes.
            return isinstance(rhs, tuple) and self._com_list(lhs, rhs)
        return lhs == rhs
    def equals(self, rhs):
        return self._com(self.lhs, rhs)
    def __repr__(self):
        return str(self.lhs)
class TestNeutronClient(test.NoDBTestCase):
    """Tests for neutronapi.get_client auth-token and session handling."""
    def setUp(self):
        super(TestNeutronClient, self).setUp()
        # Clear module-level cached admin session/auth between tests.
        neutronapi.reset_state()
    def test_withtoken(self):
        """A context carrying a token yields a client bound to it."""
        self.flags(url='http://anyhost/', group='neutron')
        self.flags(timeout=30, group='neutron')
        my_context = context.RequestContext('userid',
                                            'my_tenantid',
                                            auth_token='token')
        cl = neutronapi.get_client(my_context)
        self.assertEqual(CONF.neutron.url, cl.httpclient.endpoint_override)
        self.assertEqual(my_context.auth_token,
                         cl.httpclient.auth.auth_token)
        self.assertEqual(CONF.neutron.timeout, cl.httpclient.session.timeout)
    def test_withouttoken(self):
        """A token-less, non-admin context must be rejected."""
        my_context = context.RequestContext('userid', 'my_tenantid')
        self.assertRaises(exceptions.Unauthorized,
                          neutronapi.get_client,
                          my_context)
    def test_withtoken_context_is_admin(self):
        """An admin context with a token behaves like a regular one."""
        self.flags(url='http://anyhost/', group='neutron')
        self.flags(timeout=30, group='neutron')
        my_context = context.RequestContext('userid',
                                            'my_tenantid',
                                            auth_token='token',
                                            is_admin=True)
        cl = neutronapi.get_client(my_context)
        self.assertEqual(CONF.neutron.url, cl.httpclient.endpoint_override)
        self.assertEqual(my_context.auth_token,
                         cl.httpclient.auth.auth_token)
        self.assertEqual(CONF.neutron.timeout, cl.httpclient.session.timeout)
    def test_withouttoken_keystone_connection_error(self):
        """Without a token, keystone failures surface as an exception."""
        self.flags(url='http://anyhost/', group='neutron')
        my_context = context.RequestContext('userid', 'my_tenantid')
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION,
                          neutronapi.get_client,
                          my_context)
    @mock.patch('nova.network.neutronv2.api._ADMIN_AUTH')
    @mock.patch.object(client.Client, "list_networks", new=mock.Mock())
    def test_reuse_admin_token(self, m):
        """Admin clients must reuse the freshest cached admin token."""
        self.flags(url='http://anyhost/', group='neutron')
        my_context = context.RequestContext('userid', 'my_tenantid',
                                            auth_token='token')
        # Tokens are popped from the end: 'new_token1' first, then
        # 'new_token2'.
        tokens = ['new_token2', 'new_token1']
        def token_vals(*args, **kwargs):
            return tokens.pop()
        m.get_token.side_effect = token_vals
        client1 = neutronapi.get_client(my_context, True)
        client1.list_networks(retrieve_all=False)
        self.assertEqual('new_token1', client1.httpclient.auth.get_token(None))
        client1 = neutronapi.get_client(my_context, True)
        client1.list_networks(retrieve_all=False)
        self.assertEqual('new_token2', client1.httpclient.auth.get_token(None))
class TestNeutronv2Base(test.TestCase):
    def setUp(self):
        """Build the shared fixture data used by the neutronv2 tests.

        Defines fake instances, eleven network permutations
        (self.nets1..nets11, indexed via self.nets), port/floating-ip/
        subnet records, floating-ip pools, and sets up mox stubs for
        neutronapi.get_client.
        """
        super(TestNeutronv2Base, self).setUp()
        self.context = context.RequestContext('userid', 'my_tenantid')
        setattr(self.context,
                'auth_token',
                'bff4a5a6b9eb4ea2a6efec6eefb77936')
        self.tenant_id = '9d049e4b60b64716978ab415e6fbd5c0'
        # Two fake instances; instance2 has no hostname/host on purpose.
        self.instance = {'project_id': self.tenant_id,
                         'uuid': str(uuid.uuid4()),
                         'display_name': 'test_instance',
                         'hostname': 'test-instance',
                         'availability_zone': 'nova',
                         'host': 'some_host',
                         'info_cache': {'network_info': []},
                         'security_groups': []}
        self.instance2 = {'project_id': self.tenant_id,
                          'uuid': str(uuid.uuid4()),
                          'display_name': 'test_instance2',
                          'availability_zone': 'nova',
                          'info_cache': {'network_info': []},
                          'security_groups': []}
        # Network permutations; see self.nets at the end of this method.
        self.nets1 = [{'id': 'my_netid1',
                       'name': 'my_netname1',
                       'subnets': ['mysubnid1'],
                       'tenant_id': 'my_tenantid'}]
        self.nets2 = []
        self.nets2.append(self.nets1[0])
        self.nets2.append({'id': 'my_netid2',
                           'name': 'my_netname2',
                           'subnets': ['mysubnid2'],
                           'tenant_id': 'my_tenantid'})
        self.nets3 = self.nets2 + [{'id': 'my_netid3',
                                    'name': 'my_netname3',
                                    'tenant_id': 'my_tenantid'}]
        self.nets4 = [{'id': 'his_netid4',
                       'name': 'his_netname4',
                       'tenant_id': 'his_tenantid'}]
        # A network request with external networks
        self.nets5 = self.nets1 + [{'id': 'the-external-one',
                                    'name': 'out-of-this-world',
                                    'router:external': True,
                                    'tenant_id': 'should-be-an-admin'}]
        # A network request with a duplicate
        self.nets6 = []
        self.nets6.append(self.nets1[0])
        self.nets6.append(self.nets1[0])
        # A network request with a combo
        self.nets7 = []
        self.nets7.append(self.nets2[1])
        self.nets7.append(self.nets1[0])
        self.nets7.append(self.nets2[1])
        self.nets7.append(self.nets1[0])
        # A network request with only external network
        self.nets8 = [self.nets5[1]]
        # An empty network
        self.nets9 = []
        # A network that is both shared and external
        self.nets10 = [{'id': 'net_id', 'name': 'net_name',
                        'router:external': True, 'shared': True}]
        # A network with non-blank dns_domain to test _update_port_dns_name
        self.nets11 = [{'id': 'my_netid1',
                        'name': 'my_netname1',
                        'subnets': ['mysubnid1'],
                        'tenant_id': 'my_tenantid',
                        'dns_domain': 'my-domain.org.'}]
        # 1-based lookup table used by _stub_allocate_for_instance(net_idx).
        self.nets = [self.nets1, self.nets2, self.nets3, self.nets4,
                     self.nets5, self.nets6, self.nets7, self.nets8,
                     self.nets9, self.nets10, self.nets11]
        # Fake neutron port records and matching floating IPs.
        self.port_address = '10.0.1.2'
        self.port_data1 = [{'network_id': 'my_netid1',
                           'device_id': self.instance2['uuid'],
                           'tenant_id': self.tenant_id,
                           'device_owner': 'compute:nova',
                           'id': 'my_portid1',
                           'binding:vnic_type': model.VNIC_TYPE_NORMAL,
                           'status': 'DOWN',
                           'admin_state_up': True,
                           'fixed_ips': [{'ip_address': self.port_address,
                                          'subnet_id': 'my_subid1'}],
                           'mac_address': 'my_mac1', }]
        self.float_data1 = [{'port_id': 'my_portid1',
                             'fixed_ip_address': self.port_address,
                             'floating_ip_address': '172.0.1.2'}]
        self.dhcp_port_data1 = [{'fixed_ips': [{'ip_address': '10.0.1.9',
                                                'subnet_id': 'my_subid1'}],
                                 'status': 'ACTIVE',
                                 'admin_state_up': True}]
        self.port_address2 = '10.0.2.2'
        self.port_data2 = []
        self.port_data2.append(self.port_data1[0])
        self.port_data2.append({'network_id': 'my_netid2',
                                'device_id': self.instance['uuid'],
                                'tenant_id': self.tenant_id,
                                'admin_state_up': True,
                                'status': 'ACTIVE',
                                'device_owner': 'compute:nova',
                                'id': 'my_portid2',
                                'binding:vnic_type': model.VNIC_TYPE_NORMAL,
                                'fixed_ips':
                                        [{'ip_address': self.port_address2,
                                          'subnet_id': 'my_subid2'}],
                                'mac_address': 'my_mac2', })
        self.float_data2 = []
        self.float_data2.append(self.float_data1[0])
        self.float_data2.append({'port_id': 'my_portid2',
                                 'fixed_ip_address': '10.0.2.2',
                                 'floating_ip_address': '172.0.2.2'})
        self.port_data3 = [{'network_id': 'my_netid1',
                           'device_id': 'device_id3',
                           'tenant_id': self.tenant_id,
                           'status': 'DOWN',
                           'admin_state_up': True,
                           'device_owner': 'compute:nova',
                           'id': 'my_portid3',
                           'binding:vnic_type': model.VNIC_TYPE_NORMAL,
                           'fixed_ips': [],  # no fixed ip
                           'mac_address': 'my_mac3', }]
        # Fake subnet records keyed to the networks above.
        self.subnet_data1 = [{'id': 'my_subid1',
                             'cidr': '10.0.1.0/24',
                             'network_id': 'my_netid1',
                             'gateway_ip': '10.0.1.1',
                             'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
        self.subnet_data2 = []
        self.subnet_data_n = [{'id': 'my_subid1',
                               'cidr': '10.0.1.0/24',
                               'network_id': 'my_netid1',
                               'gateway_ip': '10.0.1.1',
                               'dns_nameservers': ['8.8.1.1', '8.8.1.2']},
                              {'id': 'my_subid2',
                               'cidr': '20.0.1.0/24',
                               'network_id': 'my_netid2',
                               'gateway_ip': '20.0.1.1',
                               'dns_nameservers': ['8.8.1.1', '8.8.1.2']}]
        self.subnet_data2.append({'id': 'my_subid2',
                                  'cidr': '10.0.2.0/24',
                                  'network_id': 'my_netid2',
                                  'gateway_ip': '10.0.2.1',
                                  'dns_nameservers': ['8.8.2.1', '8.8.2.2']})
        # Floating IP pools and sample floating IPs.
        self.fip_pool = {'id': '4fdbfd74-eaf8-4884-90d9-00bd6f10c2d3',
                         'name': 'ext_net',
                         'router:external': True,
                         'tenant_id': 'admin_tenantid'}
        self.fip_pool_nova = {'id': '435e20c3-d9f1-4f1b-bee5-4611a1dd07db',
                              'name': 'nova',
                              'router:external': True,
                              'tenant_id': 'admin_tenantid'}
        self.fip_unassociated = {'tenant_id': 'my_tenantid',
                                 'id': 'fip_id1',
                                 'floating_ip_address': '172.24.4.227',
                                 'floating_network_id': self.fip_pool['id'],
                                 'port_id': None,
                                 'fixed_ip_address': None,
                                 'router_id': None}
        fixed_ip_address = self.port_data2[1]['fixed_ips'][0]['ip_address']
        self.fip_associated = {'tenant_id': 'my_tenantid',
                               'id': 'fip_id2',
                               'floating_ip_address': '172.24.4.228',
                               'floating_network_id': self.fip_pool['id'],
                               'port_id': self.port_data2[1]['id'],
                               'fixed_ip_address': fixed_ip_address,
                               'router_id': 'router_id1'}
        self._returned_nw_info = []
        # Replace neutronapi.get_client with a mox-mocked neutron client.
        self.mox.StubOutWithMock(neutronapi, 'get_client')
        self.moxed_client = self.mox.CreateMock(client.Client)
        self.addCleanup(CONF.reset)
        self.addCleanup(self.mox.VerifyAll)
        self.addCleanup(self.mox.UnsetStubs)
        self.addCleanup(self.stubs.UnsetAll)
def _fake_instance_object(self, instance):
return fake_instance.fake_instance_obj(self.context, **instance)
def _fake_instance_info_cache(self, nw_info, instance_uuid=None):
info_cache = {}
if instance_uuid is None:
info_cache['instance_uuid'] = str(uuid.uuid4())
else:
info_cache['instance_uuid'] = instance_uuid
info_cache['deleted'] = False
info_cache['created_at'] = timeutils.utcnow()
info_cache['deleted_at'] = timeutils.utcnow()
info_cache['updated_at'] = timeutils.utcnow()
info_cache['network_info'] = model.NetworkInfo.hydrate(six.text_type(
jsonutils.dumps(nw_info)))
return info_cache
def _fake_instance_object_with_info_cache(self, instance):
expected_attrs = ['info_cache']
instance = objects.Instance._from_db_object(self.context,
objects.Instance(), fake_instance.fake_db_instance(**instance),
expected_attrs=expected_attrs)
return instance
    def _stub_allocate_for_instance(self, net_idx=1, **kwargs):
        """Record the ordered mox expectations for allocate_for_instance.

        :param net_idx: 1-based index into self.nets selecting the
            network fixture to use.
        :param kwargs: drives which expectations are recorded:
            requested_networks, macs, dhcp_options, portbinding,
            dns_extension, plus test hooks _break (stop recording at a
            named point), _device and _dns_name (port attributes).
        :returns: a neutronapi.API instance with mox in replay mode.
        """
        self.instance = self._fake_instance_object(self.instance)
        self.instance2 = self._fake_instance_object(self.instance2)
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, 'get_instance_nw_info')
        has_portbinding = False
        has_extra_dhcp_opts = False
        dhcp_options = kwargs.get('dhcp_options')
        if dhcp_options is not None:
            has_extra_dhcp_opts = True
        has_dns_extension = False
        # Expect the extension cache refresh; the exact stubs depend on
        # which neutron extensions the test enables.
        if kwargs.get('dns_extension'):
            has_dns_extension = True
            api.extensions[constants.DNS_INTEGRATION] = 1
        if kwargs.get('portbinding'):
            has_portbinding = True
            api.extensions[constants.PORTBINDING_EXT] = 1
            self.mox.StubOutWithMock(api, '_refresh_neutron_extensions_cache')
            neutronapi.get_client(mox.IgnoreArg()).AndReturn(
                self.moxed_client)
            neutronapi.get_client(
                mox.IgnoreArg(), admin=True).AndReturn(
                self.moxed_client)
            api._refresh_neutron_extensions_cache(mox.IgnoreArg(),
                neutron=self.moxed_client)
            self.mox.StubOutWithMock(api, '_has_port_binding_extension')
            api._has_port_binding_extension(mox.IgnoreArg(),
                neutron=self.moxed_client,
                refresh_cache=True).AndReturn(has_portbinding)
        elif has_dns_extension:
            self.mox.StubOutWithMock(api, '_refresh_neutron_extensions_cache')
            api._refresh_neutron_extensions_cache(mox.IgnoreArg(),
                neutron=self.moxed_client)
        else:
            self.mox.StubOutWithMock(api, '_refresh_neutron_extensions_cache')
            api._refresh_neutron_extensions_cache(mox.IgnoreArg(),
                neutron=self.moxed_client)
            self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        # Net idx is 1-based for compatibility with existing unit tests
        nets = self.nets[net_idx - 1]
        ports = {}
        fixed_ips = {}
        macs = kwargs.get('macs')
        if macs:
            macs = set(macs)
        req_net_ids = []
        ordered_networks = []
        # Record show_port expectations for pre-created ports and gather
        # requested network ids / fixed IPs.
        if 'requested_networks' in kwargs:
            for request in kwargs['requested_networks']:
                if request.port_id:
                    if request.port_id == 'my_portid3':
                        self.moxed_client.show_port(request.port_id
                        ).AndReturn(
                            {'port': {'id': 'my_portid3',
                                      'network_id': 'my_netid1',
                                      'tenant_id': self.tenant_id,
                                      'mac_address': 'my_mac1',
                                      'device_id': kwargs.get('_device') and
                                                   self.instance2.uuid or
                                                   ''}})
                        ports['my_netid1'] = [self.port_data1[0],
                                            self.port_data3[0]]
                        ports[request.port_id] = self.port_data3[0]
                        request.network_id = 'my_netid1'
                        if macs is not None:
                            macs.discard('my_mac1')
                    elif request.port_id == 'invalid_id':
                        PortNotFound = exceptions.PortNotFoundClient(
                            status_code=404)
                        self.moxed_client.show_port(request.port_id
                        ).AndRaise(PortNotFound)
                    else:
                        self.moxed_client.show_port(request.port_id).AndReturn(
                            {'port': {'id': 'my_portid1',
                                      'network_id': 'my_netid1',
                                      'tenant_id': self.tenant_id,
                                      'mac_address': 'my_mac1',
                                      'device_id': kwargs.get('_device') and
                                                   self.instance2.uuid or
                                                   '',
                                      'dns_name': kwargs.get('_dns_name') or
                                                  ''}})
                        ports[request.port_id] = self.port_data1[0]
                        request.network_id = 'my_netid1'
                        if macs is not None:
                            macs.discard('my_mac1')
                else:
                    fixed_ips[request.network_id] = request.address
                    req_net_ids.append(request.network_id)
                ordered_networks.append(request)
        else:
            for n in nets:
                ordered_networks.append(
                    objects.NetworkRequest(network_id=n['id']))
        if kwargs.get('_break') == 'pre_list_networks':
            self.mox.ReplayAll()
            return api
        # search all req_net_ids as in api.py
        search_ids = req_net_ids
        if search_ids:
            mox_list_params = {'id': mox.SameElementsAs(search_ids)}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
        else:
            mox_list_params = {'tenant_id': self.instance.project_id,
                               'shared': False}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
            mox_list_params = {'shared': True}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': []})
        if kwargs.get('_break') == 'post_list_networks':
            self.mox.ReplayAll()
            return api
        # An ambiguous request (no specific network, several available)
        # fails before any port is touched.
        if (('requested_networks' not in kwargs or
             kwargs['requested_networks'].as_tuples() == [(None, None, None)])
            and len(nets) > 1):
                self.mox.ReplayAll()
                return api
        # Record update_port/create_port expectations per requested
        # network, in request order.
        preexisting_port_ids = []
        ports_in_requested_net_order = []
        nets_in_requested_net_order = []
        for request in ordered_networks:
            port_req_body = {
                'port': {
                    'device_id': self.instance.uuid,
                    'device_owner': 'compute:nova',
                },
            }
            # Network lookup for available network_id
            network = None
            for net in nets:
                if net['id'] == request.network_id:
                    network = net
                    break
            # if net_id did not pass validate_networks() and not available
            # here then skip it safely not continuing with a None Network
            else:
                continue
            if has_portbinding:
                port_req_body['port']['binding:host_id'] = (
                    self.instance.get('host'))
            if has_dns_extension and not network.get('dns_domain'):
                port_req_body['port']['dns_name'] = self.instance.hostname
            if not has_portbinding and not has_dns_extension:
                api._populate_neutron_extension_values(mox.IgnoreArg(),
                    self.instance, mox.IgnoreArg(),
                    mox.IgnoreArg(), network=network,
                    neutron=self.moxed_client,
                    bind_host_id=None).AndReturn(None)
            elif has_portbinding:
                # since _populate_neutron_extension_values() will call
                # _has_port_binding_extension()
                api._has_port_binding_extension(mox.IgnoreArg(),
                    neutron=self.moxed_client).\
                    AndReturn(has_portbinding)
            else:
                api._refresh_neutron_extensions_cache(mox.IgnoreArg(),
                    neutron=self.moxed_client)
            if request.port_id:
                port = ports[request.port_id]
                self.moxed_client.update_port(request.port_id,
                                              MyComparator(port_req_body)
                                              ).AndReturn(
                                                  {'port': port})
                ports_in_requested_net_order.append(request.port_id)
                preexisting_port_ids.append(request.port_id)
            else:
                request.address = fixed_ips.get(request.network_id)
                if request.address:
                    port_req_body['port']['fixed_ips'] = [
                        {'ip_address': str(request.address)}]
                port_req_body['port']['network_id'] = request.network_id
                port_req_body['port']['admin_state_up'] = True
                port_req_body['port']['tenant_id'] = \
                    self.instance.project_id
                if macs:
                    port_req_body['port']['mac_address'] = macs.pop()
                if has_portbinding:
                    port_req_body['port']['binding:host_id'] = (
                        self.instance.get('host'))
                res_port = {'port': {'id': 'fake'}}
                if has_extra_dhcp_opts:
                    port_req_body['port']['extra_dhcp_opts'] = dhcp_options
                if kwargs.get('_break') == 'mac' + request.network_id:
                    self.mox.ReplayAll()
                    return api
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn(res_port)
                ports_in_requested_net_order.append(res_port['port']['id'])
                if has_portbinding and has_dns_extension:
                    api._has_port_binding_extension(mox.IgnoreArg()).\
                        AndReturn(has_portbinding)
                    if net_idx == 11:
                        # nets11 has a dns_domain, so an extra update_port
                        # setting dns_name is expected.
                        port_req_body_dns = {
                            'port': {
                                'dns_name': self.instance.hostname
                            }
                        }
                        res_port_dns = {
                            'port': {
                                'id': ports_in_requested_net_order[-1]
                            }
                        }
                        self.moxed_client.update_port(
                            ports_in_requested_net_order[-1],
                            MyComparator(port_req_body_dns)
                            ).AndReturn(res_port_dns)
            nets_in_requested_net_order.append(network)
        api.get_instance_nw_info(mox.IgnoreArg(),
                                 self.instance,
                                 networks=nets_in_requested_net_order,
                                 port_ids=ports_in_requested_net_order,
                                 admin_client=None,
                                 preexisting_port_ids=preexisting_port_ids,
                                 update_cells=True
                                ).AndReturn(self._returned_nw_info)
        self.mox.ReplayAll()
        return api
def _verify_nw_info(self, nw_inf, index=0):
id_suffix = index + 1
self.assertEqual('10.0.%s.2' % id_suffix,
nw_inf.fixed_ips()[index]['address'])
self.assertEqual('172.0.%s.2' % id_suffix,
nw_inf.fixed_ips()[index].floating_ip_addresses()[0])
self.assertEqual('my_netname%s' % id_suffix,
nw_inf[index]['network']['label'])
self.assertEqual('my_portid%s' % id_suffix, nw_inf[index]['id'])
self.assertEqual('my_mac%s' % id_suffix, nw_inf[index]['address'])
self.assertEqual('10.0.%s.0/24' % id_suffix,
nw_inf[index]['network']['subnets'][0]['cidr'])
ip_addr = model.IP(address='8.8.%s.1' % id_suffix,
version=4, type='dns')
self.assertIn(ip_addr, nw_inf[index]['network']['subnets'][0]['dns'])
def _get_instance_nw_info(self, number):
api = neutronapi.API()
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(mox.IgnoreArg(),
self.instance['uuid'],
mox.IgnoreArg()).AndReturn(
fake_info_cache)
port_data = number == 1 and self.port_data1 or self.port_data2
net_info_cache = []
for port in port_data:
net_info_cache.append({"network": {"id": port['network_id']},
"id": port['id']})
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data})
net_ids = [port['network_id'] for port in port_data]
nets = number == 1 and self.nets1 or self.nets2
self.moxed_client.list_networks(
id=net_ids).AndReturn({'networks': nets})
for i in range(1, number + 1):
float_data = number == 1 and self.float_data1 or self.float_data2
for ip in port_data[i - 1]['fixed_ips']:
float_data = [x for x in float_data
if x['fixed_ip_address'] == ip['ip_address']]
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=port_data[i - 1]['id']).AndReturn(
{'floatingips': float_data})
subnet_data = i == 1 and self.subnet_data1 or self.subnet_data2
self.moxed_client.list_subnets(
id=mox.SameElementsAs(['my_subid%s' % i])).AndReturn(
{'subnets': subnet_data})
self.moxed_client.list_ports(
network_id=subnet_data[0]['network_id'],
device_owner='network:dhcp').AndReturn(
{'ports': []})
self.instance['info_cache'] = self._fake_instance_info_cache(
net_info_cache, self.instance['uuid'])
self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
api.db.instance_info_cache_get(mox.IgnoreArg(),
self.instance['uuid']).AndReturn(
self.instance['info_cache'])
self.mox.ReplayAll()
instance = self._fake_instance_object_with_info_cache(self.instance)
nw_inf = api.get_instance_nw_info(self.context, instance)
for i in range(0, number):
self._verify_nw_info(nw_inf, i)
    def _allocate_for_instance(self, net_idx=1, **kwargs):
        """Stub the neutron calls for net fixture *net_idx*, then run
        allocate_for_instance() with the same kwargs and return its result.
        """
        api = self._stub_allocate_for_instance(net_idx, **kwargs)
        return api.allocate_for_instance(self.context, self.instance, **kwargs)
class TestNeutronv2(TestNeutronv2Base):
    def setUp(self):
        super(TestNeutronv2, self).setUp()
        # Every test in this class talks to neutron through the same
        # mocked client: any number of get_client() calls returns it.
        neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
    def test_get_instance_nw_info_1(self):
        # Test to get one port in one network and subnet.
        # get_instance_nw_info() also fetches an admin client for the
        # floating IP lookups; return the same mock for it.
        neutronapi.get_client(mox.IgnoreArg(),
                              admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self._get_instance_nw_info(1)
    def test_get_instance_nw_info_2(self):
        # Test to get one port in each of two networks and subnets.
        # The admin client used for floating IP lookups is the same mock.
        neutronapi.get_client(mox.IgnoreArg(),
                              admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self._get_instance_nw_info(2)
def test_get_instance_nw_info_with_nets_add_interface(self):
# This tests that adding an interface to an instance does not
# remove the first instance from the instance.
network_model = model.Network(id='network_id',
bridge='br-int',
injected='injected',
label='fake_network',
tenant_id='fake_tenant')
network_cache = {'info_cache': {
'network_info': [{'id': self.port_data2[0]['id'],
'address': 'mac_address',
'network': network_model,
'type': 'ovs',
'ovs_interfaceid': 'ovs_interfaceid',
'devname': 'devname'}]}}
self._fake_get_instance_nw_info_helper(network_cache,
self.port_data2,
self.nets2,
[self.port_data2[1]['id']])
def test_get_instance_nw_info_remove_ports_from_neutron(self):
# This tests that when a port is removed in neutron it
# is also removed from the nova.
network_model = model.Network(id=self.port_data2[0]['network_id'],
bridge='br-int',
injected='injected',
label='fake_network',
tenant_id='fake_tenant')
network_cache = {'info_cache': {
'network_info': [{'id': 'network_id',
'address': 'mac_address',
'network': network_model,
'type': 'ovs',
'ovs_interfaceid': 'ovs_interfaceid',
'devname': 'devname'}]}}
self._fake_get_instance_nw_info_helper(network_cache,
self.port_data2,
None,
None)
def test_get_instance_nw_info_ignores_neutron_ports(self):
# Tests that only ports in the network_cache are updated
# and ports returned from neutron that match the same
# instance_id/device_id are ignored.
port_data2 = copy.copy(self.port_data2)
# set device_id on the ports to be the same.
port_data2[1]['device_id'] = port_data2[0]['device_id']
network_model = model.Network(id='network_id',
bridge='br-int',
injected='injected',
label='fake_network',
tenant_id='fake_tenant')
network_cache = {'info_cache': {
'network_info': [{'id': 'network_id',
'address': 'mac_address',
'network': network_model,
'type': 'ovs',
'ovs_interfaceid': 'ovs_interfaceid',
'devname': 'devname'}]}}
self._fake_get_instance_nw_info_helper(network_cache,
port_data2,
None,
None)
def test_get_instance_nw_info_ignores_neutron_ports_empty_cache(self):
# Tests that ports returned from neutron that match the same
# instance_id/device_id are ignored when the instance info cache is
# empty.
port_data2 = copy.copy(self.port_data2)
# set device_id on the ports to be the same.
port_data2[1]['device_id'] = port_data2[0]['device_id']
network_cache = {'info_cache': {'network_info': []}}
self._fake_get_instance_nw_info_helper(network_cache,
port_data2,
None,
None)
    def _fake_get_instance_nw_info_helper(self, network_cache,
                                          current_neutron_ports,
                                          networks=None, port_ids=None):
        """Helper function to test get_instance_nw_info.
        :param network_cache - data already in the nova network cache.
        :param current_neutron_ports - updated list of ports from neutron.
        :param networks - networks of ports being added to instance.
        :param port_ids - new ports being added to instance.
        """
        # keep a copy of the original ports/networks to pass to
        # get_instance_nw_info() as the code below changes them.
        original_port_ids = copy.copy(port_ids)
        original_networks = copy.copy(networks)
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg()).AndReturn(fake_info_cache)
        neutronapi.get_client(mox.IgnoreArg(),
                              admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': current_neutron_ports})
        # Interfaces currently recorded in the instance's info cache.
        ifaces = network_cache['info_cache']['network_info']
        if port_ids is None:
            # No new ports being added: only the cached interfaces are
            # looked up.
            port_ids = [iface['id'] for iface in ifaces]
            net_ids = [iface['network']['id'] for iface in ifaces]
            nets = [{'id': iface['network']['id'],
                     'name': iface['network']['label'],
                     'tenant_id': iface['network']['meta']['tenant_id']}
                    for iface in ifaces]
            if networks is None:
                if ifaces:
                    self.moxed_client.list_networks(
                        id=net_ids).AndReturn({'networks': nets})
                else:
                    non_shared_nets = [
                        {'id': iface['network']['id'],
                         'name': iface['network']['label'],
                         'tenant_id': iface['network']['meta']['tenant_id']}
                        for iface in ifaces if not iface['shared']]
                    shared_nets = [
                        {'id': iface['network']['id'],
                         'name': iface['network']['label'],
                         'tenant_id': iface['network']['meta']['tenant_id']}
                        for iface in ifaces if iface['shared']]
                    self.moxed_client.list_networks(
                        shared=False,
                        tenant_id=self.instance['project_id']
                    ).AndReturn({'networks': non_shared_nets})
                    self.moxed_client.list_networks(
                        shared=True).AndReturn({'networks': shared_nets})
        else:
            # New ports go after the cached interfaces; that is the nic
            # ordering get_instance_nw_info() must preserve (checked below).
            networks = networks + [
                dict(id=iface['network']['id'],
                     name=iface['network']['label'],
                     tenant_id=iface['network']['meta']['tenant_id'])
                for iface in ifaces]
            port_ids = [iface['id'] for iface in ifaces] + port_ids
        index = 0
        current_neutron_port_map = {}
        for current_neutron_port in current_neutron_ports:
            current_neutron_port_map[current_neutron_port['id']] = (
                current_neutron_port)
        # Record the per-port floating IP / subnet / DHCP lookups, in the
        # order the code under test issues them. `index` ends up being the
        # number of ports that actually exist in neutron.
        for port_id in port_ids:
            current_neutron_port = current_neutron_port_map.get(port_id)
            if current_neutron_port:
                for ip in current_neutron_port['fixed_ips']:
                    self.moxed_client.list_floatingips(
                        fixed_ip_address=ip['ip_address'],
                        port_id=current_neutron_port['id']).AndReturn(
                            {'floatingips': [self.float_data2[index]]})
                    self.moxed_client.list_subnets(
                        id=mox.SameElementsAs([ip['subnet_id']])
                    ).AndReturn(
                        {'subnets': [self.subnet_data_n[index]]})
                    self.moxed_client.list_ports(
                        network_id=current_neutron_port['network_id'],
                        device_owner='network:dhcp').AndReturn(
                            {'ports': self.dhcp_port_data1})
                index += 1
        self.instance['info_cache'] = self._fake_instance_info_cache(
            network_cache['info_cache']['network_info'], self.instance['uuid'])
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
        api.db.instance_info_cache_get(
            mox.IgnoreArg(),
            self.instance['uuid']).MultipleTimes().AndReturn(
                self.instance['info_cache'])
        self.mox.ReplayAll()
        instance = self._fake_instance_object_with_info_cache(self.instance)
        nw_infs = api.get_instance_nw_info(self.context,
                                           instance,
                                           networks=original_networks,
                                           port_ids=original_port_ids)
        self.assertEqual(index, len(nw_infs))
        # ensure that nic ordering is preserved
        for iface_index in range(index):
            self.assertEqual(port_ids[iface_index],
                             nw_infs[iface_index]['id'])
    def test_get_instance_nw_info_without_subnet(self):
        # Test get instance_nw_info for a port without subnet.
        api = neutronapi.API()
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        api.db.instance_info_cache_update(
            mox.IgnoreArg(),
            self.instance['uuid'], mox.IgnoreArg()).AndReturn(fake_info_cache)
        self.moxed_client.list_ports(
            tenant_id=self.instance['project_id'],
            device_id=self.instance['uuid']).AndReturn(
                {'ports': self.port_data3})
        self.moxed_client.list_networks(
            id=[self.port_data1[0]['network_id']]).AndReturn(
                {'networks': self.nets1})
        neutronapi.get_client(mox.IgnoreArg(),
                              admin=True).MultipleTimes().AndReturn(
            self.moxed_client)
        net_info_cache = []
        for port in self.port_data3:
            net_info_cache.append({"network": {"id": port['network_id']},
                                   "id": port['id']})
        self.instance['info_cache'] = self._fake_instance_info_cache(
            net_info_cache, self.instance['uuid'])
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
        api.db.instance_info_cache_get(
            mox.IgnoreArg(),
            self.instance['uuid']).AndReturn(self.instance['info_cache'])
        self.mox.ReplayAll()
        instance = self._fake_instance_object_with_info_cache(self.instance)
        nw_inf = api.get_instance_nw_info(self.context,
                                          instance)
        id_suffix = 3
        # Without a subnet there are no fixed IPs and no subnet entries,
        # but the port itself is still reported.
        self.assertEqual(0, len(nw_inf.fixed_ips()))
        self.assertEqual('my_netname1', nw_inf[0]['network']['label'])
        self.assertEqual('my_portid%s' % id_suffix, nw_inf[0]['id'])
        self.assertEqual('my_mac%s' % id_suffix, nw_inf[0]['address'])
        self.assertEqual(0, len(nw_inf[0]['network']['subnets']))
    def test_refresh_neutron_extensions_cache(self):
        api = neutronapi.API()
        # Note: Don't want the default get_client from setUp()
        self.mox.ResetAll()
        neutronapi.get_client(mox.IgnoreArg()).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': constants.QOS_QUEUE}]})
        self.mox.ReplayAll()
        api._refresh_neutron_extensions_cache(mox.IgnoreArg())
        # The refreshed cache is keyed by extension name.
        self.assertEqual(
            {constants.QOS_QUEUE: {'name': constants.QOS_QUEUE}},
            api.extensions)
    def test_populate_neutron_extension_values_rxtx_factor(self):
        api = neutronapi.API()
        # Note: Don't want the default get_client from setUp()
        self.mox.ResetAll()
        neutronapi.get_client(mox.IgnoreArg()).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': constants.QOS_QUEUE}]})
        self.mox.ReplayAll()
        flavor = flavors.get_default_flavor()
        flavor['rxtx_factor'] = 1
        instance = objects.Instance(system_metadata={})
        instance.flavor = flavor
        port_req_body = {'port': {}}
        api._populate_neutron_extension_values(self.context, instance,
                                               None, port_req_body)
        # With the QoS queue extension present, the flavor's rxtx_factor
        # is copied into the port request body.
        self.assertEqual(1, port_req_body['port']['rxtx_factor'])
def test_allocate_for_instance_1(self):
# Allocate one port in one network env.
self._allocate_for_instance(1)
def test_allocate_for_instance_2(self):
# Allocate one port in two networks env.
api = self._stub_allocate_for_instance(net_idx=2)
self.assertRaises(exception.NetworkAmbiguous,
api.allocate_for_instance,
self.context, self.instance)
def test_allocate_for_instance_accepts_macs_kwargs_None(self):
# The macs kwarg should be accepted as None.
self._allocate_for_instance(1, macs=None)
def test_allocate_for_instance_accepts_macs_kwargs_set(self):
# The macs kwarg should be accepted, as a set, the
# _allocate_for_instance helper checks that the mac is used to create a
# port.
self._allocate_for_instance(1, macs=set(['ab:cd:ef:01:23:45']))
def test_allocate_for_instance_accepts_only_portid(self):
# Make sure allocate_for_instance works when only a portid is provided
self._returned_nw_info = self.port_data1
result = self._allocate_for_instance(
requested_networks=objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')]))
self.assertEqual(self.port_data1, result)
@mock.patch('nova.network.neutronv2.api.API._unbind_ports')
def test_allocate_for_instance_not_enough_macs_via_ports(self,
mock_unbind):
# using a hypervisor MAC via a pre-created port will stop it being
# used to dynamically create a port on a network. We put the network
# first in requested_networks so that if the code were to not pre-check
# requested ports, it would incorrectly assign the mac and not fail.
requested_networks = objects.NetworkRequestList(
objects = [
objects.NetworkRequest(network_id=self.nets2[1]['id']),
objects.NetworkRequest(port_id='my_portid1')])
api = self._stub_allocate_for_instance(
net_idx=2, requested_networks=requested_networks,
macs=set(['my_mac1']),
_break='mac' + self.nets2[1]['id'])
self.assertRaises(exception.PortNotFree,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks,
macs=set(['my_mac1']))
mock_unbind.assert_called_once_with(self.context, [],
self.moxed_client, mock.ANY)
@mock.patch('nova.network.neutronv2.api.API._unbind_ports')
def test_allocate_for_instance_not_enough_macs(self, mock_unbind):
# If not enough MAC addresses are available to allocate to networks, an
# error should be raised.
# We could pass in macs=set(), but that wouldn't tell us that
# allocate_for_instance tracks used macs properly, so we pass in one
# mac, and ask for two networks.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=self.nets2[1]['id']),
objects.NetworkRequest(network_id=self.nets2[0]['id'])])
api = self._stub_allocate_for_instance(
net_idx=2, requested_networks=requested_networks,
macs=set(['my_mac2']),
_break='mac' + self.nets2[0]['id'])
with mock.patch.object(api, '_delete_ports'):
self.assertRaises(exception.PortNotFree,
api.allocate_for_instance, self.context,
self.instance,
requested_networks=requested_networks,
macs=set(['my_mac2']))
mock_unbind.assert_called_once_with(self.context, [],
self.moxed_client, mock.ANY)
def test_allocate_for_instance_two_macs_two_networks(self):
# If two MACs are available and two networks requested, two new ports
# get made and no exceptions raised.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=self.nets2[1]['id']),
objects.NetworkRequest(network_id=self.nets2[0]['id'])])
self._allocate_for_instance(
net_idx=2, requested_networks=requested_networks,
macs=set(['my_mac2', 'my_mac1']))
def test_allocate_for_instance_mac_conflicting_requested_port(self):
# specify only first and last network
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')])
api = self._stub_allocate_for_instance(
net_idx=1, requested_networks=requested_networks,
macs=set(['unknown:mac']),
_break='pre_list_networks')
self.assertRaises(exception.PortNotUsable,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks,
macs=set(['unknown:mac']))
def test_allocate_for_instance_without_requested_networks(self):
api = self._stub_allocate_for_instance(net_idx=3)
self.assertRaises(exception.NetworkAmbiguous,
api.allocate_for_instance,
self.context, self.instance)
def test_allocate_for_instance_with_requested_non_available_network(self):
"""verify that a non available network is ignored.
self.nets2 (net_idx=2) is composed of self.nets3[0] and self.nets3[1]
Do not create a port on a non available network self.nets3[2].
"""
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=net['id'])
for net in (self.nets3[0], self.nets3[2], self.nets3[1])])
self._allocate_for_instance(net_idx=2,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks(self):
# specify only first and last network
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=net['id'])
for net in (self.nets3[1], self.nets3[0], self.nets3[2])])
self._allocate_for_instance(net_idx=3,
requested_networks=requested_networks)
def test_allocate_for_instance_with_invalid_network_id(self):
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='invalid_id')])
api = self._stub_allocate_for_instance(net_idx=9,
requested_networks=requested_networks,
_break='post_list_networks')
self.assertRaises(exception.NetworkNotFound,
api.allocate_for_instance,
self.context, self.instance,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks_with_fixedip(self):
# specify only first and last network
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id=self.nets1[0]['id'],
address='10.0.1.0')])
self._allocate_for_instance(net_idx=1,
requested_networks=requested_networks)
def test_allocate_for_instance_with_requested_networks_with_port(self):
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')])
self._allocate_for_instance(net_idx=1,
requested_networks=requested_networks)
    def test_allocate_for_instance_no_networks(self):
        """verify the exception thrown when there are no networks defined."""
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        # Neither tenant-owned nor shared networks exist, so allocation
        # completes with empty network info rather than raising.
        self.moxed_client.list_networks(
            tenant_id=self.instance.project_id,
            shared=False).AndReturn(
                {'networks': model.NetworkInfo([])})
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': model.NetworkInfo([])})
        self.mox.ReplayAll()
        nwinfo = api.allocate_for_instance(self.context, self.instance)
        self.assertEqual(0, len(nwinfo))
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    def test_allocate_for_instance_ex1(self,
                                       mock_unbind,
                                       mock_preexisting):
        """verify we will delete created ports
        if we fail to allocate all net resources.
        Mox to raise exception when creating a second port.
        In this case, the code should delete the first created port.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        mock_preexisting.return_value = []
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        neutron=self.moxed_client,
                                        refresh_cache=True).AndReturn(False)
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets2[0], self.nets2[1])])
        self.moxed_client.list_networks(
            id=['my_netid1', 'my_netid2']).AndReturn({'networks': self.nets2})
        # Record one create_port per network: the first succeeds, the
        # second raises an over-quota error.
        index = 0
        for network in self.nets2:
            binding_port_req_body = {
                'port': {
                    'device_id': self.instance.uuid,
                    'device_owner': 'compute:nova',
                },
            }
            port_req_body = {
                'port': {
                    'network_id': network['id'],
                    'admin_state_up': True,
                    'tenant_id': self.instance.project_id,
                },
            }
            port_req_body['port'].update(binding_port_req_body['port'])
            port = {'id': 'portid_' + network['id']}
            api._populate_neutron_extension_values(self.context,
                self.instance, None, binding_port_req_body, network=network,
                neutron=self.moxed_client, bind_host_id=None).AndReturn(None)
            if index == 0:
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndReturn({'port': port})
            else:
                NeutronOverQuota = exceptions.OverQuotaClient()
                self.moxed_client.create_port(
                    MyComparator(port_req_body)).AndRaise(NeutronOverQuota)
            index += 1
        # The successfully created first port must be cleaned up.
        self.moxed_client.delete_port('portid_' + self.nets2[0]['id'])
        self.mox.ReplayAll()
        self.assertRaises(exception.PortLimitExceeded,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
        mock_unbind.assert_called_once_with(self.context, [],
                                            self.moxed_client, mock.ANY)
    def test_allocate_for_instance_ex2(self):
        """verify we have no port to delete
        if we fail to allocate the first net resource.
        Mox to raise exception when creating the first port.
        In this case, the code should not delete any ports.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_populate_neutron_extension_values')
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        neutron=self.moxed_client,
                                        refresh_cache=True).AndReturn(False)
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets2[0], self.nets2[1])])
        self.moxed_client.list_networks(
            id=['my_netid1', 'my_netid2']).AndReturn({'networks': self.nets2})
        binding_port_req_body = {
            'port': {
                'device_id': self.instance.uuid,
                'device_owner': 'compute:nova',
            },
        }
        port_req_body = {
            'port': {
                'network_id': self.nets2[0]['id'],
                'admin_state_up': True,
                'device_id': self.instance.uuid,
                'tenant_id': self.instance.project_id,
            },
        }
        api._populate_neutron_extension_values(self.context,
            self.instance, None, binding_port_req_body,
            network=self.nets2[0], neutron=self.moxed_client,
            bind_host_id=None).AndReturn(None)
        # Only the first create_port is recorded: it raises, so no port
        # ever exists and no delete_port is expected.
        self.moxed_client.create_port(
            MyComparator(port_req_body)).AndRaise(
                Exception("fail to create port"))
        self.mox.ReplayAll()
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION, api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
    def test_allocate_for_instance_no_port_or_network(self):
        class BailOutEarly(Exception):
            pass
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        api = neutronapi.API()
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        self.mox.StubOutWithMock(api, '_get_available_networks')
        # Make sure we get an empty list and then bail out of the rest
        # of the function
        api._get_available_networks(self.context, self.instance.project_id,
                                    [],
                                    neutron=self.moxed_client).\
            AndRaise(BailOutEarly)
        self.mox.ReplayAll()
        # A NetworkRequest with neither port_id nor network_id set.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest()])
        self.assertRaises(BailOutEarly,
                          api.allocate_for_instance,
                          self.context, self.instance,
                          requested_networks=requested_networks)
def test_allocate_for_instance_second_time(self):
# Make sure that allocate_for_instance only returns ports that it
# allocated during _that_ run.
new_port = {'id': 'fake'}
self._returned_nw_info = self.port_data1 + [new_port]
nw_info = self._allocate_for_instance()
self.assertEqual([new_port], nw_info)
def test_allocate_for_instance_port_in_use(self):
# If a port is already in use, an exception should be raised.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')])
api = self._stub_allocate_for_instance(
requested_networks=requested_networks,
_break='pre_list_networks',
_device=True)
self.assertRaises(exception.PortInUse,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks)
def test_allocate_for_instance_port_not_found(self):
# If a port is not found, an exception should be raised.
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='invalid_id')])
api = self._stub_allocate_for_instance(
requested_networks=requested_networks,
_break='pre_list_networks')
self.assertRaises(exception.PortNotFound,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks)
def test_allocate_for_instance_port_invalid_tenantid(self):
self.tenant_id = 'invalid_id'
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(port_id='my_portid1')])
api = self._stub_allocate_for_instance(
requested_networks=requested_networks,
_break='pre_list_networks')
self.assertRaises(exception.PortNotUsable,
api.allocate_for_instance, self.context,
self.instance, requested_networks=requested_networks)
    def test_allocate_for_instance_with_externalnet_forbidden(self):
        """Only one network is available, it's external, and the client
        is unauthorized to use it.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        # no networks in the tenant
        self.moxed_client.list_networks(
            tenant_id=self.instance.project_id,
            shared=False).AndReturn(
                {'networks': model.NetworkInfo([])})
        # external network is shared
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': self.nets8})
        self.mox.ReplayAll()
        api = neutronapi.API()
        # The test context is presumably not authorized for external
        # networks (see base class setUp), so attach must be rejected.
        self.assertRaises(exception.ExternalNetworkAttachForbidden,
                          api.allocate_for_instance,
                          self.context, self.instance)
    def test_allocate_for_instance_with_externalnet_multiple(self):
        """Multiple networks are available, one the client is authorized
        to use, and an external one the client is unauthorized to use.
        """
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        # network found in the tenant
        self.moxed_client.list_networks(
            tenant_id=self.instance.project_id,
            shared=False).AndReturn(
                {'networks': self.nets1})
        # external network is shared
        self.moxed_client.list_networks(shared=True).AndReturn(
            {'networks': self.nets8})
        self.mox.ReplayAll()
        api = neutronapi.API()
        # More than one candidate network and no explicit request:
        # allocation is ambiguous.
        self.assertRaises(
            exception.NetworkAmbiguous,
            api.allocate_for_instance,
            self.context, self.instance)
def test_allocate_for_instance_with_externalnet_admin_ctx(self):
"""Only one network is available, it's external, and the client
is authorized.
"""
admin_ctx = context.RequestContext('userid', 'my_tenantid',
is_admin=True)
api = self._stub_allocate_for_instance(net_idx=8)
api.allocate_for_instance(admin_ctx, self.instance)
def test_allocate_for_instance_with_external_shared_net(self):
"""Only one network is available, it's external and shared."""
ctx = context.RequestContext('userid', 'my_tenantid')
api = self._stub_allocate_for_instance(net_idx=10)
api.allocate_for_instance(ctx, self.instance)
def _deallocate_for_instance(self, number, requested_networks=None):
# TODO(mriedem): Remove this conversion when all neutronv2 APIs are
# converted to handling instance objects.
self.instance = fake_instance.fake_instance_obj(self.context,
**self.instance)
api = neutronapi.API()
port_data = number == 1 and self.port_data1 or self.port_data2
ports = {port['id'] for port in port_data}
ret_data = copy.deepcopy(port_data)
if requested_networks:
if isinstance(requested_networks, objects.NetworkRequestList):
# NOTE(danms): Temporary and transitional
with mock.patch('nova.utils.is_neutron', return_value=True):
requested_networks = requested_networks.as_tuples()
for net, fip, port, request_id in requested_networks:
ret_data.append({'network_id': net,
'device_id': self.instance.uuid,
'device_owner': 'compute:nova',
'id': port,
'status': 'DOWN',
'admin_state_up': True,
'fixed_ips': [],
'mac_address': 'fake_mac', })
self.moxed_client.list_ports(
device_id=self.instance.uuid).AndReturn(
{'ports': ret_data})
self.moxed_client.list_extensions().AndReturn({'extensions': []})
if requested_networks:
for net, fip, port, request_id in requested_networks:
self.moxed_client.update_port(port)
for port in ports:
self.moxed_client.delete_port(port).InAnyOrder("delete_port_group")
self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
api.db.instance_info_cache_update(self.context,
self.instance.uuid,
{'network_info': '[]'}).AndReturn(
fake_info_cache)
self.mox.ReplayAll()
api = neutronapi.API()
api.deallocate_for_instance(self.context, self.instance,
requested_networks=requested_networks)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_for_instance_1_with_requested(self, mock_preexisting):
mock_preexisting.return_value = []
requested = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='fake-net',
address='1.2.3.4',
port_id='fake-port')])
# Test to deallocate in one port env.
self._deallocate_for_instance(1, requested_networks=requested)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_for_instance_2_with_requested(self, mock_preexisting):
mock_preexisting.return_value = []
requested = objects.NetworkRequestList(
objects=[objects.NetworkRequest(network_id='fake-net',
address='1.2.3.4',
port_id='fake-port')])
# Test to deallocate in one port env.
self._deallocate_for_instance(2, requested_networks=requested)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_for_instance_1(self, mock_preexisting):
mock_preexisting.return_value = []
# Test to deallocate in one port env.
self._deallocate_for_instance(1)
@mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
def test_deallocate_for_instance_2(self, mock_preexisting):
mock_preexisting.return_value = []
# Test to deallocate in two ports env.
self._deallocate_for_instance(2)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_for_instance_port_not_found(self,
                                                    mock_preexisting):
        # TODO(mriedem): Remove this conversion when all neutronv2 APIs are
        # converted to handling instance objects.
        self.instance = fake_instance.fake_instance_obj(self.context,
                                                        **self.instance)
        mock_preexisting.return_value = []
        port_data = self.port_data1
        self.moxed_client.list_ports(
            device_id=self.instance.uuid).AndReturn(
                {'ports': port_data})
        self.moxed_client.list_extensions().AndReturn({'extensions': []})
        # A 404 from neutron while deleting a port must be tolerated:
        # deallocate_for_instance() should complete without raising.
        NeutronNotFound = exceptions.NeutronClientException(status_code=404)
        for port in reversed(port_data):
            self.moxed_client.delete_port(port['id']).AndRaise(
                NeutronNotFound)
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.deallocate_for_instance(self.context, self.instance)
def _test_deallocate_port_for_instance(self, number):
port_data = number == 1 and self.port_data1 or self.port_data2
nets = number == 1 and self.nets1 or self.nets2
self.moxed_client.delete_port(port_data[0]['id'])
net_info_cache = []
for port in port_data:
net_info_cache.append({"network": {"id": port['network_id']},
"id": port['id']})
self.instance['info_cache'] = self._fake_instance_info_cache(
net_info_cache, self.instance['uuid'])
api = neutronapi.API()
neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
self.moxed_client)
self.moxed_client.list_ports(
tenant_id=self.instance['project_id'],
device_id=self.instance['uuid']).AndReturn(
{'ports': port_data[1:]})
neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
self.moxed_client)
net_ids = [port['network_id'] for port in port_data]
self.moxed_client.list_networks(id=net_ids).AndReturn(
{'networks': nets})
float_data = number == 1 and self.float_data1 or self.float_data2
for data in port_data[1:]:
for ip in data['fixed_ips']:
self.moxed_client.list_floatingips(
fixed_ip_address=ip['ip_address'],
port_id=data['id']).AndReturn(
{'floatingips': float_data[1:]})
for port in port_data[1:]:
self.moxed_client.list_subnets(id=['my_subid2']).AndReturn({})
self.mox.StubOutWithMock(api.db, 'instance_info_cache_get')
api.db.instance_info_cache_get(mox.IgnoreArg(),
self.instance['uuid']).AndReturn(
self.instance['info_cache'])
self.mox.ReplayAll()
instance = self._fake_instance_object_with_info_cache(self.instance)
nwinfo = api.deallocate_port_for_instance(self.context, instance,
port_data[0]['id'])
self.assertEqual(len(port_data[1:]), len(nwinfo))
if len(port_data) > 1:
self.assertEqual('my_netid2', nwinfo[0]['network']['id'])
    def test_deallocate_port_for_instance_1(self):
        """Deallocate the single port of a one-port instance."""
        # Test to deallocate the first and only port
        self._test_deallocate_port_for_instance(1)
    def test_deallocate_port_for_instance_2(self):
        """Deallocate the first of an instance's two ports."""
        # Test to deallocate the first port of two
        self._test_deallocate_port_for_instance(2)
    def test_list_ports(self):
        """list_ports must forward arbitrary search options to neutron."""
        search_opts = {'parm': 'value'}
        self.moxed_client.list_ports(**search_opts)
        self.mox.ReplayAll()
        neutronapi.API().list_ports(self.context, **search_opts)
    def test_show_port(self):
        """show_port must proxy the port id straight to the neutron client."""
        self.moxed_client.show_port('foo').AndReturn(
            {'port': self.port_data1[0]})
        self.mox.ReplayAll()
        neutronapi.API().show_port(self.context, 'foo')
    def test_validate_networks(self):
        """Two existing networks with free port quota validate cleanly."""
        requested_networks = [('my_netid1', None, None, None),
                              ('my_netid2', None, None, None)]
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                {'quota': {'port': 50}})
        self.moxed_client.list_ports(
            tenant_id='my_tenantid', fields=['id']).AndReturn(
                {'ports': []})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_validate_networks_without_port_quota_on_network_side(self):
        """Validation succeeds when neutron reports no port quota at all."""
        requested_networks = [('my_netid1', None, None, None),
                              ('my_netid2', None, None, None)]
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        # Empty quota dict: no 'port' key, so no port-count check is made.
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                {'quota': {}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_validate_networks_ex_1(self):
        """Validate a single requested network against self.nets1."""
        requested_networks = [('my_netid1', None, None, None)]
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(['my_netid1'])).AndReturn(
                {'networks': self.nets1})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                {'quota': {'port': 50}})
        self.moxed_client.list_ports(
            tenant_id='my_tenantid', fields=['id']).AndReturn(
                {'ports': []})
        self.mox.ReplayAll()
        api = neutronapi.API()
        # NOTE(review): if NetworkNotFound is never raised this try/except
        # passes vacuously (no self.fail() on the success path) — confirm
        # whether the exception branch is ever expected to run here.
        try:
            api.validate_networks(self.context, requested_networks, 1)
        except exception.NetworkNotFound as ex:
            self.assertIn("my_netid2", six.text_type(ex))
def test_validate_networks_ex_2(self):
requested_networks = [('my_netid1', None, None, None),
('my_netid2', None, None, None),
('my_netid3', None, None, None)]
ids = ['my_netid1', 'my_netid2', 'my_netid3']
self.moxed_client.list_networks(
id=mox.SameElementsAs(ids)).AndReturn(
{'networks': self.nets1})
self.mox.ReplayAll()
api = neutronapi.API()
try:
api.validate_networks(self.context, requested_networks, 1)
except exception.NetworkNotFound as ex:
self.assertIn("my_netid2", six.text_type(ex))
self.assertIn("my_netid3", six.text_type(ex))
    def test_validate_networks_duplicate_enable(self):
        """Duplicate network ids in a request must validate without error."""
        # Verify that no duplicateNetworks exception is thrown when duplicate
        # network ids are passed to validate_networks.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid1')])
        ids = ['my_netid1', 'my_netid1']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                 {'networks': self.nets1})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                {'quota': {'port': 50}})
        self.moxed_client.list_ports(
            tenant_id='my_tenantid', fields=['id']).AndReturn(
                 {'ports': []})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_allocate_for_instance_with_requested_networks_duplicates(self):
        """Allocation succeeds when the same network is requested twice."""
        # specify a duplicate network to allocate to instance
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id=net['id'])
                     for net in (self.nets6[0], self.nets6[1])])
        self._allocate_for_instance(net_idx=6,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_requested_networks_duplicates_port(self):
        """Allocation succeeds for two requested ports on the same network."""
        # specify first port and last port that are in same network
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port['id'])
                     for port in (self.port_data1[0], self.port_data3[0])])
        self._allocate_for_instance(net_idx=6,
                                    requested_networks=requested_networks)
    def test_allocate_for_instance_requested_networks_duplicates_combo(self):
        """Allocation succeeds for a mix of duplicate network and port
        requests.
        """
        # specify a combo net_idx=7 : net2, port in net1, net2, port in net1
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid2'),
                     objects.NetworkRequest(port_id=self.port_data1[0]['id']),
                     objects.NetworkRequest(network_id='my_netid2'),
                     objects.NetworkRequest(port_id=self.port_data3[0]['id'])])
        self._allocate_for_instance(net_idx=7,
                                    requested_networks=requested_networks)
    def test_validate_networks_not_specified(self):
        """With no requested networks and several candidates available,
        validation must raise NetworkAmbiguous.
        """
        requested_networks = objects.NetworkRequestList(objects=[])
        self.moxed_client.list_networks(
            tenant_id=self.context.project_id,
            shared=False).AndReturn(
                {'networks': self.nets1})
        self.moxed_client.list_networks(
            shared=True).AndReturn(
                {'networks': self.nets2})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.NetworkAmbiguous,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_port_not_found(self):
        """A neutron 404 on the requested port maps to nova PortNotFound."""
        # Verify that the correct exception is thrown when a non existent
        # port is passed to validate_networks.
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(
                network_id='my_netid1',
                port_id='3123-ad34-bc43-32332ca33e')])
        PortNotFound = exceptions.PortNotFoundClient()
        self.moxed_client.show_port(requested_networks[0].port_id).AndRaise(
            PortNotFound)
        self.mox.ReplayAll()
        # Expected call from setUp.
        neutronapi.get_client(None)
        api = neutronapi.API()
        self.assertRaises(exception.PortNotFound,
                          api.validate_networks,
                          self.context, requested_networks, 1)
def test_validate_networks_port_show_raises_non404(self):
# Verify that the correct exception is thrown when a non existent
# port is passed to validate_networks.
fake_port_id = '3123-ad34-bc43-32332ca33e'
requested_networks = objects.NetworkRequestList(
objects=[objects.NetworkRequest(
network_id='my_netid1',
port_id=fake_port_id)])
NeutronNotFound = exceptions.NeutronClientException(status_code=0)
self.moxed_client.show_port(requested_networks[0].port_id).AndRaise(
NeutronNotFound)
self.mox.ReplayAll()
# Expected call from setUp.
neutronapi.get_client(None)
api = neutronapi.API()
exc = self.assertRaises(exception.NovaException,
api.validate_networks,
self.context, requested_networks, 1)
expected_exception_message = ('Failed to access port %(port_id)s: '
'An unknown exception occurred.' %
{'port_id': fake_port_id})
self.assertEqual(expected_exception_message, str(exc))
    def test_validate_networks_port_in_use(self):
        """A port that already has a device attached must raise PortInUse."""
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=self.port_data3[0]['id'])])
        self.moxed_client.show_port(self.port_data3[0]['id']).\
            AndReturn({'port': self.port_data3[0]})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.PortInUse,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_port_no_subnet_id(self):
        """A free port without fixed IPs must raise PortRequiresFixedIP."""
        port_a = self.port_data3[0]
        # Clear device fields so the port is considered unattached.
        port_a['device_id'] = None
        port_a['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.PortRequiresFixedIP,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_no_subnet_id(self):
        """A network without subnets must raise NetworkRequiresSubnet."""
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='his_netid4')])
        ids = ['his_netid4']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets4})
        self.mox.ReplayAll()
        api = neutronapi.API()
        self.assertRaises(exception.NetworkRequiresSubnet,
                          api.validate_networks,
                          self.context, requested_networks, 1)
    def test_validate_networks_ports_in_same_network_enable(self):
        """Two free ports on the same network must validate cleanly."""
        # Verify that duplicateNetworks exception is not thrown when ports
        # on same duplicate network are passed to validate_networks.
        port_a = self.port_data3[0]
        port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
                               'subnet_id': 'subnet_id'}
        port_b = self.port_data1[0]
        self.assertEqual(port_a['network_id'], port_b['network_id'])
        # Clear device fields so both ports are considered unattached.
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id']),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn(
                                                 {'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn(
                                                 {'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_validate_networks_ports_not_in_same_network(self):
        """Two free ports on different networks must validate cleanly."""
        port_a = self.port_data3[0]
        port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
                               'subnet_id': 'subnet_id'}
        port_b = self.port_data2[1]
        self.assertNotEqual(port_a['network_id'], port_b['network_id'])
        # Clear device fields so both ports are considered unattached.
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id']),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        api.validate_networks(self.context, requested_networks, 1)
    def test_validate_networks_no_quota(self):
        """Exhausted port quota yields max_count of zero."""
        # Test validation for a request for one instance needing
        # two ports, where the quota is 2 and 2 ports are in use
        # => instances which can be created = 0
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid2')])
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                {'quota': {'port': 2}})
        self.moxed_client.list_ports(
            tenant_id='my_tenantid', fields=['id']).AndReturn(
                {'ports': self.port_data2})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(0, max_count)
    def test_validate_networks_with_ports_and_networks(self):
        """Mixing a pre-created port with a nova-allocated network still
        allows one instance when quota headroom exists.
        """
        # Test validation for a request for one instance needing
        # one port allocated via nova with another port being passed in.
        port_b = self.port_data2[1]
        port_b['device_id'] = None
        port_b['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        ids = ['my_netid1']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets1})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                {'quota': {'port': 5}})
        self.moxed_client.list_ports(
            tenant_id='my_tenantid', fields=['id']).AndReturn(
                {'ports': self.port_data2})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(1, max_count)
    def test_validate_networks_one_port_and_no_networks(self):
        """A ports-only request skips the quota check entirely."""
        # Test that show quota is not called if no networks are
        # passed in and only ports.
        port_b = self.port_data2[1]
        port_b['device_id'] = None
        port_b['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(1, max_count)
    def test_validate_networks_some_quota(self):
        """Partial quota headroom limits how many instances fit."""
        # Test validation for a request for two instance needing
        # two ports each, where the quota is 5 and 2 ports are in use
        # => instances which can be created = 1
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid2')])
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                {'quota': {'port': 5}})
        self.moxed_client.list_ports(
            tenant_id='my_tenantid', fields=['id']).AndReturn(
                {'ports': self.port_data2})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 2)
        self.assertEqual(1, max_count)
    def test_validate_networks_unlimited_quota(self):
        """Unlimited quota allows the full requested instance count."""
        # Test validation for a request for two instance needing
        # two ports each, where the quota is -1 (unlimited)
        # => instances which can be created = 2 (the full requested count)
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(network_id='my_netid1'),
                     objects.NetworkRequest(network_id='my_netid2')])
        ids = ['my_netid1', 'my_netid2']
        self.moxed_client.list_networks(
            id=mox.SameElementsAs(ids)).AndReturn(
                {'networks': self.nets2})
        self.moxed_client.show_quota(
            tenant_id='my_tenantid').AndReturn(
                {'quota': {'port': -1}})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 2)
        self.assertEqual(2, max_count)
    def test_validate_networks_no_quota_but_ports_supplied(self):
        """Pre-created ports bypass the quota check, so validation allows
        the instance even without querying quota.
        """
        port_a = self.port_data3[0]
        port_a['fixed_ips'] = {'ip_address': '10.0.0.2',
                               'subnet_id': 'subnet_id'}
        port_b = self.port_data2[1]
        self.assertNotEqual(port_a['network_id'], port_b['network_id'])
        # Clear device fields so both ports are considered unattached.
        for port in [port_a, port_b]:
            port['device_id'] = None
            port['device_owner'] = None
        requested_networks = objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id=port_a['id']),
                     objects.NetworkRequest(port_id=port_b['id'])])
        self.moxed_client.show_port(port_a['id']).AndReturn({'port': port_a})
        self.moxed_client.show_port(port_b['id']).AndReturn({'port': port_b})
        self.mox.ReplayAll()
        api = neutronapi.API()
        max_count = api.validate_networks(self.context,
                                          requested_networks, 1)
        self.assertEqual(1, max_count)
    def _mock_list_ports(self, port_data=None):
        """Record a list_ports expectation keyed on self.port_address and
        return that address for the caller's assertion.
        """
        if port_data is None:
            port_data = self.port_data2
        address = self.port_address
        self.moxed_client.list_ports(
            fixed_ips=MyComparator('ip_address=%s' % address)).AndReturn(
                {'ports': port_data})
        self.mox.ReplayAll()
        return address
    def test_get_fixed_ip_by_address_fails_for_no_ports(self):
        """No matching port means FixedIpNotFoundForAddress."""
        address = self._mock_list_ports(port_data=[])
        api = neutronapi.API()
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          api.get_fixed_ip_by_address,
                          self.context, address)
    def test_get_fixed_ip_by_address_succeeds_for_1_port(self):
        """Exactly one matching port maps to its instance uuid."""
        address = self._mock_list_ports(port_data=self.port_data1)
        api = neutronapi.API()
        result = api.get_fixed_ip_by_address(self.context, address)
        self.assertEqual(self.instance2['uuid'], result['instance_uuid'])
    def test_get_fixed_ip_by_address_fails_for_more_than_1_port(self):
        """Multiple matching ports raise
        FixedIpAssociatedWithMultipleInstances.
        """
        address = self._mock_list_ports()
        api = neutronapi.API()
        self.assertRaises(exception.FixedIpAssociatedWithMultipleInstances,
                          api.get_fixed_ip_by_address,
                          self.context, address)
    def _get_available_networks(self, prv_nets, pub_nets,
                                req_ids=None, context=None):
        """Exercise API._get_available_networks.

        With req_ids, a single id-filtered list_networks call is expected;
        otherwise two calls are expected (tenant-private then shared) and
        the result must be their concatenation.
        """
        api = neutronapi.API()
        nets = prv_nets + pub_nets
        if req_ids:
            mox_list_params = {'id': req_ids}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': nets})
        else:
            mox_list_params = {'tenant_id': self.instance['project_id'],
                               'shared': False}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': prv_nets})
            mox_list_params = {'shared': True}
            self.moxed_client.list_networks(
                **mox_list_params).AndReturn({'networks': pub_nets})
        self.mox.ReplayAll()
        rets = api._get_available_networks(
            context if context else self.context,
            self.instance['project_id'],
            req_ids)
        self.assertEqual(nets, rets)
    def test_get_available_networks_all_private(self):
        """Only tenant-private networks are available."""
        self._get_available_networks(prv_nets=self.nets2, pub_nets=[])
    def test_get_available_networks_all_public(self):
        """Only shared (public) networks are available."""
        self._get_available_networks(prv_nets=[], pub_nets=self.nets2)
    def test_get_available_networks_private_and_public(self):
        """Private and shared networks are combined in the result."""
        self._get_available_networks(prv_nets=self.nets1, pub_nets=self.nets4)
    def test_get_available_networks_with_network_ids(self):
        """Explicit network ids restrict the lookup to those networks."""
        prv_nets = [self.nets3[0]]
        pub_nets = [self.nets3[-1]]
        # specify only first and last network
        req_ids = [net['id'] for net in (self.nets3[0], self.nets3[-1])]
        self._get_available_networks(prv_nets, pub_nets, req_ids)
    def test_get_available_networks_with_custom_policy(self):
        """An open attach_external_network policy exposes external nets."""
        rules = {'network:attach_external_network': ''}
        policy.set_rules(oslo_policy.Rules.from_dict(rules))
        req_ids = [net['id'] for net in self.nets5]
        self._get_available_networks(self.nets5, pub_nets=[], req_ids=req_ids)
    def test_get_floating_ip_pools(self):
        """Floating IP pools are the names of router:external networks."""
        api = neutronapi.API()
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.mox.ReplayAll()
        pools = api.get_floating_ip_pools(self.context)
        expected = [self.fip_pool['name'], self.fip_pool_nova['name']]
        self.assertEqual(expected, pools)
    def _get_expected_fip_model(self, fip_data, idx=0):
        """Build the nova-side floating-ip dict expected for *fip_data*.

        idx selects which entry of self.port_data2 plays the associated
        port; an unassociated fip (no port_id) yields instance=None.
        """
        expected = {'id': fip_data['id'],
                    'address': fip_data['floating_ip_address'],
                    'pool': self.fip_pool['name'],
                    'project_id': fip_data['tenant_id'],
                    'fixed_ip_id': fip_data['port_id'],
                    'fixed_ip':
                        {'address': fip_data['fixed_ip_address']},
                    'instance': ({'uuid': self.port_data2[idx]['device_id']}
                                 if fip_data['port_id']
                                 else None)}
        if expected['instance'] is not None:
            expected['fixed_ip']['instance_uuid'] = \
                expected['instance']['uuid']
        return expected
    def _test_get_floating_ip(self, fip_data, idx=0, by_address=False):
        """Fetch a floating IP by id or address and compare against the
        expected nova-side model.
        """
        api = neutronapi.API()
        fip_id = fip_data['id']
        net_id = fip_data['floating_network_id']
        address = fip_data['floating_ip_address']
        if by_address:
            self.moxed_client.list_floatingips(floating_ip_address=address).\
                AndReturn({'floatingips': [fip_data]})
        else:
            self.moxed_client.show_floatingip(fip_id).\
                AndReturn({'floatingip': fip_data})
        self.moxed_client.show_network(net_id).\
            AndReturn({'network': self.fip_pool})
        # The associated port is only looked up for an attached fip.
        if fip_data['port_id']:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[idx]})
        self.mox.ReplayAll()
        expected = self._get_expected_fip_model(fip_data, idx)
        if by_address:
            fip = api.get_floating_ip_by_address(self.context, address)
        else:
            fip = api.get_floating_ip(self.context, fip_id)
        self.assertEqual(expected, fip)
    def test_get_floating_ip_unassociated(self):
        """Fetch an unassociated floating IP by id."""
        self._test_get_floating_ip(self.fip_unassociated, idx=0)
    def test_get_floating_ip_associated(self):
        """Fetch an associated floating IP by id."""
        self._test_get_floating_ip(self.fip_associated, idx=1)
    def test_get_floating_ip_by_address(self):
        """Fetch an unassociated floating IP by its address."""
        self._test_get_floating_ip(self.fip_unassociated, idx=0,
                                   by_address=True)
    def test_get_floating_ip_by_address_associated(self):
        """Fetch an associated floating IP by its address."""
        self._test_get_floating_ip(self.fip_associated, idx=1,
                                   by_address=True)
    def test_get_floating_ip_by_address_not_found(self):
        """No fip for the address raises FloatingIpNotFoundForAddress."""
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': []})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ip_by_id_not_found(self):
        """A neutron 404 on show_floatingip maps to FloatingIpNotFound."""
        api = neutronapi.API()
        NeutronNotFound = exceptions.NeutronClientException(status_code=404)
        floating_ip_id = self.fip_unassociated['id']
        self.moxed_client.show_floatingip(floating_ip_id).\
            AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpNotFound,
                          api.get_floating_ip,
                          self.context, floating_ip_id)
def test_get_floating_ip_raises_non404(self):
api = neutronapi.API()
NeutronNotFound = exceptions.NeutronClientException(status_code=0)
floating_ip_id = self.fip_unassociated['id']
self.moxed_client.show_floatingip(floating_ip_id).\
AndRaise(NeutronNotFound)
self.mox.ReplayAll()
self.assertRaises(exceptions.NeutronClientException,
api.get_floating_ip,
self.context, floating_ip_id)
    def test_get_floating_ip_by_address_multiple_found(self):
        """Duplicate fips for one address raise
        FloatingIpMultipleFoundForAddress.
        """
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated] * 2})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpMultipleFoundForAddress,
                          api.get_floating_ip_by_address,
                          self.context, address)
    def test_get_floating_ips_by_project(self):
        """Project fips are joined with pools and ports into nova models."""
        api = neutronapi.API()
        project_id = self.context.project_id
        self.moxed_client.list_floatingips(tenant_id=project_id).\
            AndReturn({'floatingips': [self.fip_unassociated,
                                       self.fip_associated]})
        search_opts = {'router:external': True}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool, self.fip_pool_nova]})
        self.moxed_client.list_ports(tenant_id=project_id).\
                AndReturn({'ports': self.port_data2})
        self.mox.ReplayAll()
        expected = [self._get_expected_fip_model(self.fip_unassociated),
                    self._get_expected_fip_model(self.fip_associated, idx=1)]
        fips = api.get_floating_ips_by_project(self.context)
        self.assertEqual(expected, fips)
    def _test_get_instance_id_by_floating_address(self, fip_data,
                                                  associated=False):
        """Resolve a floating address to its instance id (None when the
        fip is not associated with a port).
        """
        api = neutronapi.API()
        address = fip_data['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [fip_data]})
        if associated:
            self.moxed_client.show_port(fip_data['port_id']).\
                AndReturn({'port': self.port_data2[1]})
        self.mox.ReplayAll()
        if associated:
            expected = self.port_data2[1]['device_id']
        else:
            expected = None
        fip = api.get_instance_id_by_floating_address(self.context, address)
        self.assertEqual(expected, fip)
    def test_get_instance_id_by_floating_address(self):
        """An unassociated fip resolves to no instance."""
        self._test_get_instance_id_by_floating_address(self.fip_unassociated)
    def test_get_instance_id_by_floating_address_associated(self):
        """An associated fip resolves to the port's device_id."""
        self._test_get_instance_id_by_floating_address(self.fip_associated,
                                                       associated=True)
    def test_allocate_floating_ip(self):
        """Allocating by pool name creates a fip on that external network."""
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, 'ext_net')
        self.assertEqual(self.fip_unassociated['floating_ip_address'], fip)
    def test_allocate_floating_ip_addr_gen_fail(self):
        """IpAddressGenerationFailureClient maps to NoMoreFloatingIps."""
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndRaise(exceptions.IpAddressGenerationFailureClient)
        self.mox.ReplayAll()
        self.assertRaises(exception.NoMoreFloatingIps,
                          api.allocate_floating_ip, self.context, 'ext_net')
    def test_allocate_floating_ip_exhausted_fail(self):
        """ExternalIpAddressExhaustedClient maps to NoMoreFloatingIps."""
        api = neutronapi.API()
        pool_name = self.fip_pool['name']
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndRaise(exceptions.ExternalIpAddressExhaustedClient)
        self.mox.ReplayAll()
        self.assertRaises(exception.NoMoreFloatingIps,
                          api.allocate_floating_ip, self.context, 'ext_net')
    def test_allocate_floating_ip_with_pool_id(self):
        """The pool argument may also be a network id, not just a name."""
        api = neutronapi.API()
        pool_id = self.fip_pool['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'id': pool_id}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context, pool_id)
        self.assertEqual(self.fip_unassociated['floating_ip_address'], fip)
    def test_allocate_floating_ip_with_default_pool(self):
        """Omitting the pool falls back to the configured default pool."""
        api = neutronapi.API()
        pool_name = self.fip_pool_nova['name']
        pool_id = self.fip_pool_nova['id']
        search_opts = {'router:external': True,
                       'fields': 'id',
                       'name': pool_name}
        self.moxed_client.list_networks(**search_opts).\
            AndReturn({'networks': [self.fip_pool_nova]})
        self.moxed_client.create_floatingip(
            {'floatingip': {'floating_network_id': pool_id}}).\
            AndReturn({'floatingip': self.fip_unassociated})
        self.mox.ReplayAll()
        fip = api.allocate_floating_ip(self.context)
        self.assertEqual(self.fip_unassociated['floating_ip_address'], fip)
    def test_release_floating_ip(self):
        """Releasing an unassociated fip deletes it in neutron."""
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fip_id = self.fip_unassociated['id']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.delete_floatingip(fip_id)
        self.mox.ReplayAll()
        api.release_floating_ip(self.context, address)
    def test_disassociate_and_release_floating_ip(self):
        """Disassociate-and-release on an unassociated fip just deletes it."""
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fip_id = self.fip_unassociated['id']
        floating_ip = {'address': address}
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.delete_floatingip(fip_id)
        self.mox.ReplayAll()
        api.disassociate_and_release_floating_ip(self.context, None,
                                               floating_ip)
    def test_release_floating_ip_associated(self):
        """Releasing a fip still attached to a port must raise
        FloatingIpAssociated.
        """
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.mox.ReplayAll()
        self.assertRaises(exception.FloatingIpAssociated,
                          api.release_floating_ip, self.context, address)
    def _setup_mock_for_refresh_cache(self, api, instances):
        """Stub the nw-info fetch and info-cache DB update that the API
        performs when refreshing each instance's network cache.
        """
        nw_info = model.NetworkInfo()
        self.mox.StubOutWithMock(api, '_get_instance_nw_info')
        self.mox.StubOutWithMock(api.db, 'instance_info_cache_update')
        for instance in instances:
            api._get_instance_nw_info(mox.IgnoreArg(), instance).\
                AndReturn(nw_info)
            api.db.instance_info_cache_update(mox.IgnoreArg(),
                                              instance['uuid'],
                                              mox.IgnoreArg()).AndReturn(
                                                  fake_info_cache)
    def test_associate_floating_ip(self):
        """Associating a fip updates it with the instance's port and fixed
        address and refreshes the instance's network cache.
        """
        api = neutronapi.API()
        address = self.fip_unassociated['floating_ip_address']
        fixed_address = self.port_address2
        fip_id = self.fip_unassociated['id']
        instance = self._fake_instance_object(self.instance)
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': instance.uuid}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[1]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_unassociated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': self.fip_associated['port_id'],
                                    'fixed_ip_address': fixed_address}})
        self._setup_mock_for_refresh_cache(api, [instance])
        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, instance,
                                  address, fixed_address)
    @mock.patch('nova.objects.Instance.get_by_uuid')
    def test_reassociate_floating_ip(self, mock_get):
        """Re-associating an already-attached fip moves it to the new
        instance's port and refreshes both instances' network caches.
        """
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        new_fixed_address = self.port_address
        fip_id = self.fip_associated['id']
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance2['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[0]]})
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': 'my_portid1',
                                    'fixed_ip_address': new_fixed_address}})
        # The old port is shown to find the previous owner instance.
        self.moxed_client.show_port(self.fip_associated['port_id']).\
                AndReturn({'port': self.port_data2[1]})
        mock_get.return_value = fake_instance.fake_instance_obj(
            self.context, **self.instance)
        instance2 = self._fake_instance_object(self.instance2)
        self._setup_mock_for_refresh_cache(api, [mock_get.return_value,
                                                 instance2])
        self.mox.ReplayAll()
        api.associate_floating_ip(self.context, instance2,
                                  address, new_fixed_address)
    def test_associate_floating_ip_not_found_fixed_ip(self):
        """Associating to a fixed address not owned by the instance's port
        raises FixedIpNotFoundForAddress.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        fixed_address = self.fip_associated['fixed_ip_address']
        search_opts = {'device_owner': 'compute:nova',
                       'device_id': self.instance['uuid']}
        self.moxed_client.list_ports(**search_opts).\
            AndReturn({'ports': [self.port_data2[0]]})
        self.mox.ReplayAll()
        self.assertRaises(exception.FixedIpNotFoundForAddress,
                          api.associate_floating_ip, self.context,
                          instance, address, fixed_address)
    def test_disassociate_floating_ip(self):
        """Disassociating clears the fip's port_id and refreshes the
        instance's network cache.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        address = self.fip_associated['floating_ip_address']
        fip_id = self.fip_associated['id']
        self.moxed_client.list_floatingips(floating_ip_address=address).\
            AndReturn({'floatingips': [self.fip_associated]})
        self.moxed_client.update_floatingip(
            fip_id, {'floatingip': {'port_id': None}})
        self._setup_mock_for_refresh_cache(api, [instance])
        self.mox.ReplayAll()
        api.disassociate_floating_ip(self.context, instance, address)
    def test_add_fixed_ip_to_instance(self):
        """Adding a fixed IP appends a second fixed_ip entry (same subnet)
        to the instance's existing port via update_port.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        self._setup_mock_for_refresh_cache(api, [instance])
        network_id = 'my_netid1'
        search_opts = {'network_id': network_id}
        self.moxed_client.list_subnets(
            **search_opts).AndReturn({'subnets': self.subnet_data_n})
        search_opts = {'device_id': instance.uuid,
                       'device_owner': 'compute:nova',
                       'network_id': network_id}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        port_req_body = {
            'port': {
                'fixed_ips': [{'subnet_id': 'my_subid1'},
                              {'subnet_id': 'my_subid1'}],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = [{'subnet_id': 'my_subid1'}]
        self.moxed_client.update_port('my_portid1',
                                      MyComparator(port_req_body)).AndReturn(
                                          {'port': port})
        self.mox.ReplayAll()
        api.add_fixed_ip_to_instance(self.context,
                                     instance,
                                     network_id)
    def test_remove_fixed_ip_from_instance(self):
        """Removing a fixed IP rewrites the port with an empty fixed_ips
        list via update_port.
        """
        instance = self._fake_instance_object(self.instance)
        api = neutronapi.API()
        self._setup_mock_for_refresh_cache(api, [instance])
        address = '10.0.0.3'
        zone = 'compute:%s' % self.instance['availability_zone']
        search_opts = {'device_id': self.instance['uuid'],
                       'device_owner': zone,
                       'fixed_ips': 'ip_address=%s' % address}
        self.moxed_client.list_ports(
            **search_opts).AndReturn({'ports': self.port_data1})
        port_req_body = {
            'port': {
                'fixed_ips': [],
            },
        }
        port = self.port_data1[0]
        port['fixed_ips'] = []
        self.moxed_client.update_port('my_portid1',
                                      MyComparator(port_req_body)).AndReturn(
                                          {'port': port})
        self.mox.ReplayAll()
        api.remove_fixed_ip_from_instance(self.context, instance,
                                          address)
    def test_list_floating_ips_without_l3_support(self):
        """A NotFound from list_floatingips (no L3 extension) yields an
        empty list rather than an error.
        """
        api = neutronapi.API()
        NeutronNotFound = exceptions.NotFound()
        self.moxed_client.list_floatingips(
            fixed_ip_address='1.1.1.1', port_id=1).AndRaise(NeutronNotFound)
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        floatingips = api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 1)
        self.assertEqual([], floatingips)
    def test_nw_info_get_ips(self):
        """_nw_info_get_ips attaches the port's floating IPs to each
        fixed IP entry.
        """
        fake_port = {
            'fixed_ips': [
                {'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            }
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_floating_ips_by_fixed_and_port')
        api._get_floating_ips_by_fixed_and_port(
            self.moxed_client, '1.1.1.1', 'port-id').AndReturn(
                [{'floating_ip_address': '10.0.0.1'}])
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        result = api._nw_info_get_ips(self.moxed_client, fake_port)
        self.assertEqual(1, len(result))
        self.assertEqual('1.1.1.1', result[0]['address'])
        self.assertEqual('10.0.0.1', result[0]['floating_ips'][0]['address'])
    def test_nw_info_get_subnets(self):
        """_nw_info_get_subnets buckets the port's IPs into the subnets
        returned by _get_subnets_from_port; only the 1.1.1.1 address falls
        inside the 1.0.0.0/8 subnet here.
        """
        fake_port = {
            'fixed_ips': [
                {'ip_address': '1.1.1.1'},
                {'ip_address': '2.2.2.2'}],
            'id': 'port-id',
            }
        fake_subnet = model.Subnet(cidr='1.0.0.0/8')
        fake_ips = [model.IP(x['ip_address']) for x in fake_port['fixed_ips']]
        api = neutronapi.API()
        self.mox.StubOutWithMock(api, '_get_subnets_from_port')
        api._get_subnets_from_port(self.context, fake_port).AndReturn(
            [fake_subnet])
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        subnets = api._nw_info_get_subnets(self.context, fake_port, fake_ips)
        self.assertEqual(1, len(subnets))
        self.assertEqual(1, len(subnets[0]['ips']))
        self.assertEqual('1.1.1.1', subnets[0]['ips'][0]['address'])
    def _test_nw_info_build_network(self, vif_type):
        """Common helper: build network info for a port with the given
        vif_type and verify the network metadata (id, label, tenant, mtu,
        injected); returns (net, iid) for vif-type-specific assertions.
        """
        fake_port = {
            'fixed_ips': [{'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            'network_id': 'net-id',
            'binding:vif_type': vif_type,
            }
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [{'id': 'net-id', 'name': 'foo', 'tenant_id': 'tenant',
                      'mtu': 9000}]
        api = neutronapi.API()
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        net, iid = api._nw_info_build_network(fake_port, fake_nets,
                                              fake_subnets)
        self.assertEqual(fake_subnets, net['subnets'])
        self.assertEqual('net-id', net['id'])
        self.assertEqual('foo', net['label'])
        self.assertEqual('tenant', net.get_meta('tenant_id'))
        self.assertEqual(9000, net.get_meta('mtu'))
        self.assertEqual(CONF.flat_injected, net.get_meta('injected'))
        return net, iid
    def test_nw_info_build_network_ovs(self):
        """OVS vif: bridge comes from CONF.neutron.ovs_bridge and the
        interface id is the port id.
        """
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_OVS)
        self.assertEqual(CONF.neutron.ovs_bridge, net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertEqual('port-id', iid)
    def test_nw_info_build_network_dvs(self):
        """DVS vif: bridge is the network id, no OVS interface id."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_DVS)
        self.assertEqual('net-id', net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertNotIn('ovs_interfaceid', net)
        self.assertIsNone(iid)
    def test_nw_info_build_network_bridge(self):
        """Linux bridge vif: bridge name is 'brq' + network id and the
        bridge must be created.
        """
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_BRIDGE)
        self.assertEqual('brqnet-id', net['bridge'])
        self.assertTrue(net['should_create_bridge'])
        self.assertIsNone(iid)
    def test_nw_info_build_network_tap(self):
        """Tap vif: no bridge and no bridge creation requested."""
        net, iid = self._test_nw_info_build_network(model.VIF_TYPE_TAP)
        self.assertIsNone(net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertIsNone(iid)
    def test_nw_info_build_network_other(self):
        """Unknown vif type (None): no bridge info is produced."""
        net, iid = self._test_nw_info_build_network(None)
        self.assertIsNone(net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertIsNone(iid)
    def test_nw_info_build_no_match(self):
        """When the port's network_id is not in the supplied nets list, the
        network is still built from the port's own ids (tenant taken from
        the port).
        """
        fake_port = {
            'fixed_ips': [{'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            'network_id': 'net-id1',
            'tenant_id': 'tenant',
            'binding:vif_type': model.VIF_TYPE_OVS,
            }
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        # Deliberately a different network id than the port's.
        fake_nets = [{'id': 'net-id2', 'name': 'foo', 'tenant_id': 'tenant'}]
        api = neutronapi.API()
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        net, iid = api._nw_info_build_network(fake_port, fake_nets,
                                              fake_subnets)
        self.assertEqual(fake_subnets, net['subnets'])
        self.assertEqual('net-id1', net['id'])
        self.assertEqual('tenant', net['meta']['tenant_id'])
    def test_nw_info_build_network_vhostuser(self):
        """vhost-user vif with OVS plug: treated like OVS — bridge from
        CONF.neutron.ovs_bridge and interface id is the port id.
        """
        fake_port = {
            'fixed_ips': [{'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            'network_id': 'net-id',
            'binding:vif_type': model.VIF_TYPE_VHOSTUSER,
            'binding:vif_details': {
                    model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True
                }
            }
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [{'id': 'net-id', 'name': 'foo', 'tenant_id': 'tenant'}]
        api = neutronapi.API()
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        net, iid = api._nw_info_build_network(fake_port, fake_nets,
                                              fake_subnets)
        self.assertEqual(fake_subnets, net['subnets'])
        self.assertEqual('net-id', net['id'])
        self.assertEqual('foo', net['label'])
        self.assertEqual('tenant', net.get_meta('tenant_id'))
        self.assertEqual(CONF.flat_injected, net.get_meta('injected'))
        self.assertEqual(CONF.neutron.ovs_bridge, net['bridge'])
        self.assertNotIn('should_create_bridge', net)
        self.assertEqual('port-id', iid)
    def _test_nw_info_build_custom_bridge(self, vif_type, extra_details=None):
        """Common helper: a VIF_DETAILS_BRIDGE_NAME in vif_details overrides
        the default bridge for the given vif_type.  extra_details, if given,
        is merged into binding:vif_details.
        """
        fake_port = {
            'fixed_ips': [{'ip_address': '1.1.1.1'}],
            'id': 'port-id',
            'network_id': 'net-id',
            'binding:vif_type': vif_type,
            'binding:vif_details': {
                model.VIF_DETAILS_BRIDGE_NAME: 'custom-bridge',
            }
        }
        if extra_details:
            fake_port['binding:vif_details'].update(extra_details)
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [{'id': 'net-id', 'name': 'foo', 'tenant_id': 'tenant'}]
        api = neutronapi.API()
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        net, iid = api._nw_info_build_network(fake_port, fake_nets,
                                              fake_subnets)
        self.assertNotEqual(CONF.neutron.ovs_bridge, net['bridge'])
        self.assertEqual('custom-bridge', net['bridge'])
    def test_nw_info_build_custom_ovs_bridge(self):
        """OVS vif honours a custom bridge name from vif_details."""
        self._test_nw_info_build_custom_bridge(model.VIF_TYPE_OVS)
    def test_nw_info_build_custom_ovs_bridge_vhostuser(self):
        """vhost-user/OVS-plug vif honours a custom bridge name."""
        self._test_nw_info_build_custom_bridge(model.VIF_TYPE_VHOSTUSER,
                {model.VIF_DETAILS_VHOSTUSER_OVS_PLUG: True})
    def test_nw_info_build_custom_lb_bridge(self):
        """Linux bridge vif honours a custom bridge name from vif_details."""
        self._test_nw_info_build_custom_bridge(model.VIF_TYPE_BRIDGE)
    def test_build_network_info_model(self):
        """_build_network_info_model returns VIFs in the requested port-id
        order, computes vif.active from admin_state_up/status, defaults
        vnic_type when the port has none, ignores ports on unknown networks
        (port6), and marks preserve_on_delete for ports that either came in
        via preexisting_port_ids ('port3') or are reported preexisting by
        _get_preexisting_port_ids ('port5').
        """
        api = neutronapi.API()
        fake_inst = objects.Instance()
        fake_inst.project_id = 'fake'
        fake_inst.uuid = 'uuid'
        fake_inst.info_cache = objects.InstanceInfoCache()
        fake_inst.info_cache.network_info = model.NetworkInfo()
        fake_ports = [
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port1',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:01',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            # admin_state_up=False and status='DOWN' thus vif.active=True
            {'id': 'port2',
             'network_id': 'net-id',
             'admin_state_up': False,
             'status': 'DOWN',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:02',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            # admin_state_up=True and status='DOWN' thus vif.active=False
            {'id': 'port0',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'DOWN',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:03',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port3',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:04',
             'binding:vif_type': model.VIF_TYPE_HW_VEB,
             'binding:vnic_type': model.VNIC_TYPE_DIRECT,
             'binding:profile': {'pci_vendor_info': '1137:0047',
                                 'pci_slot': '0000:0a:00.1',
                                 'physical_network': 'phynet1'},
             'binding:vif_details': {model.VIF_DETAILS_PROFILEID: 'pfid'},
             },
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port4',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:05',
             'binding:vif_type': model.VIF_TYPE_802_QBH,
             'binding:vnic_type': model.VNIC_TYPE_MACVTAP,
             'binding:profile': {'pci_vendor_info': '1137:0047',
                                 'pci_slot': '0000:0a:00.2',
                                 'physical_network': 'phynet1'},
             'binding:vif_details': {model.VIF_DETAILS_PROFILEID: 'pfid'},
             },
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            # This port has no binding:vnic_type to verify default is assumed
            {'id': 'port5',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:06',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             # No binding:vnic_type
             'binding:vif_details': {},
             },
            # This does not match the networks we provide below,
            # so it should be ignored (and is here to verify that)
            {'id': 'port6',
             'network_id': 'other-net-id',
             'admin_state_up': True,
             'status': 'DOWN',
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             },
            ]
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        fake_nets = [
            {'id': 'net-id',
             'name': 'foo',
             'tenant_id': 'fake',
             }
            ]
        neutronapi.get_client(mox.IgnoreArg(), admin=True).MultipleTimes(
            ).AndReturn(self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id='fake', device_id='uuid').AndReturn(
                {'ports': fake_ports})
        self.mox.StubOutWithMock(api, '_get_floating_ips_by_fixed_and_port')
        self.mox.StubOutWithMock(api, '_get_subnets_from_port')
        # Expected result order: port0 first, then port1..port5 (port6 is
        # dropped because 'other-net-id' is not in fake_nets).
        requested_ports = [fake_ports[2], fake_ports[0], fake_ports[1],
                           fake_ports[3], fake_ports[4], fake_ports[5]]
        for requested_port in requested_ports:
            api._get_floating_ips_by_fixed_and_port(
                self.moxed_client, '1.1.1.1', requested_port['id']).AndReturn(
                    [{'floating_ip_address': '10.0.0.1'}])
        for requested_port in requested_ports:
            api._get_subnets_from_port(self.context, requested_port
                                       ).AndReturn(fake_subnets)
        self.mox.StubOutWithMock(api, '_get_preexisting_port_ids')
        api._get_preexisting_port_ids(fake_inst).AndReturn(['port5'])
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        fake_inst.info_cache = objects.InstanceInfoCache.new(
            self.context, 'fake-uuid')
        fake_inst.info_cache.network_info = model.NetworkInfo.hydrate([])
        nw_infos = api._build_network_info_model(
            self.context, fake_inst,
            fake_nets,
            [fake_ports[2]['id'],
             fake_ports[0]['id'],
             fake_ports[1]['id'],
             fake_ports[3]['id'],
             fake_ports[4]['id'],
             fake_ports[5]['id']],
            preexisting_port_ids=['port3'])
        self.assertEqual(6, len(nw_infos))
        index = 0
        for nw_info in nw_infos:
            self.assertEqual(requested_ports[index]['mac_address'],
                             nw_info['address'])
            self.assertEqual('tapport' + str(index), nw_info['devname'])
            self.assertIsNone(nw_info['ovs_interfaceid'])
            self.assertEqual(requested_ports[index]['binding:vif_type'],
                             nw_info['type'])
            if nw_info['type'] == model.VIF_TYPE_BRIDGE:
                self.assertEqual('brqnet-id', nw_info['network']['bridge'])
            self.assertEqual(requested_ports[index].get('binding:vnic_type',
                model.VNIC_TYPE_NORMAL), nw_info['vnic_type'])
            self.assertEqual(requested_ports[index].get('binding:vif_details'),
                             nw_info.get('details'))
            self.assertEqual(requested_ports[index].get('binding:profile'),
                             nw_info.get('profile'))
            index += 1
        self.assertFalse(nw_infos[0]['active'])
        self.assertTrue(nw_infos[1]['active'])
        self.assertTrue(nw_infos[2]['active'])
        self.assertTrue(nw_infos[3]['active'])
        self.assertTrue(nw_infos[4]['active'])
        self.assertTrue(nw_infos[5]['active'])
        self.assertEqual('port0', nw_infos[0]['id'])
        self.assertEqual('port1', nw_infos[1]['id'])
        self.assertEqual('port2', nw_infos[2]['id'])
        self.assertEqual('port3', nw_infos[3]['id'])
        self.assertEqual('port4', nw_infos[4]['id'])
        self.assertEqual('port5', nw_infos[5]['id'])
        self.assertFalse(nw_infos[0]['preserve_on_delete'])
        self.assertFalse(nw_infos[1]['preserve_on_delete'])
        self.assertFalse(nw_infos[2]['preserve_on_delete'])
        self.assertTrue(nw_infos[3]['preserve_on_delete'])
        self.assertFalse(nw_infos[4]['preserve_on_delete'])
        self.assertTrue(nw_infos[5]['preserve_on_delete'])
    @mock.patch('nova.network.neutronv2.api.API._nw_info_get_subnets')
    @mock.patch('nova.network.neutronv2.api.API._nw_info_get_ips')
    @mock.patch('nova.network.neutronv2.api.API._nw_info_build_network')
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.API._gather_port_ids_and_networks')
    def test_build_network_info_model_empty(
            self, mock_gather_port_ids_and_networks,
            mock_get_preexisting_port_ids,
            mock_nw_info_build_network,
            mock_nw_info_get_ips,
            mock_nw_info_get_subnets):
        # An empty instance info network cache should not be populated from
        # ports found in Neutron: with _gather_port_ids_and_networks
        # returning ([], []), the result must stay empty even though
        # list_ports returns a usable port.
        api = neutronapi.API()
        fake_inst = objects.Instance()
        fake_inst.project_id = 'fake'
        fake_inst.uuid = 'uuid'
        fake_inst.info_cache = objects.InstanceInfoCache()
        fake_inst.info_cache.network_info = model.NetworkInfo()
        fake_ports = [
            # admin_state_up=True and status='ACTIVE' thus vif.active=True
            {'id': 'port1',
             'network_id': 'net-id',
             'admin_state_up': True,
             'status': 'ACTIVE',
             'fixed_ips': [{'ip_address': '1.1.1.1'}],
             'mac_address': 'de:ad:be:ef:00:01',
             'binding:vif_type': model.VIF_TYPE_BRIDGE,
             'binding:vnic_type': model.VNIC_TYPE_NORMAL,
             'binding:vif_details': {},
             },
            ]
        fake_subnets = [model.Subnet(cidr='1.0.0.0/8')]
        neutronapi.get_client(mox.IgnoreArg(), admin=True).MultipleTimes(
            ).AndReturn(self.moxed_client)
        self.moxed_client.list_ports(
            tenant_id='fake', device_id='uuid').AndReturn(
                {'ports': fake_ports})
        mock_gather_port_ids_and_networks.return_value = ([], [])
        mock_get_preexisting_port_ids.return_value = []
        mock_nw_info_build_network.return_value = (None, None)
        mock_nw_info_get_ips.return_value = []
        mock_nw_info_get_subnets.return_value = fake_subnets
        self.mox.ReplayAll()
        neutronapi.get_client('fake')
        nw_infos = api._build_network_info_model(
            self.context, fake_inst)
        self.assertEqual(0, len(nw_infos))
    def test_get_subnets_from_port(self):
        """_get_subnets_from_port converts the Neutron subnet's host_routes
        into route objects with matching cidr and gateway address.
        """
        api = neutronapi.API()
        port_data = copy.copy(self.port_data1[0])
        subnet_data1 = copy.copy(self.subnet_data1)
        subnet_data1[0]['host_routes'] = [
            {'destination': '192.168.0.0/24', 'nexthop': '1.0.0.10'}
        ]
        self.moxed_client.list_subnets(
            id=[port_data['fixed_ips'][0]['subnet_id']]
        ).AndReturn({'subnets': subnet_data1})
        # No DHCP ports on the network in this scenario.
        self.moxed_client.list_ports(
            network_id=subnet_data1[0]['network_id'],
            device_owner='network:dhcp').AndReturn({'ports': []})
        self.mox.ReplayAll()
        subnets = api._get_subnets_from_port(self.context, port_data)
        self.assertEqual(1, len(subnets))
        self.assertEqual(1, len(subnets[0]['routes']))
        self.assertEqual(subnet_data1[0]['host_routes'][0]['destination'],
                         subnets[0]['routes'][0]['cidr'])
        self.assertEqual(subnet_data1[0]['host_routes'][0]['nexthop'],
                         subnets[0]['routes'][0]['gateway']['address'])
    def test_get_all_empty_list_networks(self):
        """get_all returns an empty NetworkList when Neutron has none."""
        api = neutronapi.API()
        self.moxed_client.list_networks().AndReturn({'networks': []})
        self.mox.ReplayAll()
        networks = api.get_all(self.context)
        self.assertIsInstance(networks, objects.NetworkList)
        self.assertEqual(0, len(networks))
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_1(self, mock_get_client):
        """_get_port_vnic_info returns the port's vnic_type and looks up the
        network's provider:physical_network for a direct (SR-IOV) port.
        """
        api = neutronapi.API()
        self.mox.ResetAll()
        test_port = {
            'port': {'id': 'my_port_id1',
                     'network_id': 'net-id',
                     'binding:vnic_type': model.VNIC_TYPE_DIRECT,
                    },
            }
        test_net = {'network': {'provider:physical_network': 'phynet1'}}
        mock_client = mock_get_client()
        mock_client.show_port.return_value = test_port
        mock_client.show_network.return_value = test_net
        vnic_type, phynet_name = api._get_port_vnic_info(
            self.context, mock_client, test_port['port']['id'])
        mock_client.show_port.assert_called_once_with(test_port['port']['id'],
            fields=['binding:vnic_type', 'network_id'])
        mock_client.show_network.assert_called_once_with(
            test_port['port']['network_id'],
            fields='provider:physical_network')
        self.assertEqual(model.VNIC_TYPE_DIRECT, vnic_type)
        self.assertEqual('phynet1', phynet_name)
    def _test_get_port_vnic_info(self, mock_get_client,
                                 binding_vnic_type=None):
        """Common helper: for a normal (or unset) vnic_type the result is
        VNIC_TYPE_NORMAL with no physical network name, and show_network is
        never consulted.
        """
        api = neutronapi.API()
        self.mox.ResetAll()
        test_port = {
            'port': {'id': 'my_port_id2',
                     'network_id': 'net-id',
                    },
            }
        if binding_vnic_type:
            test_port['port']['binding:vnic_type'] = binding_vnic_type
        mock_get_client.reset_mock()
        mock_client = mock_get_client()
        mock_client.show_port.return_value = test_port
        vnic_type, phynet_name = api._get_port_vnic_info(
            self.context, mock_client, test_port['port']['id'])
        mock_client.show_port.assert_called_once_with(test_port['port']['id'],
            fields=['binding:vnic_type', 'network_id'])
        self.assertEqual(model.VNIC_TYPE_NORMAL, vnic_type)
        self.assertFalse(phynet_name)
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_2(self, mock_get_client):
        """Explicit VNIC_TYPE_NORMAL on the port."""
        self._test_get_port_vnic_info(mock_get_client,
                                      binding_vnic_type=model.VNIC_TYPE_NORMAL)
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_get_port_vnic_info_3(self, mock_get_client):
        """No binding:vnic_type on the port — normal is assumed."""
        self._test_get_port_vnic_info(mock_get_client)
    @mock.patch.object(neutronapi.API, "_get_port_vnic_info")
    @mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
    def test_create_pci_requests_for_sriov_ports(self, mock_get_client,
                                                 mock_get_port_vnic_info):
        """PCI requests are created only for SR-IOV-capable vnic types
        (direct/macvtap), and those NetworkRequests get a pci_request_id;
        normal ports and plain network requests do not.
        """
        api = neutronapi.API()
        self.mox.ResetAll()
        requested_networks = objects.NetworkRequestList(
            objects = [
                objects.NetworkRequest(port_id='my_portid1'),
                objects.NetworkRequest(network_id='net1'),
                objects.NetworkRequest(port_id='my_portid2'),
                objects.NetworkRequest(port_id='my_portid3'),
                objects.NetworkRequest(port_id='my_portid4')])
        pci_requests = objects.InstancePCIRequests(requests=[])
        # One (vnic_type, physical_network) pair per port_id request, in
        # order; the plain network_id request does no vnic lookup.
        mock_get_port_vnic_info.side_effect = [
                (model.VNIC_TYPE_DIRECT, 'phynet1'),
                (model.VNIC_TYPE_NORMAL, ''),
                (model.VNIC_TYPE_MACVTAP, 'phynet1'),
                (model.VNIC_TYPE_MACVTAP, 'phynet2')
            ]
        api.create_pci_requests_for_sriov_ports(
            None, pci_requests, requested_networks)
        self.assertEqual(3, len(pci_requests.requests))
        has_pci_request_id = [net.pci_request_id is not None for net in
                              requested_networks.objects]
        expected_results = [True, False, False, True, True]
        self.assertEqual(expected_results, has_pci_request_id)
class TestNeutronv2WithMock(test.TestCase):
"""Used to test Neutron V2 API with mock."""
    def setUp(self):
        """Create the API under test and a request context for each test."""
        super(TestNeutronv2WithMock, self).setUp()
        self.api = neutronapi.API()
        self.context = context.RequestContext(
            'fake-user', 'fake-project',
            auth_token='bff4a5a6b9eb4ea2a6efec6eefb77936')
    @mock.patch('oslo_concurrency.lockutils.lock')
    def test_get_instance_nw_info_locks_per_instance(self, mock_lock):
        """get_instance_nw_info takes a per-instance refresh_cache-<uuid>
        lock; the raising side_effect proves the lock is acquired first.
        """
        instance = objects.Instance(uuid=uuid.uuid4())
        api = neutronapi.API()
        mock_lock.side_effect = test.TestingException
        self.assertRaises(test.TestingException,
                          api.get_instance_nw_info, 'context', instance)
        mock_lock.assert_called_once_with('refresh_cache-%s' % instance.uuid)
    @mock.patch('nova.network.neutronv2.api.LOG')
    def test_get_instance_nw_info_verify_duplicates_ignored(self, mock_log):
        """Verify that _gather_port_ids_and_networks drops duplicates.

        The test fakes an instance with two ports connected to two networks.
        _gather_port_ids_and_networks is called with the instance, a list of
        port ids of which one is already configured on the instance
        (duplicate #1), and a list of networks that already contains a
        network an instance port is connected to (duplicate #2).

        The resulting port id list should contain 3 items
        (["instance_port_1", "port_1", "port_2"]) and the resulting networks
        list 3 items (["net_1", "net_2", "instance_network_1"]), with the
        duplicate warning logged twice (once per duplicate).
        """
        networks = [model.Network(id="net_1"),
                    model.Network(id="net_2")]
        port_ids = ["port_1", "port_2"]
        instance_networks = [{"id": "instance_network_1",
                              "name": "fake_network",
                              "tenant_id": "fake_tenant_id"}]
        instance_port_ids = ["instance_port_1"]
        # The cached network_info already holds port_1/net_1 (the
        # duplicates) plus the instance's own port and network.
        network_info = model.NetworkInfo(
            [{'id': port_ids[0],
              'network': networks[0]},
             {'id': instance_port_ids[0],
              'network': model.Network(
                  id=instance_networks[0]["id"],
                  label=instance_networks[0]["name"],
                  meta={"tenant_id": instance_networks[0]["tenant_id"]})}]
        )
        instance_uuid = uuid.uuid4()
        instance = objects.Instance(uuid=instance_uuid,
                                    info_cache=objects.InstanceInfoCache(
                                        context=self.context,
                                        instance_uuid=instance_uuid,
                                        network_info=network_info))
        new_networks, new_port_ids = self.api._gather_port_ids_and_networks(
            self.context, instance, networks, port_ids)
        self.assertEqual(new_networks, networks + instance_networks)
        self.assertEqual(new_port_ids, instance_port_ids + port_ids)
        self.assertEqual(2, mock_log.warning.call_count)
    @mock.patch('oslo_concurrency.lockutils.lock')
    @mock.patch.object(neutronapi.API, '_get_instance_nw_info')
    @mock.patch('nova.network.base_api.update_instance_cache_with_nw_info')
    def test_get_instance_nw_info(self, mock_update, mock_get, mock_lock):
        """get_instance_nw_info delegates to _get_instance_nw_info, refreshes
        the instance cache with the result (update_cells=False), and returns
        the same value.
        """
        fake_result = mock.sentinel.get_nw_info_result
        mock_get.return_value = fake_result
        instance = fake_instance.fake_instance_obj(self.context)
        result = self.api.get_instance_nw_info(self.context, instance)
        mock_get.assert_called_once_with(self.context, instance)
        mock_update.assert_called_once_with(self.api, self.context, instance,
                                            nw_info=fake_result,
                                            update_cells=False)
        self.assertEqual(fake_result, result)
    def _test_validate_networks_fixed_ip_no_dup(self, nets, requested_networks,
                                                ids, list_port_values):
        """Common helper: validate_networks succeeds when every expected
        list_ports query (given as (search_opts, result) pairs in
        list_port_values) returns no conflicting port; fails the test on
        any unexpected list_ports call.
        """
        def _fake_list_ports(**search_opts):
            for args, return_value in list_port_values:
                if args == search_opts:
                    return return_value
            self.fail('Unexpected call to list_ports %s' % search_opts)
        with test.nested(
            mock.patch.object(client.Client, 'list_ports',
                              side_effect=_fake_list_ports),
            mock.patch.object(client.Client, 'list_networks',
                              return_value={'networks': nets}),
            mock.patch.object(client.Client, 'show_quota',
                              return_value={'quota': {'port': 50}})) as (
                list_ports_mock, list_networks_mock, show_quota_mock):
            self.api.validate_networks(self.context, requested_networks, 1)
            self.assertEqual(len(list_port_values),
                             len(list_ports_mock.call_args_list))
            list_networks_mock.assert_called_once_with(id=ids)
            show_quota_mock.assert_called_once_with(tenant_id='fake-project')
    def test_validate_networks_over_limit_quota(self):
        """Validate that PortLimitExceeded is raised, with the expected
        message, when more ports are already defined than the quota allows.
        """
        requested_networks = [('my_netid1', '10.0.1.2', None, None),
                              ('my_netid2', '10.0.1.3', None, None)]
        list_port_values = [({'network_id': 'my_netid1',
                              'fixed_ips': 'ip_address=10.0.1.2',
                              'fields': 'device_id'},
                             {'ports': []}),
                            ({'network_id': 'my_netid2',
                              'fixed_ips': 'ip_address=10.0.1.3',
                              'fields': 'device_id'},
                             {'ports': []}),
                            # 5 existing ports against a quota of 1 below.
                            ({'tenant_id': 'fake-project', 'fields': ['id']},
                             {'ports': [1, 2, 3, 4, 5]})]
        nets = [{'subnets': '1'}, {'subnets': '2'}]
        def _fake_list_ports(**search_opts):
            for args, return_value in list_port_values:
                if args == search_opts:
                    return return_value
        with test.nested(
            mock.patch.object(self.api, '_get_available_networks',
                              return_value=nets),
            mock.patch.object(client.Client, 'list_ports',
                              side_effect=_fake_list_ports),
            mock.patch.object(client.Client, 'show_quota',
                              return_value={'quota': {'port': 1}})):
            exc = self.assertRaises(exception.PortLimitExceeded,
                                    self.api.validate_networks,
                                    self.context, requested_networks, 1)
            expected_exception_msg = ('The number of defined ports: '
                                      '%(ports)d is over the limit: '
                                      '%(quota)d' %
                                      {'ports': 5,
                                       'quota': 1})
            self.assertEqual(expected_exception_msg, str(exc))
    def test_validate_networks_fixed_ip_no_dup1(self):
        # Test validation for a request for a network with a
        # fixed ip that is not already in use because no fixed ips in use
        nets1 = [{'id': 'my_netid1',
                  'name': 'my_netname1',
                  'subnets': ['mysubnid1'],
                  'tenant_id': 'fake-project'}]
        requested_networks = [('my_netid1', '10.0.1.2', None, None)]
        ids = ['my_netid1']
        list_port_values = [({'network_id': 'my_netid1',
                              'fixed_ips': 'ip_address=10.0.1.2',
                              'fields': 'device_id'},
                             {'ports': []}),
                            ({'tenant_id': 'fake-project', 'fields': ['id']},
                             {'ports': []})]
        self._test_validate_networks_fixed_ip_no_dup(nets1, requested_networks,
                                                     ids, list_port_values)
    def test_validate_networks_fixed_ip_no_dup2(self):
        # Test validation for a request for a network with a
        # fixed ip that is not already in use because not used on this net id
        nets2 = [{'id': 'my_netid1',
                  'name': 'my_netname1',
                  'subnets': ['mysubnid1'],
                  'tenant_id': 'fake-project'},
                 {'id': 'my_netid2',
                  'name': 'my_netname2',
                  'subnets': ['mysubnid2'],
                  'tenant_id': 'fake-project'}]
        requested_networks = [('my_netid1', '10.0.1.2', None, None),
                              ('my_netid2', '10.0.1.3', None, None)]
        ids = ['my_netid1', 'my_netid2']
        list_port_values = [({'network_id': 'my_netid1',
                              'fixed_ips': 'ip_address=10.0.1.2',
                              'fields': 'device_id'},
                             {'ports': []}),
                            ({'network_id': 'my_netid2',
                              'fixed_ips': 'ip_address=10.0.1.3',
                              'fields': 'device_id'},
                             {'ports': []}),
                            ({'tenant_id': 'fake-project', 'fields': ['id']},
                             {'ports': []})]
        self._test_validate_networks_fixed_ip_no_dup(nets2, requested_networks,
                                                     ids, list_port_values)
    def test_validate_networks_fixed_ip_dup(self):
        # Test validation for a request for a network with a
        # fixed ip that is already in use: a port owning the address exists,
        # so FixedIpAlreadyInUse is raised.
        requested_networks = [('my_netid1', '10.0.1.2', None, None)]
        list_port_mock_params = {'network_id': 'my_netid1',
                                 'fixed_ips': 'ip_address=10.0.1.2',
                                 'fields': 'device_id'}
        list_port_mock_return = {'ports': [({'device_id': 'my_deviceid'})]}
        with mock.patch.object(client.Client, 'list_ports',
                               return_value=list_port_mock_return) as (
            list_ports_mock):
            self.assertRaises(exception.FixedIpAlreadyInUse,
                              self.api.validate_networks,
                              self.context, requested_networks, 1)
            list_ports_mock.assert_called_once_with(**list_port_mock_params)
    def test_allocate_floating_ip_exceed_limit(self):
        # Verify that the correct exception is thrown when quota exceed:
        # Neutron's OverQuotaClient is translated to FloatingIpLimitExceeded.
        pool_name = 'dummy'
        api = neutronapi.API()
        with test.nested(
            mock.patch.object(client.Client, 'create_floatingip'),
            mock.patch.object(api,
                '_get_floating_ip_pool_id_by_name_or_id')) as (
            create_mock, get_mock):
            create_mock.side_effect = exceptions.OverQuotaClient()
            self.assertRaises(exception.FloatingIpLimitExceeded,
                          api.allocate_floating_ip,
                          self.context, pool_name)
    def test_allocate_floating_ip_no_ipv4_subnet(self):
        """A BadRequest from create_floatingip (network lacks an IPv4
        subnet) is translated to FloatingIpBadRequest.
        """
        api = neutronapi.API()
        net_id = uuid.uuid4()
        error_msg = ('Bad floatingip request: Network %s does not contain '
                     'any IPv4 subnet' % net_id)
        with test.nested(
            mock.patch.object(client.Client, 'create_floatingip'),
            mock.patch.object(api,
                '_get_floating_ip_pool_id_by_name_or_id')) as (
            create_mock, get_mock):
            create_mock.side_effect = exceptions.BadRequest(error_msg)
            self.assertRaises(exception.FloatingIpBadRequest,
                              api.allocate_floating_ip, self.context,
                              'ext_net')
    def test_create_port_for_instance_no_more_ip(self):
        """IpAddressGenerationFailureClient from create_port is translated
        to NoMoreFixedIps by _create_port.
        """
        instance = fake_instance.fake_instance_obj(self.context)
        net = {'id': 'my_netid1',
               'name': 'my_netname1',
               'subnets': ['mysubnid1'],
               'tenant_id': instance['project_id']}
        with mock.patch.object(client.Client, 'create_port',
            side_effect=exceptions.IpAddressGenerationFailureClient()) as (
            create_port_mock):
            zone = 'compute:%s' % instance['availability_zone']
            port_req_body = {'port': {'device_id': instance['uuid'],
                                      'device_owner': zone}}
            self.assertRaises(exception.NoMoreFixedIps,
                              self.api._create_port,
                              neutronapi.get_client(self.context),
                              instance, net['id'], port_req_body)
            create_port_mock.assert_called_once_with(port_req_body)
    @mock.patch.object(client.Client, 'create_port',
                       side_effect=exceptions.MacAddressInUseClient())
    def test_create_port_for_instance_mac_address_in_use(self,
                                                         create_port_mock):
        """MacAddressInUseClient from create_port is translated to
        PortInUse by _create_port.
        """
        # Create fake data.
        instance = fake_instance.fake_instance_obj(self.context)
        net = {'id': 'my_netid1',
               'name': 'my_netname1',
               'subnets': ['mysubnid1'],
               'tenant_id': instance['project_id']}
        zone = 'compute:%s' % instance['availability_zone']
        port_req_body = {'port': {'device_id': instance['uuid'],
                                  'device_owner': zone,
                                  'mac_address': 'XX:XX:XX:XX:XX:XX'}}
        available_macs = set(['XX:XX:XX:XX:XX:XX'])
        # Run the code.
        self.assertRaises(exception.PortInUse,
                          self.api._create_port,
                          neutronapi.get_client(self.context),
                          instance, net['id'], port_req_body,
                          available_macs=available_macs)
        # Assert the calls.
        create_port_mock.assert_called_once_with(port_req_body)
    @mock.patch.object(client.Client, 'create_port',
                       side_effect=exceptions.IpAddressInUseClient())
    def test_create_port_for_fixed_ip_in_use(self, create_port_mock):
        """IpAddressInUseClient from create_port is translated to
        FixedIpAlreadyInUse by _create_port.
        """
        # Create fake data.
        instance = fake_instance.fake_instance_obj(self.context)
        net = {'id': 'my_netid1',
               'name': 'my_netname1',
               'subnets': ['mysubnid1'],
               'tenant_id': instance['project_id']}
        zone = 'compute:%s' % instance['availability_zone']
        port_req_body = {'port': {'device_id': instance['uuid'],
                                  'device_owner': zone,
                                  'mac_address': 'XX:XX:XX:XX:XX:XX'}}
        fake_ip = '1.1.1.1'
        # Run the code.
        self.assertRaises(exception.FixedIpAlreadyInUse,
                          self.api._create_port,
                          neutronapi.get_client(self.context),
                          instance, net['id'], port_req_body,
                          fixed_ip=fake_ip)
        # Assert the calls.
        create_port_mock.assert_called_once_with(port_req_body)
    @mock.patch.object(client.Client, 'create_port',
                       side_effect=exceptions.InvalidIpForNetworkClient())
    def test_create_port_with_invalid_ip_for_network(self, create_port_mock):
        """InvalidIpForNetworkClient from create_port is translated to
        InvalidInput with a message naming the IP and network.
        """
        # Create fake data.
        instance = fake_instance.fake_instance_obj(self.context)
        net = {'id': 'my_netid1',
               'name': 'my_netname1',
               'subnets': ['mysubnid1'],
               'tenant_id': instance['project_id']}
        zone = 'compute:%s' % instance['availability_zone']
        port_req_body = {'port': {'device_id': instance['uuid'],
                                  'device_owner': zone,
                                  'mac_address': 'XX:XX:XX:XX:XX:XX'}}
        fake_ip = '1.1.1.1'
        # Run the code.
        exc = self.assertRaises(exception.InvalidInput,
                                self.api._create_port,
                                neutronapi.get_client(self.context),
                                instance, net['id'], port_req_body,
                                fixed_ip=fake_ip)
        # Assert the exception message
        expected_exception_msg = ('Invalid input received: Fixed IP %(ip)s is '
                                  'not a valid ip address for network '
                                  '%(net_id)s.' %
                                  {'ip': fake_ip, 'net_id': net['id']})
        self.assertEqual(expected_exception_msg, str(exc))
        # Assert the calls.
        create_port_mock.assert_called_once_with(port_req_body)
    def test_get_network_detail_not_found(self):
        """NetworkNotFoundClient from show_network is translated to
        NetworkNotFound by API.get.
        """
        api = neutronapi.API()
        expected_exc = exceptions.NetworkNotFoundClient()
        network_uuid = '02cacbca-7d48-4a2c-8011-43eecf8a9786'
        with mock.patch.object(client.Client, 'show_network',
                               side_effect=expected_exc) as (
            fake_show_network):
            self.assertRaises(exception.NetworkNotFound,
                              api.get,
                              self.context,
                              network_uuid)
            fake_show_network.assert_called_once_with(network_uuid)
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.API.'
                '_refresh_neutron_extensions_cache')
    def test_deallocate_for_instance_uses_delete_helper(self,
                                                        mock_refresh,
                                                        mock_preexisting):
        """deallocate_for_instance lists the instance's ports and hands the
        full set to _delete_ports with raise_if_fail=True.
        """
        # setup fake data
        instance = fake_instance.fake_instance_obj(self.context)
        mock_preexisting.return_value = []
        port_data = {'ports': [{'id': str(uuid.uuid4())}]}
        ports = set([port['id'] for port in port_data.get('ports')])
        api = neutronapi.API()
        # setup mocks
        mock_client = mock.Mock()
        mock_client.list_ports.return_value = port_data
        with test.nested(
            mock.patch.object(neutronapi, 'get_client',
                              return_value=mock_client),
            mock.patch.object(api, '_delete_ports')
        ) as (
            mock_get_client, mock_delete
        ):
            # run the code
            api.deallocate_for_instance(self.context, instance)
            # assert the calls
            mock_client.list_ports.assert_called_once_with(
                device_id=instance.uuid)
            mock_delete.assert_called_once_with(
                mock_client, instance, ports, raise_if_fail=True)
    def _test_delete_ports(self, expect_raise):
        """Common helper: the first delete_port raises
        NeutronClientException, the second succeeds; behavior then depends
        on the raise_if_fail flag under test.
        """
        results = [exceptions.NeutronClientException, None]
        mock_client = mock.Mock()
        with mock.patch.object(mock_client, 'delete_port',
                               side_effect=results):
            api = neutronapi.API()
            api._delete_ports(mock_client, {'uuid': 'foo'}, ['port1', 'port2'],
                              raise_if_fail=expect_raise)
    def test_delete_ports_raise(self):
        """With raise_if_fail=True a failed delete propagates."""
        self.assertRaises(exceptions.NeutronClientException,
                          self._test_delete_ports, True)
    def test_delete_ports_no_raise(self):
        """With raise_if_fail=False a failed delete is swallowed."""
        self._test_delete_ports(False)
    def test_delete_ports_never_raise_404(self):
        """PortNotFoundClient is never propagated, even with
        raise_if_fail=True — the port is already gone.
        """
        mock_client = mock.Mock()
        mock_client.delete_port.side_effect = exceptions.PortNotFoundClient
        api = neutronapi.API()
        api._delete_ports(mock_client, {'uuid': 'foo'}, ['port1'],
                          raise_if_fail=True)
        mock_client.delete_port.assert_called_once_with('port1')
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    def test_deallocate_port_for_instance_fails(self, mock_preexisting):
        """If _delete_ports raises, deallocate_port_for_instance propagates
        the error and does not refresh the instance network info cache.
        """
        mock_preexisting.return_value = []
        mock_client = mock.Mock()
        api = neutronapi.API()
        with test.nested(
            mock.patch.object(neutronapi, 'get_client',
                              return_value=mock_client),
            mock.patch.object(api, '_delete_ports',
                              side_effect=exceptions.Unauthorized),
            mock.patch.object(api, 'get_instance_nw_info')
        ) as (
            get_client, delete_ports, get_nw_info
        ):
            self.assertRaises(exceptions.Unauthorized,
                              api.deallocate_port_for_instance,
                              self.context, instance={'uuid': 'fake'},
                              port_id='fake')
        # make sure that we didn't try to reload nw info
        self.assertFalse(get_nw_info.called)
@mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
def _test_show_port_exceptions(self, client_exc, expected_nova_exc,
get_client_mock):
show_port_mock = mock.Mock(side_effect=client_exc)
get_client_mock.return_value.show_port = show_port_mock
self.assertRaises(expected_nova_exc, self.api.show_port,
self.context, 'fake_port_id')
def test_show_port_not_found(self):
self._test_show_port_exceptions(exceptions.PortNotFoundClient,
exception.PortNotFound)
def test_show_port_forbidden(self):
self._test_show_port_exceptions(exceptions.Unauthorized,
exception.Forbidden)
def test_show_port_unknown_exception(self):
self._test_show_port_exceptions(exceptions.NeutronClientException,
exception.NovaException)
def test_get_network(self):
api = neutronapi.API()
with mock.patch.object(client.Client, 'show_network') as mock_show:
mock_show.return_value = {
'network': {'id': 'fake-uuid', 'name': 'fake-network'}
}
net_obj = api.get(self.context, 'fake-uuid')
self.assertEqual('fake-network', net_obj.label)
self.assertEqual('fake-network', net_obj.name)
self.assertEqual('fake-uuid', net_obj.uuid)
def test_get_all_networks(self):
api = neutronapi.API()
with mock.patch.object(client.Client, 'list_networks') as mock_list:
mock_list.return_value = {
'networks': [
{'id': 'fake-uuid1', 'name': 'fake-network1'},
{'id': 'fake-uuid2', 'name': 'fake-network2'},
]}
net_objs = api.get_all(self.context)
self.assertIsInstance(net_objs, objects.NetworkList)
self.assertEqual(2, len(net_objs))
self.assertEqual(('fake-uuid1', 'fake-network1'),
(net_objs[0].uuid, net_objs[0].name))
self.assertEqual(('fake-uuid2', 'fake-network2'),
(net_objs[1].uuid, net_objs[1].name))
@mock.patch.object(neutronapi.API, "_refresh_neutron_extensions_cache")
@mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
def test_update_instance_vnic_index(self, mock_get_client,
mock_refresh_extensions):
api = neutronapi.API()
api.extensions = set([constants.VNIC_INDEX_EXT])
mock_client = mock_get_client()
mock_client.update_port.return_value = 'port'
instance = {'project_id': '9d049e4b60b64716978ab415e6fbd5c0',
'uuid': str(uuid.uuid4()),
'display_name': 'test_instance',
'availability_zone': 'nova',
'host': 'some_host'}
instance = objects.Instance(**instance)
vif = {'id': 'fake-port-id'}
api.update_instance_vnic_index(self.context, instance, vif, 7)
port_req_body = {'port': {'vnic_index': 7}}
mock_client.update_port.assert_called_once_with('fake-port-id',
port_req_body)
@mock.patch.object(neutronapi, 'get_client', return_value=mock.Mock())
def test_update_port_bindings_for_instance_same_host(self,
get_client_mock):
instance = fake_instance.fake_instance_obj(self.context)
self.api._has_port_binding_extension = mock.Mock(return_value=True)
# We test two ports, one with the same host as the host passed in and
# one where binding:host_id isn't set, so we update that port.
fake_ports = {'ports': [
{'id': 'fake-port-1',
'binding:host_id': instance.host},
{'id': 'fake-port-2'}]}
list_ports_mock = mock.Mock(return_value=fake_ports)
get_client_mock.return_value.list_ports = list_ports_mock
update_port_mock = mock.Mock()
get_client_mock.return_value.update_port = update_port_mock
self.api._update_port_binding_for_instance(self.context, instance,
instance.host)
# Assert that update_port was only called on the port without a host.
update_port_mock.assert_called_once_with(
'fake-port-2', {'port': {'binding:host_id': instance.host}})
@mock.patch('nova.network.neutronv2.api.compute_utils')
def test_get_preexisting_port_ids(self, mocked_comp_utils):
mocked_comp_utils.get_nw_info_for_instance.return_value = [model.VIF(
id='1', preserve_on_delete=False), model.VIF(
id='2', preserve_on_delete=True), model.VIF(
id='3', preserve_on_delete=True)]
result = self.api._get_preexisting_port_ids(None)
self.assertEqual(['2', '3'], result, "Invalid preexisting ports")
def _test_unbind_ports_get_client(self, mock_neutron,
mock_has_ext, has_ext=False):
mock_ctx = mock.Mock(is_admin=False)
mock_has_ext.return_value = has_ext
ports = ["1", "2", "3"]
self.api._unbind_ports(mock_ctx, ports, mock_neutron)
get_client_calls = []
get_client_calls.append(mock.call(mock_ctx)
if not has_ext else
mock.call(mock_ctx, admin=True))
if has_ext:
self.assertEqual(1, mock_neutron.call_count)
mock_neutron.assert_has_calls(get_client_calls, True)
else:
self.assertEqual(0, mock_neutron.call_count)
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_unbind_ports_get_client_binding_extension(self,
mock_neutron,
mock_has_ext):
self._test_unbind_ports_get_client(mock_neutron, mock_has_ext, True)
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_unbind_ports_get_client(self, mock_neutron, mock_has_ext):
self._test_unbind_ports_get_client(mock_neutron, mock_has_ext)
def _test_unbind_ports(self, mock_neutron, mock_has_ext, has_ext=False):
mock_client = mock.Mock()
mock_update_port = mock.Mock()
mock_client.update_port = mock_update_port
mock_ctx = mock.Mock(is_admin=False)
mock_has_ext.return_value = has_ext
mock_neutron.return_value = mock_client
ports = ["1", "2", "3"]
api = neutronapi.API()
api._unbind_ports(mock_ctx, ports, mock_client)
body = {'port': {'device_id': '', 'device_owner': ''}}
if has_ext:
body['port']['binding:host_id'] = None
body['port']['binding:profile'] = {}
update_port_calls = []
for p in ports:
update_port_calls.append(mock.call(p, body))
self.assertEqual(3, mock_update_port.call_count)
mock_update_port.assert_has_calls(update_port_calls)
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_unbind_ports_binding_ext(self, mock_neutron, mock_has_ext):
self._test_unbind_ports(mock_neutron, mock_has_ext, True)
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_unbind_ports(self, mock_neutron, mock_has_ext):
self._test_unbind_ports(mock_neutron, mock_has_ext, False)
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
def test_unbind_ports_no_port_ids(self, mock_has_ext):
# Tests that None entries in the ports list are filtered out.
mock_client = mock.Mock()
mock_update_port = mock.Mock()
mock_client.update_port = mock_update_port
mock_ctx = mock.Mock(is_admin=False)
mock_has_ext.return_value = True
api = neutronapi.API()
api._unbind_ports(mock_ctx, [None], mock_client, mock_client)
self.assertFalse(mock_update_port.called)
    @mock.patch('nova.network.neutronv2.api.API.get_instance_nw_info')
    @mock.patch('nova.network.neutronv2.api.excutils')
    @mock.patch('nova.network.neutronv2.api.API._delete_ports')
    @mock.patch('nova.network.neutronv2.api.API.'
                '_check_external_network_attach')
    @mock.patch('nova.network.neutronv2.api.LOG')
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    @mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
    @mock.patch('nova.network.neutronv2.api.API.'
                '_populate_neutron_extension_values')
    @mock.patch('nova.network.neutronv2.api.API._get_available_networks')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_allocate_for_instance_unbind(self, mock_ntrn,
                                          mock_avail_nets,
                                          mock_ext_vals,
                                          mock_has_pbe,
                                          mock_unbind,
                                          mock_log,
                                          mock_cena,
                                          mock_del_ports,
                                          mock_exeu,
                                          mock_giwn):
        """When updating the third requested port fails, the two ports
        already updated are unbound again during cleanup.
        """
        mock_nc = mock.Mock()
        def show_port(port_id):
            # Minimal show_port stub: every requested port resolves to the
            # same network and tenant.
            return {'port': {'network_id': 'net-1', 'id': port_id,
                             'tenant_id': 'proj-1'}}
        mock_nc.show_port = show_port
        mock_ntrn.return_value = mock_nc
        # Third update_port call raises, aborting the allocation.
        mock_nc.update_port.side_effect = [True, True, Exception]
        mock_inst = mock.Mock(project_id="proj-1",
                              availability_zone='zone-1',
                              uuid='inst-1')
        mock_has_pbe.return_value = False
        nw_req = objects.NetworkRequestList(
            objects = [objects.NetworkRequest(port_id='fake-port1'),
                       objects.NetworkRequest(port_id='fake-port2'),
                       objects.NetworkRequest(port_id='fail-port')])
        mock_avail_nets.return_value = [{'id': 'net-1'}]
        self.api.allocate_for_instance(mock.sentinel.ctx,
                                       mock_inst,
                                       requested_networks=nw_req)
        # Only the two successfully-updated ports are unbound on cleanup.
        mock_unbind.assert_called_once_with(mock.sentinel.ctx,
                                            ['fake-port1', 'fake-port2'],
                                            mock.ANY,
                                            mock.ANY)
@mock.patch('nova.network.neutronv2.api.API._process_requested_networks')
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.API._get_available_networks')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_allocate_port_for_instance_no_networks(self,
mock_getclient,
mock_avail_nets,
mock_has_pbe,
mock_process_request_net):
"""Tests that if no networks are requested and no networks are
available, we fail with InterfaceAttachFailedNoNetwork.
"""
instance = fake_instance.fake_instance_obj(self.context)
mock_has_pbe.return_value = False
mock_process_request_net.return_value = ({}, [], [], None)
mock_avail_nets.return_value = []
api = neutronapi.API()
ex = self.assertRaises(exception.InterfaceAttachFailedNoNetwork,
api.allocate_port_for_instance,
self.context, instance, port_id=None)
self.assertEqual(
"No specific network was requested and none are available for "
"project 'fake-project'.", six.text_type(ex))
    @mock.patch('nova.objects.network_request.utils')
    @mock.patch('nova.network.neutronv2.api.LOG')
    @mock.patch('nova.network.neutronv2.api.base_api')
    @mock.patch('nova.network.neutronv2.api.API._delete_ports')
    @mock.patch('nova.network.neutronv2.api.API._unbind_ports')
    @mock.patch('nova.network.neutronv2.api.API._get_preexisting_port_ids')
    @mock.patch('nova.network.neutronv2.api.get_client')
    def test_preexisting_deallocate_for_instance(self, mock_ntrn,
                                                 mock_gppids,
                                                 mock_unbind,
                                                 mock_deletep,
                                                 mock_baseapi,
                                                 mock_log,
                                                 req_utils):
        """Requested ('port-1') and pre-existing ('port-3') ports are
        unbound; only the remaining port ('port-2') is deleted.
        """
        req_utils.is_neutron.return_value = True
        mock_inst = mock.Mock(project_id="proj-1",
                              availability_zone='zone-1',
                              uuid='inst-1')
        mock_nc = mock.Mock()
        mock_ntrn.return_value = mock_nc
        # The instance currently owns three ports.
        mock_nc.list_ports.return_value = {'ports': [
            {'id': 'port-1'}, {'id': 'port-2'}, {'id': 'port-3'}
        ]}
        # 'port-1' was explicitly requested by the user ...
        nw_req = objects.NetworkRequestList(
            objects = [objects.NetworkRequest(network_id='net-1',
                                              address='192.168.0.3',
                                              port_id='port-1',
                                              pci_request_id='pci-1')])
        # ... and 'port-3' is reported as pre-existing.
        mock_gppids.return_value = ['port-3']
        self.api.deallocate_for_instance(mock.sentinel.ctx, mock_inst,
                                         requested_networks=nw_req)
        mock_unbind.assert_called_once_with(mock.sentinel.ctx,
                                            set(['port-1', 'port-3']),
                                            mock.ANY)
        mock_deletep.assert_called_once_with(mock_nc,
                                             mock_inst,
                                             set(['port-2']),
                                             raise_if_fail=True)
@mock.patch('nova.network.neutronv2.api.API.get_instance_nw_info')
@mock.patch('nova.network.neutronv2.api.API._unbind_ports')
@mock.patch('nova.network.neutronv2.api.compute_utils')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_preexisting_deallocate_port_for_instance(self,
mock_ntrn,
mock_comp_utils,
mock_unbind,
mock_netinfo):
mock_comp_utils.get_nw_info_for_instance.return_value = [model.VIF(
id='1', preserve_on_delete=False), model.VIF(
id='2', preserve_on_delete=True), model.VIF(
id='3', preserve_on_delete=True)]
mock_inst = mock.Mock(project_id="proj-1",
availability_zone='zone-1',
uuid='inst-1')
mock_client = mock.Mock()
mock_ntrn.return_value = mock_client
self.api.deallocate_port_for_instance(mock.sentinel.ctx,
mock_inst, '2')
mock_unbind.assert_called_once_with(mock.sentinel.ctx, ['2'],
mock_client)
@mock.patch('nova.network.neutronv2.api.API.'
'_check_external_network_attach')
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.API.'
'_populate_neutron_extension_values')
@mock.patch('nova.network.neutronv2.api.API._get_available_networks')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_port_binding_failed_created_port(self, mock_ntrn,
mock_avail_nets,
mock_ext_vals,
mock_has_pbe,
mock_cena):
mock_has_pbe.return_value = True
mock_nc = mock.Mock()
mock_ntrn.return_value = mock_nc
mock_inst = mock.Mock(project_id="proj-1",
availability_zone='zone-1',
uuid='inst-1')
mock_avail_nets.return_value = [{'id': 'net-1'}]
mock_nc.create_port.return_value = {'port': {'id': 'fake_id',
'tenant_id': mock_inst.project_id,
'binding:vif_type': 'binding_failed'}}
self.assertRaises(exception.PortBindingFailed,
self.api.allocate_for_instance,
mock.sentinel.ctx,
mock_inst)
mock_nc.delete_port.assert_called_once_with('fake_id')
@mock.patch('nova.network.neutronv2.api.API._show_port')
@mock.patch('nova.network.neutronv2.api.API._has_port_binding_extension')
@mock.patch('nova.network.neutronv2.api.get_client')
def test_port_binding_failed_with_request(self, mock_ntrn,
mock_has_pbe,
mock_show_port):
mock_has_pbe.return_value = True
mock_nc = mock.Mock()
mock_ntrn.return_value = mock_nc
mock_inst = mock.Mock(project_id="proj-1",
availability_zone='zone-1',
uuid='inst-1')
mock_show_port.return_value = {
'tenant_id': mock_inst.project_id,
'binding:vif_type': 'binding_failed'}
nw_req = objects.NetworkRequestList(
objects = [objects.NetworkRequest(port_id='fake_id')])
self.assertRaises(exception.PortBindingFailed,
self.api.allocate_for_instance,
mock.sentinel.ctx, mock_inst,
requested_networks=nw_req)
@mock.patch('nova.network.neutronv2.api.get_client')
def test_get_floating_ip_by_address_not_found_neutron_not_found(self,
mock_ntrn):
mock_nc = mock.Mock()
mock_ntrn.return_value = mock_nc
mock_nc.list_floatingips.side_effect = exceptions.NotFound()
address = '172.24.4.227'
self.assertRaises(exception.FloatingIpNotFoundForAddress,
self.api.get_floating_ip_by_address,
self.context, address)
@mock.patch('nova.network.neutronv2.api.get_client')
def test_get_floating_ip_by_address_not_found_neutron_raises_non404(self,
mock_ntrn):
mock_nc = mock.Mock()
mock_ntrn.return_value = mock_nc
mock_nc.list_floatingips.side_effect = exceptions.InternalServerError()
address = '172.24.4.227'
self.assertRaises(exceptions.InternalServerError,
self.api.get_floating_ip_by_address,
self.context, address)
@mock.patch('nova.network.neutronv2.api.get_client')
def test_get_floating_ips_by_project_not_found(self, mock_ntrn):
mock_nc = mock.Mock()
mock_ntrn.return_value = mock_nc
mock_nc.list_floatingips.side_effect = exceptions.NotFound()
fips = self.api.get_floating_ips_by_project(self.context)
self.assertEqual([], fips)
@mock.patch('nova.network.neutronv2.api.get_client')
def test_get_floating_ips_by_project_not_found_legacy(self, mock_ntrn):
# FIXME(danms): Remove this test along with the code path it tests
# when bug 1513879 is fixed.
mock_nc = mock.Mock()
mock_ntrn.return_value = mock_nc
# neutronclient doesn't raise NotFound in this scenario, it raises a
# NeutronClientException with status_code=404
notfound = exceptions.NeutronClientException(status_code=404)
mock_nc.list_floatingips.side_effect = notfound
fips = self.api.get_floating_ips_by_project(self.context)
self.assertEqual([], fips)
@mock.patch('nova.network.neutronv2.api.get_client')
def test_get_floating_ips_by_project_raises_non404(self, mock_ntrn):
mock_nc = mock.Mock()
mock_ntrn.return_value = mock_nc
mock_nc.list_floatingips.side_effect = exceptions.InternalServerError()
self.assertRaises(exceptions.InternalServerError,
self.api.get_floating_ips_by_project,
self.context)
def test_unbind_ports_reset_dns_name(self):
neutron = mock.Mock()
port_client = mock.Mock()
with mock.patch.object(self.api, '_has_port_binding_extension',
return_value=False):
self.api.extensions = [constants.DNS_INTEGRATION]
ports = [uuids.port_id]
self.api._unbind_ports(self.context, ports, neutron, port_client)
port_req_body = {'port': {'device_id': '',
'device_owner': '',
'dns_name': ''}}
port_client.update_port.assert_called_once_with(
uuids.port_id, port_req_body)
class TestNeutronv2ModuleMethods(test.NoDBTestCase):
    """Tests for module-level helpers in nova.network.neutronv2.api."""
    def test_gather_port_ids_and_networks_wrong_params(self):
        """Passing only one of networks/port_ids raises NovaException."""
        api = neutronapi.API()
        # networks given but port_ids is None
        self.assertRaises(exception.NovaException,
                          api._gather_port_ids_and_networks,
                          'fake_context', 'fake_instance',
                          [{'network': {'name': 'foo'}}], None)
        # port_ids given but networks is None
        self.assertRaises(exception.NovaException,
                          api._gather_port_ids_and_networks,
                          'fake_context', 'fake_instance',
                          None, ['list', 'of', 'port_ids'])
    def test_ensure_requested_network_ordering_no_preference_ids(self):
        """No preference list: a plain id list stays in its given order.

        The original test invoked the helper without asserting anything;
        assert the list is unchanged, mirroring the hashes variant below.
        """
        networks = [1, 2, 3]
        neutronapi._ensure_requested_network_ordering(
            lambda x: x,
            networks,
            None)
        self.assertEqual([1, 2, 3], networks)
    def test_ensure_requested_network_ordering_no_preference_hashes(self):
        """No preference list: dict entries keep their original order."""
        networks = [{'id': 3}, {'id': 1}, {'id': 2}]
        neutronapi._ensure_requested_network_ordering(
            lambda x: x['id'],
            networks,
            None)
        self.assertEqual(networks, [{'id': 3}, {'id': 1}, {'id': 2}])
    def test_ensure_requested_network_ordering_with_preference(self):
        """A preference list reorders the networks in place."""
        networks = [{'id': 3}, {'id': 1}, {'id': 2}]
        neutronapi._ensure_requested_network_ordering(
            lambda x: x['id'],
            networks,
            [1, 2, 3])
        self.assertEqual(networks, [{'id': 1}, {'id': 2}, {'id': 3}])
class TestNeutronv2Portbinding(TestNeutronv2Base):
    """Tests around the Neutron port-binding extension (binding:host_id).

    Note: these tests use mox record/replay, so statement order up to
    ReplayAll() is part of each test's contract.
    """
    def test_allocate_for_instance_portbinding(self):
        """Allocation passes bind_host_id when portbinding is enabled."""
        self._allocate_for_instance(1, portbinding=True,
                                    bind_host_id=self.instance.get('host'))
    def test_populate_neutron_extension_values_binding(self):
        """binding:host_id is populated; binding:profile stays unset."""
        api = neutronapi.API()
        # mox record phase: one get_client plus one list_extensions call
        # advertising the port-binding extension.
        neutronapi.get_client(mox.IgnoreArg()).AndReturn(
            self.moxed_client)
        self.moxed_client.list_extensions().AndReturn(
            {'extensions': [{'name': constants.PORTBINDING_EXT}]})
        self.mox.ReplayAll()
        host_id = 'my_host_id'
        instance = {'host': host_id}
        port_req_body = {'port': {}}
        api._populate_neutron_extension_values(self.context, instance,
                                               None, port_req_body,
                                               bind_host_id=host_id)
        self.assertEqual(host_id, port_req_body['port']['binding:host_id'])
        self.assertFalse(port_req_body['port'].get('binding:profile'))
    @mock.patch.object(pci_whitelist, 'get_pci_device_devspec')
    @mock.patch.object(pci_manager, 'get_instance_pci_devs')
    def test_populate_neutron_extension_values_binding_sriov(self,
                                         mock_get_instance_pci_devs,
                                         mock_get_pci_device_devspec):
        """SR-IOV: binding:profile is built from the instance's PCI dev."""
        api = neutronapi.API()
        host_id = 'my_host_id'
        instance = {'host': host_id}
        port_req_body = {'port': {}}
        pci_req_id = 'my_req_id'
        pci_dev = {'vendor_id': '1377',
                   'product_id': '0047',
                   'address': '0000:0a:00.1',
                   }
        PciDevice = collections.namedtuple('PciDevice',
                               ['vendor_id', 'product_id', 'address'])
        mydev = PciDevice(**pci_dev)
        # expected binding:profile derived from the device + devspec tags
        profile = {'pci_vendor_info': '1377:0047',
                   'pci_slot': '0000:0a:00.1',
                   'physical_network': 'phynet1',
                   }
        mock_get_instance_pci_devs.return_value = [mydev]
        devspec = mock.Mock()
        devspec.get_tags.return_value = {'physical_network': 'phynet1'}
        mock_get_pci_device_devspec.return_value = devspec
        api._populate_neutron_binding_profile(instance,
                                              pci_req_id, port_req_body)
        self.assertEqual(profile, port_req_body['port']['binding:profile'])
    def _test_update_port_binding_false(self, func_name, *args):
        """Helper: the named API method is a no-op when the port-binding
        extension is absent (no client calls are recorded)."""
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(False)
        self.mox.ReplayAll()
        func(*args)
    def _test_update_port_binding_true(self, expected_bind_host,
                                       func_name, *args):
        """Helper: the named API method updates binding:host_id to
        expected_bind_host on each of the instance's ports."""
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(True)
        # admin client is used for the binding update
        neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        search_opts = {'device_id': self.instance['uuid'],
                       'tenant_id': self.instance['project_id']}
        ports = {'ports': [{'id': 'test1'}]}
        self.moxed_client.list_ports(**search_opts).AndReturn(ports)
        port_req_body = {'port':
                         {'binding:host_id': expected_bind_host}}
        self.moxed_client.update_port('test1',
                                      port_req_body).AndReturn(None)
        self.mox.ReplayAll()
        func(*args)
    def _test_update_port_true_exception(self, expected_bind_host,
                                         func_name, *args):
        """Helper: a failing update_port surfaces as a client exception."""
        api = neutronapi.API()
        func = getattr(api, func_name)
        self.mox.StubOutWithMock(api, '_has_port_binding_extension')
        api._has_port_binding_extension(mox.IgnoreArg(),
                                        refresh_cache=True).AndReturn(True)
        neutronapi.get_client(mox.IgnoreArg(), admin=True).AndReturn(
            self.moxed_client)
        search_opts = {'device_id': self.instance['uuid'],
                       'tenant_id': self.instance['project_id']}
        ports = {'ports': [{'id': 'test1'}]}
        self.moxed_client.list_ports(**search_opts).AndReturn(ports)
        port_req_body = {'port':
                         {'binding:host_id': expected_bind_host}}
        self.moxed_client.update_port('test1',
                                      port_req_body).AndRaise(
            Exception("fail to update port"))
        self.mox.ReplayAll()
        self.assertRaises(NEUTRON_CLIENT_EXCEPTION,
                          func,
                          *args)
    def test_migrate_instance_finish_binding_false(self):
        """migrate_instance_finish: no-op without the extension."""
        self._test_update_port_binding_false('migrate_instance_finish',
                                             self.context, None,
                                             {'dest_compute': 'fake'})
    def test_migrate_instance_finish_binding_true(self):
        """migrate_instance_finish rebinds ports to the dest host."""
        migration = {'source_compute': self.instance.get('host'),
                     'dest_compute': 'dest_host'}
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_binding_true('dest_host',
                                            'migrate_instance_finish',
                                            self.context,
                                            instance,
                                            migration)
    def test_migrate_instance_finish_binding_true_exception(self):
        """A failed rebind during migration raises a client exception."""
        migration = {'source_compute': self.instance.get('host'),
                     'dest_compute': 'dest_host'}
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_true_exception('dest_host',
                                              'migrate_instance_finish',
                                              self.context,
                                              instance,
                                              migration)
    def test_setup_instance_network_on_host_false(self):
        """setup_instance_network_on_host: no-op without the extension."""
        self._test_update_port_binding_false(
            'setup_instance_network_on_host', self.context, None,
            'fake_host')
    def test_setup_instance_network_on_host_true(self):
        """setup_instance_network_on_host rebinds ports to the host."""
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_binding_true('fake_host',
                                            'setup_instance_network_on_host',
                                            self.context,
                                            instance,
                                            'fake_host')
    def test_setup_instance_network_on_host_exception(self):
        """A failed rebind during setup raises a client exception."""
        instance = self._fake_instance_object(self.instance)
        self._test_update_port_true_exception(
            'fake_host', 'setup_instance_network_on_host',
            self.context, instance, 'fake_host')
    def test_associate_not_implemented(self):
        """The neutron API does not implement associate()."""
        api = neutronapi.API()
        self.assertRaises(NotImplementedError,
                          api.associate,
                          self.context, 'id')
class TestNeutronv2ExtraDhcpOpts(TestNeutronv2Base):
    """Allocation tests covering the extra-dhcp-opt extension."""
    def setUp(self):
        super(TestNeutronv2ExtraDhcpOpts, self).setUp()
        neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
    def test_allocate_for_instance_1_with_extra_dhcp_opts_turned_off(self):
        """Allocation succeeds with extra DHCP options disabled."""
        self._allocate_for_instance(1, extra_dhcp_opts=False)
    def test_allocate_for_instance_extradhcpopts(self):
        """Allocation passes the supplied DHCP options through."""
        opts = [
            {'opt_name': 'bootfile-name', 'opt_value': 'pxelinux.0'},
            {'opt_name': 'tftp-server', 'opt_value': '123.123.123.123'},
            {'opt_name': 'server-ip-address', 'opt_value': '123.123.123.456'},
        ]
        self._allocate_for_instance(1, dhcp_options=opts)
class TestNeutronv2NeutronHostnameDNS(TestNeutronv2Base):
    """dns_name handling during allocation with the DNS extension."""
    def setUp(self):
        super(TestNeutronv2NeutronHostnameDNS, self).setUp()
        neutronapi.get_client(mox.IgnoreArg()).MultipleTimes().AndReturn(
            self.moxed_client)
    def _requested_port(self):
        # single pre-existing port requested by the user
        return objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id='my_portid1')])
    def test_allocate_for_instance_create_port(self):
        # dns_name is set via the port create request
        self._allocate_for_instance(1, dns_extension=True)
    def test_allocate_for_instance_with_requested_port(self):
        # dns_name is set via the port update request
        self._allocate_for_instance(net_idx=1, dns_extension=True,
                                    requested_networks=self._requested_port())
    def test_allocate_for_instance_port_dns_name_preset_equal_hostname(self):
        # a user-preset dns_name equal to the instance hostname is accepted
        self._allocate_for_instance(net_idx=1, dns_extension=True,
                                    requested_networks=self._requested_port(),
                                    _dns_name='test-instance')
    def test_allocate_for_instance_port_dns_name_preset_noteq_hostname(self):
        # a preset dns_name that differs from the hostname is rejected
        requested = self._requested_port()
        api = self._stub_allocate_for_instance(
            requested_networks=requested,
            dns_extension=True,
            _break='pre_list_networks',
            _dns_name='my-instance')
        self.assertRaises(exception.PortNotUsableDNS,
                          api.allocate_for_instance, self.context,
                          self.instance, requested_networks=requested)
class TestNeutronv2NeutronHostnameDNSPortbinding(TestNeutronv2Base):
    """dns_name handling when the port-binding extension is also on."""
    def _requested_port(self):
        # single pre-existing port requested by the user
        return objects.NetworkRequestList(
            objects=[objects.NetworkRequest(port_id='my_portid1')])
    def test_allocate_for_instance_create_port(self):
        # dns_name is set by the port create request
        self._allocate_for_instance(1, portbinding=True, dns_extension=True,
                                    bind_host_id=self.instance.get('host'))
    def test_allocate_for_instance_with_requested_port(self):
        # dns_name is set by the port update request
        self._allocate_for_instance(net_idx=1, dns_extension=True,
                                    portbinding=True,
                                    bind_host_id=self.instance.get('host'),
                                    requested_networks=self._requested_port())
    def test_allocate_for_instance_create_port_with_dns_domain(self):
        # with a non-blank network dns_domain (and port binding enabled),
        # dns_name is set by the update in _update_port_dns_name
        self._allocate_for_instance(11, portbinding=True, dns_extension=True,
                                    bind_host_id=self.instance.get('host'))
    def test_allocate_for_instance_with_requested_port_with_dns_domain(self):
        # same as above, but for a user-requested port
        self._allocate_for_instance(net_idx=11, dns_extension=True,
                                    portbinding=True,
                                    bind_host_id=self.instance.get('host'),
                                    requested_networks=self._requested_port())
class TestNeutronClientForAdminScenarios(test.NoDBTestCase):
    """get_client() behaviour when admin credentials are required."""
    def setUp(self):
        super(TestNeutronClientForAdminScenarios, self).setUp()
        # NOTE(morganfainberg): The real configuration fixture here is used
        # instead of the already existing fixtures to ensure that the new
        # config options are automatically deregistered at the end of the
        # test run. Without the use of this fixture, the config options
        # from the plugin(s) would persist for all subsequent tests from when
        # these are run (due to global conf object) and not be fully
        # representative of a "clean" slate at the start of a test.
        self.config_fixture = self.useFixture(config_fixture.Config())
        oslo_opts = ks_loading.get_auth_plugin_conf_options('v2password')
        self.config_fixture.register_opts(oslo_opts, 'neutron')
    @requests_mock.mock()
    def _test_get_client_for_admin(self, req_mock,
                                   use_id=False, admin_context=False):
        """Helper: verify the admin auth plugin built by get_client.

        use_id: configure tenant_id/user_id rather than names.
        admin_context: use an admin context instead of forcing elevation
        via get_client(..., True).
        """
        token_value = uuid.uuid4().hex
        auth_url = 'http://anyhost/auth'
        token_resp = V2Token(token_id=token_value)
        req_mock.post(auth_url + '/tokens', json=token_resp)
        self.flags(url='http://anyhost/', group='neutron')
        self.flags(auth_type='v2password', group='neutron')
        self.flags(auth_url=auth_url, group='neutron')
        self.flags(timeout=30, group='neutron')
        if use_id:
            self.flags(tenant_id='tenant_id', group='neutron')
            self.flags(user_id='user_id', group='neutron')
        if admin_context:
            my_context = context.get_admin_context()
        else:
            my_context = context.RequestContext('userid', 'my_tenantid',
                                                auth_token='token')
        # clean global
        neutronapi.reset_state()
        if admin_context:
            # Note that the context does not contain a token but is
            # an admin context which will force an elevation to admin
            # credentials.
            context_client = neutronapi.get_client(my_context)
        else:
            # Note that the context is not elevated, but the True is passed in
            # which will force an elevation to admin credentials even though
            # the context has an auth_token.
            context_client = neutronapi.get_client(my_context, True)
        admin_auth = neutronapi._ADMIN_AUTH
        self.assertEqual(CONF.neutron.auth_url, admin_auth.auth_url)
        self.assertEqual(CONF.neutron.password, admin_auth.password)
        if use_id:
            self.assertEqual(CONF.neutron.tenant_id,
                             admin_auth.tenant_id)
            self.assertEqual(CONF.neutron.user_id, admin_auth.user_id)
            self.assertIsNone(admin_auth.tenant_name)
            self.assertIsNone(admin_auth.username)
        else:
            self.assertEqual(CONF.neutron.username, admin_auth.username)
            self.assertIsNone(admin_auth.tenant_id)
            self.assertIsNone(admin_auth.user_id)
        self.assertEqual(CONF.neutron.timeout,
                         neutronapi._SESSION.timeout)
        self.assertEqual(
            token_value,
            context_client.httpclient.auth.get_token(neutronapi._SESSION))
        self.assertEqual(
            CONF.neutron.url,
            context_client.httpclient.get_endpoint())
    def test_get_client_for_admin(self):
        self._test_get_client_for_admin()
    def test_get_client_for_admin_with_id(self):
        self._test_get_client_for_admin(use_id=True)
    def test_get_client_for_admin_context(self):
        self._test_get_client_for_admin(admin_context=True)
    def test_get_client_for_admin_context_with_id(self):
        self._test_get_client_for_admin(use_id=True, admin_context=True)
|
{
"content_hash": "c61b0de9f1a94a13816cdbe1d6a8f323",
"timestamp": "",
"source": "github",
"line_count": 4200,
"max_line_length": 79,
"avg_line_length": 46.86571428571428,
"alnum_prop": 0.5426649596618505,
"repo_name": "HybridF5/nova",
"id": "8c835ec68e5dd81b6a196a2fac1eade9c6bdaac1",
"size": "197473",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "nova/tests/unit/network/test_neutronv2.py",
"mode": "33188",
"license": "apache-2.0",
"language": [],
"symlink_target": ""
}
|
"""
Server API
Reference for Server API (REST/Json)
OpenAPI spec version: 2.0.21
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kinow_client
from kinow_client.rest import ApiException
from kinow_client.models.extract_id_list import ExtractIDList
class TestExtractIDList(unittest.TestCase):
    """ ExtractIDList unit test stubs """
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def testExtractIDList(self):
        """
        Test ExtractIDList

        The generated stub constructed the model without asserting
        anything; assert that construction succeeds and yields an object.
        """
        model = kinow_client.models.extract_id_list.ExtractIDList()
        self.assertIsNotNone(model)
# Allow running this test module directly.
if __name__ == "__main__":
    unittest.main()
|
{
"content_hash": "fd70333159e04730fcafab5c3c4fb810",
"timestamp": "",
"source": "github",
"line_count": 40,
"max_line_length": 68,
"avg_line_length": 18.525,
"alnum_prop": 0.6612685560053981,
"repo_name": "kinow-io/kinow-python-sdk",
"id": "a1aa456ca3fde67edfce9fa8c76ac2aea3dc6892",
"size": "758",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "test/test_extract_id_list.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "4659182"
},
{
"name": "Shell",
"bytes": "1666"
}
],
"symlink_target": ""
}
|
import Tkinter
from PIL import ImageTk, Image
import DrawMain
# This class creates window that allows the user to choose a wallpaper pattern
class ChooseWallpaper:
    """Window that lets the user pick one of the seven wallpaper patterns.

    Clicking a preview image calls ``start()`` with the pattern index,
    which launches the drawing window and closes this chooser.
    """

    # One entry per selectable wallpaper group:
    # (image file, image x, image y, label x, label y, label text).
    # List order defines the pattern index passed to DrawMain.Draw().
    _PATTERNS = [
        ('Resources/wp1.bmp', 3, 3, 131, 170, 'p1'),
        ('Resources/wcm.bmp', 262, 3, 390, 170, 'cm'),
        ('Resources/wp2.bmp', 3, 180, 131, 348, 'p2'),
        ('Resources/wpmm.bmp', 262, 180, 390, 348, 'pmm'),
        ('Resources/wpm.bmp', 3, 358, 131, 525, 'pm'),
        ('Resources/wpmg.bmp', 262, 358, 390, 525, 'pmg'),
        ('Resources/wcmm.bmp', 136, 533, 256, 700, 'cmm'),
    ]

    def __init__(self, meta):
        """Create the chooser window and block in the Tk main loop."""
        self.root = Tkinter.Toplevel()
        self.meta = meta
        self.cv = Tkinter.Canvas(self.root, width=521, height=710, highlightthickness=0, bd=0)
        self.cv.pack()
        # Keep PhotoImage references alive; Tkinter only holds weak ties
        # to images, and a collected image renders as a blank canvas item.
        self.photos = []
        for index, (filename, img_x, img_y, txt_x, txt_y, label) in enumerate(self._PATTERNS):
            self.photos.append(ImageTk.PhotoImage(Image.open(filename)))
            image_id = self.cv.create_image(img_x, img_y, image=self.photos[-1], anchor=Tkinter.NW)
            self.cv.create_text(txt_x, txt_y, text=label, fill='slate gray')
            # Bind the index as a default argument so each lambda captures
            # its own value (avoids the late-binding closure pitfall).
            self.cv.tag_bind(image_id, '<Button-1>', lambda e, i=index: self.start(i))
        self.root.mainloop()

    def start(self, i):
        """Launch the drawing window for wallpaper group *i* and close this window."""
        DrawMain.Draw(1, i, self.meta)
        self.root.destroy()
|
{
"content_hash": "4ad8494d23a71f98a828acb853d339dd",
"timestamp": "",
"source": "github",
"line_count": 56,
"max_line_length": 94,
"avg_line_length": 47.357142857142854,
"alnum_prop": 0.6376319758672699,
"repo_name": "mntalateyya/Shapes_Studio",
"id": "441e550c9cba7b741a1e581ad44dea8829dcb581",
"size": "2652",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "ChooseWallpaper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "60711"
}
],
"symlink_target": ""
}
|
"""
.. module: cloudaux.gcp.gce.network
:platform: Unix
:copyright: (c) 2016 by Google Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Tom Melendez (@supertom) <supertom@google.com>
"""
from cloudaux.gcp.utils import gce_list, gce_list_aggregated
from cloudaux.gcp.decorators import gcp_conn
@gcp_conn('gce')
def list_networks(client=None, **kwargs):
    """List all GCE networks visible to *client*.

    :rtype: ``list``
    """
    return gce_list(service=client.networks(), **kwargs)
@gcp_conn('gce')
def list_subnetworks(client=None, **kwargs):
    """List GCE subnetworks, aggregated across all regions.

    :rtype: ``list``
    """
    return gce_list_aggregated(
        service=client.subnetworks(), key_name='subnetworks', **kwargs)
@gcp_conn('gce')
def get_network(client=None, **kwargs):
    """Fetch a single network resource.

    Requires ``kwargs['project']`` and ``kwargs['Network']``.
    """
    request = client.networks().get(
        project=kwargs['project'], network=kwargs['Network'])
    return request.execute()
@gcp_conn('gce')
def get_subnetwork(client=None, **kwargs):
    """Fetch a single subnetwork resource.

    Requires ``kwargs['project']``, ``kwargs['Subnetwork']`` and
    ``kwargs['Region']``.
    """
    request = client.subnetworks().get(
        project=kwargs['project'],
        subnetwork=kwargs['Subnetwork'],
        region=kwargs['Region'])
    return request.execute()
|
{
"content_hash": "0703cb8278dbd59669262f08f147ea67",
"timestamp": "",
"source": "github",
"line_count": 44,
"max_line_length": 79,
"avg_line_length": 28.045454545454547,
"alnum_prop": 0.6256077795786061,
"repo_name": "Netflix-Skunkworks/cloudaux",
"id": "5efd290ddba0c4b7438f86c46b359c6d3ec3b45a",
"size": "1234",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "cloudaux/gcp/gce/network.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "243003"
}
],
"symlink_target": ""
}
|
import os
import unittest
import csv
import numpy
from __main__ import vtk, qt, ctk, slicer
from slicer.ScriptedLoadableModule import *
#
# NeedleGuideTemplate
#
class NeedleGuideTemplate(ScriptedLoadableModule):
  """Module descriptor for the NeedleGuideTemplate Slicer extension.

  Uses ScriptedLoadableModule base class, available at:
  https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
  """
  def __init__(self, parent):
    """Register the module metadata (title, category, help text) with Slicer."""
    ScriptedLoadableModule.__init__(self, parent)
    self.parent.title = "NeedleGuideTemplate" # TODO make this more human readable by adding spaces
    self.parent.categories = ["IGT"]
    self.parent.dependencies = []
    self.parent.contributors = ["Junichi Tokuda (Brigham and Women's Hospital)"] # replace with "Firstname Lastname (Organization)"
    self.parent.helpText = """
    The NeedleGuideTemlpate module guides image-guided percutaneous interventions with needle-guide template.
    The module calculates identify the needle guide hole and needle insertion depth to reach to the target
    specified on the image.
    """
    self.parent.acknowledgementText = """
    This module is developed by Junichi Tokuda with support from NIH grants P41EB015898 (PI: Jolesz, Tempany) and R01CA111288 (PI: Tempany)
    based on ScriptedLoadableModule template developed by Jean-Christophe Fillion-Robin, Kitware Inc. and Steve Pieper, Isomics, Inc.
    and was partially funded by NIH grant 3P41RR013218-12S1.
    """
#
# NeedleGuideTemplateWidget
#
class NeedleGuideTemplateWidget(ScriptedLoadableModuleWidget):
"""Uses ScriptedLoadableModuleWidget base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def setup(self):
ScriptedLoadableModuleWidget.setup(self)
# Instantiate and connect widgets ...
self.logic = NeedleGuideTemplateLogic(None)
#--------------------------------------------------
# For debugging
#
# Reload and Test area
reloadCollapsibleButton = ctk.ctkCollapsibleButton()
reloadCollapsibleButton.text = "Reload && Test"
self.layout.addWidget(reloadCollapsibleButton)
reloadFormLayout = qt.QFormLayout(reloadCollapsibleButton)
reloadCollapsibleButton.collapsed = True
# reload button
# (use this during development, but remove it when delivering
# your module to users)
self.reloadButton = qt.QPushButton("Reload")
self.reloadButton.toolTip = "Reload this module."
self.reloadButton.name = "NeedleGuideTemlpate Reload"
reloadFormLayout.addWidget(self.reloadButton)
self.reloadButton.connect('clicked()', self.onReload)
#
#--------------------------------------------------
#--------------------------------------------------
#
# Configuration
#
configCollapsibleButton = ctk.ctkCollapsibleButton()
configCollapsibleButton.text = "Configuration"
self.layout.addWidget(configCollapsibleButton)
configFormLayout = qt.QFormLayout(configCollapsibleButton)
configCollapsibleButton.collapsed = True
templateConfigPathLayout = qt.QHBoxLayout()
self.templateConfigPathEdit = qt.QLineEdit()
self.templateConfigPathEdit.text = ""
self.templateConfigPathEdit.readOnly = False
self.templateConfigPathEdit.frame = True
self.templateConfigPathEdit.styleSheet = "QLineEdit { background:transparent; }"
self.templateConfigPathEdit.cursor = qt.QCursor(qt.Qt.IBeamCursor)
templateConfigPathLayout.addWidget(self.templateConfigPathEdit)
self.templateConfigButton = qt.QPushButton("...")
self.templateConfigButton.toolTip = "Choose a template configuration file"
self.templateConfigButton.enabled = True
self.templateConfigButton.connect('clicked(bool)', self.onTemplateConfigButton)
templateConfigPathLayout.addWidget(self.templateConfigButton)
configFormLayout.addRow("Template Config File: ", templateConfigPathLayout)
fiducialConfigPathLayout = qt.QHBoxLayout()
self.fiducialConfigPathEdit = qt.QLineEdit()
self.fiducialConfigPathEdit.text = ""
self.fiducialConfigPathEdit.readOnly = False
self.fiducialConfigPathEdit.frame = True
self.fiducialConfigPathEdit.styleSheet = "QLineEdit { background:transparent; }"
self.fiducialConfigPathEdit.cursor = qt.QCursor(qt.Qt.IBeamCursor)
fiducialConfigPathLayout.addWidget(self.fiducialConfigPathEdit)
self.fiducialConfigButton = qt.QPushButton("...")
self.fiducialConfigButton.toolTip = "Choose a fiducial configuration file"
self.fiducialConfigButton.enabled = True
self.fiducialConfigButton.connect('clicked(bool)', self.onFiducialConfigButton)
fiducialConfigPathLayout.addWidget(self.fiducialConfigButton)
configFormLayout.addRow("Fiducial Config File: ", fiducialConfigPathLayout)
#
# Main Area
#
mainCollapsibleButton = ctk.ctkCollapsibleButton()
mainCollapsibleButton.text = "Main"
self.layout.addWidget(mainCollapsibleButton)
# Layout within the dummy collapsible button
#mainFormLayout = qt.QFormLayout(mainCollapsibleButton)
mainLayout = qt.QVBoxLayout(mainCollapsibleButton)
mainFormFrame = qt.QFrame()
mainFormLayout = qt.QFormLayout(mainFormFrame)
mainLayout.addWidget(mainFormFrame)
self.showTemplateCheckBox = qt.QCheckBox()
self.showTemplateCheckBox.checked = 0
self.showTemplateCheckBox.setToolTip("Show 3D model of the template")
mainFormLayout.addRow("Show Template:", self.showTemplateCheckBox)
self.showTemplateCheckBox.connect('toggled(bool)', self.onShowTemplate)
self.showFiducialCheckBox = qt.QCheckBox()
self.showFiducialCheckBox.checked = 0
self.showFiducialCheckBox.setToolTip("Show 3D model of the fiducial")
mainFormLayout.addRow("Show Fiducial:", self.showFiducialCheckBox)
self.showFiducialCheckBox.connect('toggled(bool)', self.onShowFiducial)
self.showTrajectoriesCheckBox = qt.QCheckBox()
self.showTrajectoriesCheckBox.checked = 0
self.showTrajectoriesCheckBox.setToolTip("Show 3D model of the fiducial")
mainFormLayout.addRow("Show Trajectories:", self.showTrajectoriesCheckBox)
self.showTrajectoriesCheckBox.connect('toggled(bool)', self.onShowTrajectories)
#
# input volume selector
#
self.targetFiducialsSelector = slicer.qMRMLNodeComboBox()
self.targetFiducialsSelector.nodeTypes = ( ("vtkMRMLMarkupsFiducialNode"), "" )
self.targetFiducialsSelector.selectNodeUponCreation = True
self.targetFiducialsSelector.addEnabled = True
self.targetFiducialsSelector.removeEnabled = True
self.targetFiducialsSelector.noneEnabled = False
self.targetFiducialsSelector.showHidden = False
self.targetFiducialsSelector.showChildNodeTypes = False
self.targetFiducialsSelector.setMRMLScene( slicer.mrmlScene )
self.targetFiducialsSelector.setToolTip( "Select Markups for targets" )
mainFormLayout.addRow("Targets: ", self.targetFiducialsSelector)
self.targetFiducialsNode = None
self.targetFiducialsSelector.connect("currentNodeChanged(vtkMRMLNode*)",
self.onFiducialsSelected)
#
# Target List Table
#
self.table = qt.QTableWidget(1, 4)
self.table.setSelectionBehavior(qt.QAbstractItemView.SelectRows)
self.table.setSelectionMode(qt.QAbstractItemView.SingleSelection)
#self.table.setSizePolicy(qt.QSizePolicy.Expanding, qt.QSizePolicy.Expanding)
self.headers = ["Name", "Hole", "Depth (mm)", "Position (RAS)"]
self.table.setHorizontalHeaderLabels(self.headers)
self.table.horizontalHeader().setStretchLastSection(True)
mainLayout.addWidget(self.table)
self.onFiducialsSelected()
##
## input volume selector
##
#self.inputSelector = slicer.qMRMLNodeComboBox()
#self.inputSelector.nodeTypes = ( ("vtkMRMLScalarVolumeNode"), "" )
#self.inputSelector.addAttribute( "vtkMRMLScalarVolumeNode", "LabelMap", 0 )
#self.inputSelector.selectNodeUponCreation = True
#self.inputSelector.addEnabled = False
#self.inputSelector.removeEnabled = False
#self.inputSelector.noneEnabled = False
#self.inputSelector.showHidden = False
#self.inputSelector.showChildNodeTypes = False
#self.inputSelector.setMRMLScene( slicer.mrmlScene )
#self.inputSelector.setToolTip( "Pick the input to the algorithm." )
#mainFormLayout.addRow("Input Volume: ", self.inputSelector)
##
## scale factor for screen shots
##
#self.screenshotScaleFactorSliderWidget = ctk.ctkSliderWidget()
#self.screenshotScaleFactorSliderWidget.singleStep = 1.0
#self.screenshotScaleFactorSliderWidget.minimum = 1.0
#self.screenshotScaleFactorSliderWidget.maximum = 50.0
#self.screenshotScaleFactorSliderWidget.value = 1.0
#self.screenshotScaleFactorSliderWidget.setToolTip("Set scale factor for the screen shots.")
#mainFormLayout.addRow("Screenshot scale factor", self.screenshotScaleFactorSliderWidget)
##
## Apply Button
##
#self.applyButton = qt.QPushButton("Apply")
#self.applyButton.toolTip = "Run the algorithm."
#self.applyButton.enabled = False
#mainFormLayout.addRow(self.applyButton)
# connections
#self.applyButton.connect('clicked(bool)', self.onApplyButton)
#self.inputSelector.connect("currentNodeChanged(vtkMRMLNode*)", self.onSelect)
# Add vertical spacer
self.layout.addStretch(1)
def cleanup(self):
pass
def updateTable(self):
print "updateTable() is called"
if not self.targetFiducialsNode:
self.table.clear()
self.table.setHorizontalHeaderLabels(self.headers)
else:
self.tableData = []
nOfControlPoints = self.targetFiducialsNode.GetNumberOfFiducials()
if self.table.rowCount != nOfControlPoints:
self.table.setRowCount(nOfControlPoints)
for i in range(nOfControlPoints):
label = self.targetFiducialsNode.GetNthFiducialLabel(i)
pos = [0.0, 0.0, 0.0]
self.targetFiducialsNode.GetNthFiducialPosition(i,pos)
(indexX, indexY, depth, inRange) = self.logic.computeNearestPath(pos)
posstr = '(%.3f, %.3f, %.3f)' % (pos[0], pos[1], pos[2])
cellLabel = qt.QTableWidgetItem(label)
cellIndex = qt.QTableWidgetItem('(%s, %s)' % (indexX, indexY))
cellDepth = None
if inRange:
cellDepth = qt.QTableWidgetItem('%.3f' % depth)
else:
cellDepth = qt.QTableWidgetItem('(%.3f)' % depth)
cellPosition = qt.QTableWidgetItem(posstr)
row = [cellLabel, cellIndex, cellDepth, cellPosition]
self.table.setItem(i, 0, row[0])
self.table.setItem(i, 1, row[1])
self.table.setItem(i, 2, row[2])
self.table.setItem(i, 3, row[3])
self.tableData.append(row)
self.table.show()
def onFiducialsSelected(self):
# Remove observer if previous node exists
if self.targetFiducialsNode and self.tag:
self.targetFiducialsNode.RemoveObserver(self.tag)
# Update selected node, add observer, and update control points
if self.targetFiducialsSelector.currentNode():
self.targetFiducialsNode = self.targetFiducialsSelector.currentNode()
self.tag = self.targetFiducialsNode.AddObserver('ModifiedEvent', self.onFiducialsUpdated)
self.updateTable()
def onFiducialsUpdated(self,caller,event):
if caller.IsA('vtkMRMLMarkupsFiducialNode') and event == 'ModifiedEvent':
self.updateTable()
def onSelect(self):
#self.applyButton.enabled = self.inputSelector.currentNode() and self.outputSelector.currentNode()
pass
def onApplyButton(self):
logic = NeedleGuideTemplateLogic()
enableScreenshotsFlag = self.enableScreenshotsFlagCheckBox.checked
#screenshotScaleFactor = int(self.screenshotScaleFactorSliderWidget.value)
print("Run the algorithm")
def onReload(self, moduleName="NeedleGuideTemplate"):
# Generic reload method for any scripted module.
# ModuleWizard will subsitute correct default moduleName.
globals()[moduleName] = slicer.util.reloadScriptedModule(moduleName)
def onTemplateConfigButton(self):
path = self.templateConfigPathEdit.text
path = qt.QFileDialog.getOpenFileName(None, 'Open Template File', path, '*.csv')
self.templateConfigPathEdit.setText(path)
self.logic.loadTemplateConfigFile(path)
self.updateTable()
def onFiducialConfigButton(self):
path = self.fiducialConfigPathEdit.text
filename = qt.QFileDialog.getOpenFileName(None, 'Open Fiducial File', path, '.csv')
self.fiducialConfigPathEdit.setText(path)
def onShowTemplate(self):
print "onShowTemplate(self)"
self.logic.setTemplateVisibility(self.showTemplateCheckBox.checked)
def onShowFiducial(self):
pass
def onShowTrajectories(self):
print "onTrajectories(self)"
self.logic.setNeedlePathVisibility(self.showTrajectoriesCheckBox.checked)
#
# NeedleGuideTemplateLogic
#
class NeedleGuideTemplateLogic(ScriptedLoadableModuleLogic):
"""This class should implement all the actual
computation done by your module. The interface
should be such that other python code can import
this class and make use of the functionality without
requiring an instance of the Widget.
Uses ScriptedLoadableModuleLogic base class, available at:
https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
"""
def __init__(self, parent):
ScriptedLoadableModuleLogic.__init__(self, parent)
self.fiducialName = ''
self.fiducialConfig = []
self.templateName = ''
self.templateConfig = []
self.templateIndex = []
self.templateMaxDepth = []
self.templateModelNodeID = ''
self.needlePathModelNodeID = ''
self.templatePathOrigins = [] ## Origins of needle paths
self.templatePathVectors = [] ## Normal vectors of needle paths
self.pathOrigins = [] ## Origins of needle paths (after transformation by parent transform node)
self.pathVectors = [] ## Normal vectors of needle paths (after transformation by parent transform node)
def loadFiducialConfigFile(self, path):
reader = csv.reader(open(path, 'rb'))
def loadTemplateConfigFile(self, path):
self.templateIndex = []
self.templateConfig = []
header = False
reader = csv.reader(open(path, 'rb'))
try:
for row in reader:
if header:
self.templateIndex.append(row[0:2])
self.templateConfig.append([float(row[2]), float(row[3]), float(row[4]),
float(row[5]), float(row[6]), float(row[7]),
float(row[8])])
else:
self.templateName = row[0]
header = True
except csv.Error as e:
print('file %s, line %d: %s' % (filename, reader.line_num, e))
self.createTemplateModel()
self.setTemplateVisibility(0)
self.setNeedlePathVisibility(0)
self.updateTemplateVectors()
def createTemplateModel(self):
self.templatePathVectors = []
self.templatePathOrigins = []
tempModelNode = slicer.mrmlScene.GetNodeByID(self.templateModelNodeID)
if tempModelNode == None:
tempModelNode = slicer.vtkMRMLModelNode()
tempModelNode.SetName('NeedleGuideTemplate')
slicer.mrmlScene.AddNode(tempModelNode)
self.templateModelNodeID = tempModelNode.GetID()
dnode = slicer.vtkMRMLModelDisplayNode()
#dnode.SetColor(self.ModelColor)
slicer.mrmlScene.AddNode(dnode)
tempModelNode.SetAndObserveDisplayNodeID(dnode.GetID())
self.modelNodetag = tempModelNode.AddObserver(slicer.vtkMRMLTransformableNode.TransformModifiedEvent,
self.onTemplateTransformUpdated)
pathModelNode = slicer.mrmlScene.GetNodeByID(self.needlePathModelNodeID)
if pathModelNode == None:
pathModelNode = slicer.vtkMRMLModelNode()
pathModelNode.SetName('NeedleGuideNeedlePath')
slicer.mrmlScene.AddNode(pathModelNode)
self.needlePathModelNodeID = pathModelNode.GetID()
dnode = slicer.vtkMRMLModelDisplayNode()
slicer.mrmlScene.AddNode(dnode)
pathModelNode.SetAndObserveDisplayNodeID(dnode.GetID())
pathModelAppend = vtk.vtkAppendPolyData()
tempModelAppend = vtk.vtkAppendPolyData()
for row in self.templateConfig:
p1 = numpy.array(row[0:3])
p2 = numpy.array(row[3:6])
tempLineSource = vtk.vtkLineSource()
tempLineSource.SetPoint1(p1)
tempLineSource.SetPoint2(p2)
tempTubeFilter = vtk.vtkTubeFilter()
tempTubeFilter.SetInputConnection(tempLineSource.GetOutputPort())
tempTubeFilter.SetRadius(1.0)
tempTubeFilter.SetNumberOfSides(18)
tempTubeFilter.CappingOn()
tempTubeFilter.Update()
pathLineSource = vtk.vtkLineSource()
v = p2-p1
nl = numpy.linalg.norm(v)
n = v/nl # normal vector
l = row[6]
p3 = p1 + l * n
pathLineSource.SetPoint1(p1)
pathLineSource.SetPoint2(p3)
self.templatePathOrigins.append([row[0], row[1], row[2], 1.0])
self.templatePathVectors.append([n[0], n[1], n[2], 1.0])
self.templateMaxDepth.append(row[6])
pathTubeFilter = vtk.vtkTubeFilter()
pathTubeFilter.SetInputConnection(pathLineSource.GetOutputPort())
pathTubeFilter.SetRadius(0.8)
pathTubeFilter.SetNumberOfSides(18)
pathTubeFilter.CappingOn()
pathTubeFilter.Update()
if vtk.VTK_MAJOR_VERSION <= 5:
tempModelAppend.AddInput(tempTubeFilter.GetOutput());
pathModelAppend.AddInput(pathTubeFilter.GetOutput());
else:
tempModelAppend.AddInputData(tempTubeFilter.GetOutput());
pathModelAppend.AddInputData(pathTubeFilter.GetOutput());
tempModelAppend.Update()
tempModelNode.SetAndObservePolyData(tempModelAppend.GetOutput())
pathModelAppend.Update()
pathModelNode.SetAndObservePolyData(pathModelAppend.GetOutput())
def setModelVisibilityByID(self, id, visible):
mnode = slicer.mrmlScene.GetNodeByID(id)
if mnode != None:
dnode = mnode.GetDisplayNode()
if dnode != None:
dnode.SetVisibility(visible)
def setModelSliceIntersectionVisibilityByID(self, id, visible):
mnode = slicer.mrmlScene.GetNodeByID(id)
if mnode != None:
dnode = mnode.GetDisplayNode()
if dnode != None:
dnode.SetSliceIntersectionVisibility(visible)
def setTemplateVisibility(self, visibility):
self.setModelVisibilityByID(self.templateModelNodeID, visibility)
def setNeedlePathVisibility(self, visibility):
self.setModelVisibilityByID(self.needlePathModelNodeID, visibility)
self.setModelSliceIntersectionVisibilityByID(self.needlePathModelNodeID, visibility)
#def onFiducialsUpdated(self,caller,event):
def onTemplateTransformUpdated(self,caller,event):
print 'onTemplateTransformUpdated()'
self.updateTemplateVectors()
def updateTemplateVectors(self):
print 'updateTemplateVectors()'
mnode = slicer.mrmlScene.GetNodeByID(self.templateModelNodeID)
if mnode == None:
return 0
tnode = mnode.GetParentTransformNode()
trans = vtk.vtkMatrix4x4()
if tnode != None:
tnode.GetMatrixTransformToWorld(trans);
else:
trans.Identity()
# Calculate offset
zero = [0.0, 0.0, 0.0, 1.0]
offset = []
offset = trans.MultiplyDoublePoint(zero)
self.pathOrigins = []
self.pathVectors = []
i = 0
for orig in self.templatePathOrigins:
torig = trans.MultiplyDoublePoint(orig)
self.pathOrigins.append(numpy.array(torig[0:3]))
vec = self.templatePathVectors[i]
tvec = trans.MultiplyDoublePoint(vec)
self.pathVectors.append(numpy.array([tvec[0]-offset[0], tvec[1]-offset[1], tvec[2]-offset[2]]))
i = i + 1
def computeNearestPath(self, pos):
# Identify the nearest path and return the index for self.templateConfig[] and depth
# (index_x, index_y, depth, inRange) = computeNearestPath()
p = numpy.array(pos)
minMag2 = numpy.Inf
minDepth = 0.0
minIndex = -1
## TODO: Can following loop can be described by matrix calculation?
i = 0
for orig in self.pathOrigins:
vec = self.pathVectors[i]
op = p - orig
aproj = numpy.inner(op, vec)
perp = op-aproj*vec
mag2 = numpy.vdot(perp,perp) # magnitude^2
if mag2 < minMag2:
minMag2 = mag2
minIndex = i
minDepth = aproj
i = i + 1
indexX = '--'
indexY = '--'
inRange = False
if minIndex >= 0:
indexX = self.templateIndex[minIndex][0]
indexY = self.templateIndex[minIndex][1]
if minDepth > 0 and minDepth < self.templateMaxDepth[minIndex]:
inRange = True
return (indexX, indexY, minDepth, inRange)
class NeedleGuideTemplateTest(ScriptedLoadableModuleTest):
  """
  This is the test case for your scripted module.
  Uses ScriptedLoadableModuleTest base class, available at:
  https://github.com/Slicer/Slicer/blob/master/Base/Python/slicer/ScriptedLoadableModule.py
  """
  def setUp(self):
    """ Do whatever is needed to reset the state - typically a scene clear will be enough.
    """
    slicer.mrmlScene.Clear(0)
  def runTest(self):
    """Run as few or as many tests as needed here.
    """
    self.setUp()
    self.test_NeedleGuideTemplate1()
  def test_NeedleGuideTemplate1(self):
    """ Ideally you should have several levels of tests. At the lowest level
    tests should exercise the functionality of the logic with different inputs
    (both valid and invalid). At higher levels your tests should emulate the
    way the user would interact with your code and confirm that it still works
    the way you intended.
    One of the most important features of the tests is that it should alert other
    developers when their changes will have an impact on the behavior of your
    module. For example, if a developer removes a feature that you depend on,
    your test should break so they know that the feature is needed.
    """
    self.delayDisplay("Starting the test")
    #
    # first, get some data
    #
    import urllib
    downloads = (
        ('http://slicer.kitware.com/midas3/download?items=5767', 'FA.nrrd', slicer.util.loadVolume),
        )
    # Download each fixture only once; re-download if a previous fetch left an empty file.
    for url,name,loader in downloads:
      filePath = slicer.app.temporaryPath + '/' + name
      if not os.path.exists(filePath) or os.stat(filePath).st_size == 0:
        print('Requesting download %s from %s...\n' % (name, url))
        urllib.urlretrieve(url, filePath)
      if loader:
        print('Loading %s...\n' % (name,))
        loader(filePath)
    self.delayDisplay('Finished with download and loading\n')
    volumeNode = slicer.util.getNode(pattern="FA")
    logic = NeedleGuideTemplateLogic()
    # NOTE(review): hasImageData() is not defined on NeedleGuideTemplateLogic
    # in this file — this assertion would raise AttributeError; confirm whether
    # the method is inherited or missing.
    self.assertTrue( logic.hasImageData(volumeNode) )
    self.delayDisplay('Test passed!')
|
{
"content_hash": "23cbf6b175c6b5bd37b532536de1994f",
"timestamp": "",
"source": "github",
"line_count": 616,
"max_line_length": 139,
"avg_line_length": 37.1948051948052,
"alnum_prop": 0.708711592178771,
"repo_name": "ProstateBRP/NeedleGuideTemplate",
"id": "2d75de040380d046e8b874b3cfb18dac819f3918",
"size": "22912",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "NeedleGuideTemplate/NeedleGuideTemplate.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CMake",
"bytes": "90"
},
{
"name": "Python",
"bytes": "22912"
}
],
"symlink_target": ""
}
|
from project.utils import auth
from flask import Blueprint, request, redirect, Response, abort
from project import db, session
from project.models import User, Group
from datetime import datetime, date
import json
import jsonpickle
import project.constants as constants
from werkzeug.exceptions import NotFound
# Blueprint grouping the bathing-related API endpoints defined below.
bathing = Blueprint('bathing', __name__)
@bathing.route('/api/start_bathing', methods=['POST'])
def start_bathing():
    """Mark the requesting user as the one currently bathing in their group.

    Responds with JSON flags: whether somebody else is already bathing and
    whether the caller is already the registered bather.  Aborts with 404
    for an unknown user and 400 when the user id matches multiple groups.
    """
    user_id = request.json['user_id']
    group_id = request.json['group_id']
    try:
        user = User.query.get_or_404((user_id, group_id))
    except NotFound:
        # Fall back to resolving the user by id alone.
        matches = User.query.filter_by(id=user_id).all()
        if not matches:
            abort(404)
        if len(matches) != 1:
            abort(400)
        user = matches[0]
        group_id = user.groupID
    group = Group.query.get_or_404(group_id)
    now = datetime.now()
    if group.currentBathingID == user_id:
        return json.dumps({"other_person": False, "same_person": True})
    if group.currentBathingID != constants.DEFAULT_BATHING_ID:
        return json.dumps({"other_person": True, "same_person": False})
    group.currentBathingID = user_id
    group.currentBathingStart = now
    db.session.commit()
    return json.dumps({"other_person": False, "same_person": False})
@bathing.route('/api/stop_bathing', methods=['POST'])
def stop_bathing():
    """Clear the bathing flag for the caller's group.

    Only the user who started bathing may stop it; the JSON response says
    whether anyone was bathing and whether the caller was that person.
    """
    user_id = request.json['user_id']
    group_id = request.json['group_id']
    try:
        user = User.query.get_or_404((user_id, group_id))
    except NotFound:
        # Fall back to resolving the user by id alone.
        matches = User.query.filter_by(id=user_id).all()
        if not matches:
            abort(404)
        if len(matches) != 1:
            abort(400)
        user = matches[0]
        group_id = user.groupID
    group = Group.query.get_or_404(group_id)
    if group.currentBathingID == constants.DEFAULT_BATHING_ID:
        return json.dumps({'bathing': False})
    if group.currentBathingID != user_id:
        return json.dumps({'bathing': True, 'correct_person': False})
    group.currentBathingID = constants.DEFAULT_BATHING_ID
    group.currentBathingStart = None
    db.session.commit()
    return json.dumps({'bathing': True, 'correct_person': True})
@bathing.route('/api/check_bathing', methods=['POST'])
def check_bathing():
    """Report whether anybody in the caller's group is currently bathing.

    When somebody is bathing, the response includes their username and the
    start time formatted as HH:MM.
    """
    user_id = request.json['user_id']
    group_id = request.json['group_id']
    try:
        user = User.query.get_or_404((user_id, group_id))
    except NotFound:
        matches = User.query.filter_by(id=user_id).all()
        if not matches:
            # not registered
            abort(404)
        if len(matches) != 1:
            # in multiple groups
            abort(400)
        user = matches[0]
        group_id = user.groupID
    group = Group.query.get_or_404(group_id)
    if group.currentBathingID == constants.DEFAULT_BATHING_ID:
        result = {"bathing": False}
    else:
        bather = User.query.filter_by(id=group.currentBathingID, groupID=group_id).first()
        result = {"bathing": True,
                  "username": bather.username,
                  "start": group.currentBathingStart.strftime('%H:%M')}
    return jsonpickle.encode(result)
|
{
"content_hash": "f8a3f249cc4417a276ad6112565bb704",
"timestamp": "",
"source": "github",
"line_count": 98,
"max_line_length": 88,
"avg_line_length": 33.41836734693877,
"alnum_prop": 0.6109923664122138,
"repo_name": "lingxz/dinnercore",
"id": "605b48e213dec1ac62c7cbc79956cfc49663e6cc",
"size": "3275",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "project/bathing/bathing.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Python",
"bytes": "42434"
},
{
"name": "Shell",
"bytes": "33"
}
],
"symlink_target": ""
}
|
import os
import binascii
import time
import random
from threading import Event
from traceback import print_exc
from ACEStream.Core.simpledefs import *
from ACEStream.Core.DirectDownload.Storage import Storage
from ACEStream.Core.DirectDownload.Downloader import Downloader
from ACEStream.Core.DirectDownload.VODTransporter import VODTransporter
from ACEStream.Core.Utilities.logger import log, log_exc
# Module-wide switch for the verbose log() calls in this file.
DEBUG = False
class Statistics_Response:
    # NOTE(review): empty placeholder class; presumably attributes are
    # assigned dynamically by callers — confirm at the usage sites.
    pass
class DirectDownload:
def __init__(self, main_url, download_url, dlhash, config, multihandler, fileinfo, resumedata, vodeventcallback, set_error_func, finished_func, failed_func):
self.main_url = main_url
self.download_url = download_url
self.dlhash = dlhash
self.config = config
self.dlmode = config['mode']
self.fileinfo = fileinfo
self.vodeventcallback = vodeventcallback
self.set_error_func = set_error_func
self.finished_func = finished_func
self.failed_func = failed_func
self.download_id = binascii.hexlify(self.dlhash) + '-' + str(long(time.time())) + '-' + str(random.randint(0, 100000))
self.dldoneflag = Event()
self.rawserver = multihandler.newRawServer(dlhash, self.dldoneflag)
if download_url is not None:
url = download_url
else:
url = main_url
self.downloader = Downloader(url, dlhash, self.rawserver, self.failed)
self.voddownload = None
self.storage = None
self.log_prefix = 'dd::' + binascii.hexlify(self.dlhash) + ':'
predownload = self.config.get('predownload', False)
if DEBUG:
log(self.log_prefix + '__init__: predownload', predownload)
if resumedata is None and predownload:
self.downloader.predownload(self.init_predownloaded)
else:
callback = lambda content_length, mimetype: self.init(resumedata, content_length, mimetype)
self.downloader.init(callback)
def init_predownloaded(self, mimetype, filedata):
if DEBUG:
log(self.log_prefix + 'init_predownloaded: mimetype', mimetype, 'len', len(filedata))
if self.dldoneflag.is_set():
if DEBUG:
log(self.log_prefix + 'init_predownloaded: done flag is set, exit')
return
ext = self.guess_extension_from_mimetype(mimetype)
filename = binascii.hexlify(self.dlhash)
if len(ext):
filename += '.' + ext
content_length = len(filedata)
self.fileinfo['filename'] = filename
self.fileinfo['size'] = content_length
self.fileinfo['mimetype'] = mimetype
temp_dir = os.path.join(self.config['buffer_dir'], binascii.hexlify(self.dlhash))
if not os.path.isdir(temp_dir):
os.mkdir(temp_dir)
self.storage = Storage(self.dlhash, self.config, self.fileinfo, temp_dir, None, self.finished_callback, filedata=filedata)
self.downloader.set_storage(self.storage)
self.finished_callback()
if self.dlmode == DLMODE_VOD:
if DEBUG:
log(self.log_prefix + 'init_predownloaded: starting in vod mode, but download is finished: fileinfo', self.fileinfo)
self.vodeventcallback(self.fileinfo, VODEVENT_START, {'complete': True,
'filename': self.storage.get_dest_path(),
'mimetype': self.fileinfo['mimetype'],
'stream': None,
'length': self.storage.get_content_length(),
'bitrate': self.fileinfo['bitrate']})
    def init(self, resumedata = None, content_length = None, mimetype = None):
        """Initialise storage for a (possibly resumed) streaming download.

        When *resumedata* is given, the stored size/mimetype must match what
        the server reports now; otherwise a fresh filename is derived from
        the download hash and mime type.  Starts the downloader unless the
        storage says the file is already complete.
        """
        if DEBUG:
            log(self.log_prefix + 'init: resumedata', resumedata, 'content_length', content_length, 'mimetype', mimetype)
        if self.dldoneflag.is_set():
            if DEBUG:
                log(self.log_prefix + 'init: done flag is set, exit')
            return
        # Probe the server only if the caller did not already do it.
        if content_length is None:
            content_length, mimetype = self.downloader.init()
        if resumedata is not None:
            # Sanity-check that the remote file is still the same one we
            # checkpointed; a mismatch would corrupt the resumed data.
            if content_length != resumedata['size']:
                raise Exception('content length differs from resumedata')
            if mimetype != resumedata['mimetype']:
                raise Exception('mime type differs from resumedata')
            filename = resumedata['filename']
            duration = resumedata.get('duration', None)
            if duration:
                # Average bitrate in bytes/second (py2 integer division).
                bitrate = content_length / duration
                self.fileinfo['duration'] = duration
                self.fileinfo['bitrate'] = bitrate
                if DEBUG:
                    log(self.log_prefix + '__init__: got duration from resumedata: main_url', self.main_url, 'duration', duration, 'bitrate', bitrate)
        else:
            ext = self.guess_extension_from_mimetype(mimetype)
            filename = binascii.hexlify(self.dlhash)
            if len(ext):
                filename += '.' + ext
        self.fileinfo['filename'] = filename
        self.fileinfo['size'] = content_length
        self.fileinfo['mimetype'] = mimetype
        # Per-download working directory under the configured buffer dir.
        temp_dir = os.path.join(self.config['buffer_dir'], binascii.hexlify(self.dlhash))
        if not os.path.isdir(temp_dir):
            os.mkdir(temp_dir)
        self.storage = Storage(self.dlhash, self.config, self.fileinfo, temp_dir, resumedata, self.finished_callback)
        self.downloader.set_storage(self.storage)
        completed = self.storage.is_finished()
        if completed:
            self.finished_callback()
        if self.dlmode == DLMODE_VOD:
            if completed:
                if DEBUG:
                    log(self.log_prefix + '__init__: starting in vod mode, but download is finished: fileinfo', self.fileinfo)
                # NOTE(review): assumes fileinfo['bitrate'] is set by now
                # (resumedata duration or got_duration) -- verify.
                self.vodeventcallback(self.fileinfo, VODEVENT_START, {'complete': True,
                 'filename': self.storage.get_dest_path(),
                 'mimetype': self.fileinfo['mimetype'],
                 'stream': None,
                 'length': self.storage.get_content_length(),
                 'bitrate': self.fileinfo['bitrate']})
            else:
                if DEBUG:
                    log(self.log_prefix + '__init__: starting in vod mode: fileinfo', self.fileinfo)
                # Stream progressively: the VOD transporter watches storage
                # for newly arrived data.
                self.voddownload = VODTransporter(self, self.dlhash, self.fileinfo, self.vodeventcallback)
                self.storage.add_got_data_observer(self.voddownload.got_data_observer)
        if not completed:
            self.downloader.start()
    def get_download_id(self):
        """Return the unique id assigned to this download at construction."""
        return self.download_id
def guess_extension_from_mimetype(self, mimetype):
if mimetype is None:
mimetype = ''
if mimetype == 'video/x-msvideo':
ext = 'avi'
elif mimetype == 'video/mp4':
ext = 'mp4'
elif mimetype == 'video/x-matroska':
ext = 'mkv'
elif mimetype == 'video/x-m4v':
ext = 'm4v'
elif mimetype == 'video/quicktime':
ext = 'mov'
elif mimetype == 'video/x-sgi-movie':
ext = 'movie'
elif mimetype == 'video/mpeg':
ext = 'mpg'
elif mimetype == 'application/ogg' or mimetype == 'video/ogg':
ext = 'ogg'
elif mimetype == 'video/x-flv':
ext = 'flv'
elif mimetype == 'video/webm':
ext = 'webm'
elif mimetype == 'video/x-ms-wmv':
ext = 'wmv'
else:
if DEBUG:
log(self.log_prefix + 'guess_extension_from_mimetype: unknown mimetype', mimetype)
ext = 'mpg'
if DEBUG:
log(self.log_prefix + 'guess_extension_from_mimetype: mimetype', mimetype, 'ext', ext)
return ext
    def finished_callback(self):
        """Called by Storage once all data is on disk.

        Completion work is deferred onto the raw-server thread: first the
        VOD transport is finalised, then the external finished_func is told.
        """
        if DEBUG:
            log(self.log_prefix + 'finished_callback: url', self.main_url)

        def _finished():
            # Runs on the raw-server thread via add_task below.
            if self.voddownload is not None:
                self.voddownload.complete()
            if self.finished_func is not None:
                self.finished_func(self.main_url, self.download_url, self.dlhash, self.fileinfo)

        self.rawserver.add_task(_finished, 0.0)
    def failed(self, err):
        """Handle a fatal download error.

        Stops any VOD transport, records the error via set_error_func, and
        notifies the external failed_func on the raw-server thread.
        """
        if DEBUG:
            log(self.log_prefix + 'failed: url', self.main_url, 'err', err)
        if self.voddownload is not None:
            self.voddownload.shutdown()
            self.voddownload = None
        self.set_error_func(err)

        def _failed():
            # Runs on the raw-server thread; exceptions from the user
            # callback are deliberately swallowed (best-effort notification).
            if self.failed_func is not None:
                try:
                    self.failed_func(err)
                except:
                    if DEBUG:
                        print_exc()

        self.rawserver.add_task(_failed, 0.0)
    def got_duration(self, duration, from_player = True):
        """Record the media duration (seconds) once it becomes known.

        Derives the average bitrate from the known file size and forwards it
        to the VOD transport.  A duration that conflicts with one already
        stored is only logged, never overwritten.
        NOTE(review): *from_player* is currently unused -- confirm callers.
        """
        if DEBUG:
            log(self.log_prefix + 'got_duration: main_url', self.main_url, 'duration', duration, 'fileinfo', self.fileinfo)
        if duration <= 0:
            if DEBUG:
                log(self.log_prefix + 'got_duration: bad duration')
            return
        cur_duration = self.fileinfo.get('duration', None)
        if cur_duration is not None:
            if cur_duration != duration:
                if DEBUG:
                    log(self.log_prefix + 'got_duration: duration does not match with metadata: main_url', self.main_url, 'cur_duration', cur_duration, 'duration', duration)
        else:
            # Average bitrate in bytes/second (py2 integer division).
            bitrate = self.fileinfo['size'] / duration
            self.fileinfo['duration'] = duration
            self.fileinfo['bitrate'] = bitrate
            if self.voddownload is not None:
                self.voddownload.set_bitrate(bitrate)
    def get_stats(self):
        """Return (status, stats) describing download progress.

        Mimics the torrent statistics interface for a plain HTTP download:
        there are no peers, at most one "seed" (the HTTP server), and all
        traffic is download-only.
        """
        status = None
        stats = {}
        s = Statistics_Response()
        s.numSeeds = 0
        s.numPeers = 0
        s.httpSeeds = 0
        s.upTotal = 0
        s.downTotal = self.downloader.measure.get_total()
        s.httpDownTotal = s.downTotal
        stats['stats'] = s
        stats['up'] = 0
        if self.storage is None:
            # Storage not initialised yet: report zero progress.
            stats['frac'] = 0
            finished = False
        else:
            stats['frac'] = self.storage.get_progress()
            finished = self.storage.is_finished()
        if finished:
            # Complete file: fully playable, no ongoing transfer rates.
            stats['vod_prebuf_frac'] = 1.0
            stats['vod_playable'] = True
            stats['vod_playable_after'] = 0.0
            stats['vod'] = False
            stats['down'] = 0
            stats['httpdown'] = 0
        else:
            # While transferring, the single HTTP source counts as a seed.
            s.numSeeds = 1
            s.httpSeeds = 1
            stats['down'] = self.downloader.measure.get_rate()
            stats['httpdown'] = stats['down']
            if self.voddownload is not None:
                stats['vod_prebuf_frac'] = self.voddownload.get_prebuffering_progress()
                stats['vod_playable'] = self.voddownload.is_playable()
                stats['vod_playable_after'] = self.voddownload.get_playable_after()
                stats['vod'] = True
            else:
                # No VOD transport: report "playable after forever" (2**31).
                stats['vod_prebuf_frac'] = 0.0
                stats['vod_playable'] = False
                stats['vod_playable_after'] = float(2147483648L)
                stats['vod'] = False
        return (status, stats)
def shutdown(self):
if self.voddownload is not None:
self.voddownload.shutdown()
self.voddownload = None
self.downloader.shutdown()
if self.storage is not None:
self.storage.close()
self.storage = None
self.dldoneflag.set()
self.rawserver.shutdown()
return self.checkpoint()
    def restart(self, dlmode, vodeventfunc, finished_func, failed_func):
        """Re-attach callbacks and (re)start the download.

        Installs the new callbacks, lazily initialises storage if this is
        the first start, rebuilds the VOD transport when VOD mode is
        requested, and starts the downloader if it is idle.
        """
        self.dlmode = dlmode
        self.fileinfo['usercallback'] = vodeventfunc
        self.finished_func = finished_func
        self.failed_func = failed_func
        if self.storage is None:
            # First start: probe the server and create storage now.
            try:
                self.init()
            except Exception as e:
                if DEBUG:
                    print_exc()
                self.failed(e)
                return
        if dlmode == DLMODE_VOD:
            if self.storage.is_finished():
                if DEBUG:
                    log(self.log_prefix + 'restart: restart in vod mode requested, but download is finished: fileinfo', self.fileinfo)
                # File already complete: fire the start event immediately.
                # NOTE(review): assumes fileinfo['bitrate'] is set -- verify.
                self.vodeventcallback(self.fileinfo, VODEVENT_START, {'complete': True,
                 'filename': self.storage.get_dest_path(),
                 'mimetype': self.fileinfo['mimetype'],
                 'stream': None,
                 'length': self.storage.get_content_length(),
                 'bitrate': self.fileinfo['bitrate']})
            else:
                # Tear down any previous VOD transport before creating a
                # fresh one bound to the new callbacks.
                if self.voddownload is not None:
                    self.storage.remove_got_data_observer(self.voddownload.got_data_observer)
                    self.voddownload.shutdown()
                    self.voddownload = None
                    if DEBUG:
                        log(self.log_prefix + 'restart: voddownload is not None, stop current download')
                self.voddownload = VODTransporter(self, self.dlhash, self.fileinfo, self.vodeventcallback)
                self.storage.add_got_data_observer(self.voddownload.got_data_observer)
        if not self.downloader.is_running():
            if DEBUG:
                log(self.log_prefix + 'restart: downloader is not running, start it')
            self.downloader.start()
def checkpoint(self):
if self.storage is None:
return
resumedata = self.storage.checkpoint()
resumedata['mimetype'] = self.fileinfo['mimetype']
resumedata['filename'] = self.fileinfo['filename']
resumedata['duration'] = self.fileinfo['duration']
return resumedata
def get_dest_path(self):
if self.storage is None:
return
return self.storage.get_dest_path()
def get_content_length(self):
if self.storage is None:
return
return self.storage.get_content_length()
def set_wait_sufficient_speed(self, value):
if self.voddownload is not None:
self.voddownload.set_wait_sufficient_speed(value)
def set_player_buffer_time(self, value):
if self.voddownload is not None:
self.voddownload.set_player_buffer_time(value)
def set_live_buffer_time(self, value):
if self.voddownload is not None:
self.voddownload.set_live_buffer_time(value)
|
{
"content_hash": "6f08691e0a371e06b73d228144021d8f",
"timestamp": "",
"source": "github",
"line_count": 344,
"max_line_length": 173,
"avg_line_length": 42.026162790697676,
"alnum_prop": 0.5737704918032787,
"repo_name": "alesnav/p2ptv-pi",
"id": "bd29020f818aed69a7d1ff942b161e5a1f5cf005",
"size": "14530",
"binary": false,
"copies": "4",
"ref": "refs/heads/master",
"path": "acestream/ACEStream/Core/APIImplementation/DirectDownload.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "2612089"
},
{
"name": "Shell",
"bytes": "8995"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
from envisage.ui.workbench.workbench_action_set import *
|
{
"content_hash": "6c9239384634cb8ab053e733d6d5f9ba",
"timestamp": "",
"source": "github",
"line_count": 2,
"max_line_length": 56,
"avg_line_length": 48,
"alnum_prop": 0.8020833333333334,
"repo_name": "enthought/etsproxy",
"id": "8c938f334afc0d20e46019f04f6ab4ca87d5cae2",
"size": "111",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "enthought/envisage/ui/workbench/workbench_action_set.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "363714"
}
],
"symlink_target": ""
}
|
from django.contrib.auth.hashers import check_password
from django.contrib.auth.models import User
from testbase.unit import UnitTestCase
class TestCreateAdminUser(UnitTestCase):
    """Verify that createSuperUser grants the Django admin flags."""

    def setUp(self):
        super().setUp()
        username = self.randStr()
        self.createSuperUser(userName=username)
        self.user = User.objects.get(username=username)

    def test_setsStaffFlag(self):
        """A created superuser must be marked as staff."""
        self.assertTrue(self.user.is_staff)

    def test_setsSuperuserFlag(self):
        """A created superuser must carry the superuser flag."""
        self.assertTrue(self.user.is_superuser)
|
{
"content_hash": "9c7c2a42f66aa909e62265a3e5a73adc",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 54,
"avg_line_length": 28.833333333333332,
"alnum_prop": 0.7129094412331407,
"repo_name": "tctimmeh/django-testing-base",
"id": "f29cde1b41f27249e06ffa64cc33c2e55d7db7e5",
"size": "519",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "testsite/testapp/tests/base/testCreateSuperUser.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Makefile",
"bytes": "444"
},
{
"name": "Python",
"bytes": "25952"
}
],
"symlink_target": ""
}
|
"""The tests for the counter component."""
# pylint: disable=protected-access
import asyncio
import logging
from homeassistant.components.counter import (
CONF_ICON,
CONF_INITIAL,
CONF_NAME,
CONF_RESTORE,
CONF_STEP,
DOMAIN,
)
from homeassistant.const import ATTR_FRIENDLY_NAME, ATTR_ICON
from homeassistant.core import Context, CoreState, State
from homeassistant.setup import async_setup_component
from tests.common import mock_restore_cache
from tests.components.counter.common import (
async_decrement,
async_increment,
async_reset,
)
_LOGGER = logging.getLogger(__name__)
async def test_config(hass):
    """Test config."""
    # None of these shapes is a valid counter configuration.
    bad_configs = [None, 1, {}, {"name with space": None}]
    for bad in bad_configs:
        assert not await async_setup_component(hass, DOMAIN, {DOMAIN: bad})
async def test_config_options(hass):
    """Test configuration options."""
    count_start = len(hass.states.async_entity_ids())

    _LOGGER.debug("ENTITIES @ start: %s", hass.states.async_entity_ids())

    config = {
        DOMAIN: {
            "test_1": {},
            "test_2": {
                CONF_NAME: "Hello World",
                CONF_ICON: "mdi:work",
                CONF_INITIAL: 10,
                CONF_RESTORE: False,
                CONF_STEP: 5,
            },
        }
    }

    assert await async_setup_component(hass, "counter", config)
    await hass.async_block_till_done()

    _LOGGER.debug("ENTITIES: %s", hass.states.async_entity_ids())

    assert len(hass.states.async_entity_ids()) == count_start + 2
    await hass.async_block_till_done()

    state_1 = hass.states.get("counter.test_1")
    state_2 = hass.states.get("counter.test_2")
    assert state_1 is not None
    assert state_2 is not None

    # test_1 uses all defaults: starts at 0, no icon, no friendly name.
    assert int(state_1.state) == 0
    assert ATTR_ICON not in state_1.attributes
    assert ATTR_FRIENDLY_NAME not in state_1.attributes

    # test_2 picks up the configured name, icon and initial value.
    assert int(state_2.state) == 10
    assert state_2.attributes.get(ATTR_FRIENDLY_NAME) == "Hello World"
    assert state_2.attributes.get(ATTR_ICON) == "mdi:work"
async def test_methods(hass):
    """Test increment, decrement, and reset methods."""
    assert await async_setup_component(hass, "counter", {DOMAIN: {"test_1": {}}})

    entity_id = "counter.test_1"
    assert int(hass.states.get(entity_id).state) == 0

    # Drive the counter through each service and check the running value.
    for action, expected in (
        (async_increment, 1),
        (async_increment, 2),
        (async_decrement, 1),
        (async_reset, 0),
    ):
        action(hass, entity_id)
        await hass.async_block_till_done()
        assert int(hass.states.get(entity_id).state) == expected
async def test_methods_with_config(hass):
    """Test increment, decrement, and reset methods with configuration."""
    config = {
        DOMAIN: {"test": {CONF_NAME: "Hello World", CONF_INITIAL: 10, CONF_STEP: 5}}
    }
    assert await async_setup_component(hass, "counter", config)

    entity_id = "counter.test"
    assert int(hass.states.get(entity_id).state) == 10

    # With CONF_STEP set to 5 each service call moves the counter by 5.
    for action, expected in (
        (async_increment, 15),
        (async_increment, 20),
        (async_decrement, 15),
    ):
        action(hass, entity_id)
        await hass.async_block_till_done()
        assert int(hass.states.get(entity_id).state) == expected
async def test_initial_state_overrules_restore_state(hass):
    """Ensure states are restored on startup.

    Modernized from the deprecated @asyncio.coroutine / yield from style to
    async/await, matching the rest of this module.
    """
    mock_restore_cache(
        hass, (State("counter.test1", "11"), State("counter.test2", "-22"))
    )

    hass.state = CoreState.starting

    await async_setup_component(
        hass,
        DOMAIN,
        {
            DOMAIN: {
                "test1": {CONF_RESTORE: False},
                "test2": {CONF_INITIAL: 10, CONF_RESTORE: False},
            }
        },
    )

    # With CONF_RESTORE disabled the cached states must be ignored.
    state = hass.states.get("counter.test1")
    assert state
    assert int(state.state) == 0

    state = hass.states.get("counter.test2")
    assert state
    assert int(state.state) == 10
async def test_restore_state_overrules_initial_state(hass):
    """Ensure states are restored on startup.

    Modernized from the deprecated @asyncio.coroutine / yield from style to
    async/await, matching the rest of this module.
    """
    attr = {"initial": 6, "minimum": 1, "maximum": 8, "step": 2}
    mock_restore_cache(
        hass,
        (
            State("counter.test1", "11"),
            State("counter.test2", "-22"),
            State("counter.test3", "5", attr),
        ),
    )

    hass.state = CoreState.starting

    await async_setup_component(
        hass, DOMAIN, {DOMAIN: {"test1": {}, "test2": {CONF_INITIAL: 10}, "test3": {}}}
    )

    # Restored values win over configured initial values.
    state = hass.states.get("counter.test1")
    assert state
    assert int(state.state) == 11

    state = hass.states.get("counter.test2")
    assert state
    assert int(state.state) == -22

    # Attributes stored with the state are restored as well.
    state = hass.states.get("counter.test3")
    assert state
    assert int(state.state) == 5
    assert state.attributes.get("initial") == 6
    assert state.attributes.get("minimum") == 1
    assert state.attributes.get("maximum") == 8
    assert state.attributes.get("step") == 2
async def test_no_initial_state_and_no_restore_state(hass):
    """Ensure an entity is created without the initial and restore features.

    Modernized from the deprecated @asyncio.coroutine / yield from style to
    async/await, matching the rest of this module.
    """
    hass.state = CoreState.starting

    await async_setup_component(hass, DOMAIN, {DOMAIN: {"test1": {CONF_STEP: 5}}})

    # Without initial/restore configuration the counter starts at 0.
    state = hass.states.get("counter.test1")
    assert state
    assert int(state.state) == 0
async def test_counter_context(hass, hass_admin_user):
    """Test that counter context works."""
    assert await async_setup_component(hass, "counter", {"counter": {"test": {}}})

    before = hass.states.get("counter.test")
    assert before is not None

    await hass.services.async_call(
        "counter",
        "increment",
        {"entity_id": before.entity_id},
        True,
        Context(user_id=hass_admin_user.id),
    )

    # The new state must reflect the change and carry the caller's context.
    after = hass.states.get("counter.test")
    assert after is not None
    assert after.state != before.state
    assert after.context.user_id == hass_admin_user.id
async def test_counter_min(hass, hass_admin_user):
    """Test that min works."""
    assert await async_setup_component(
        hass, "counter", {"counter": {"test": {"minimum": "0", "initial": "0"}}}
    )

    entity_id = "counter.test"
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.state == "0"

    async def call(service):
        """Invoke a counter service and return the resulting state."""
        await hass.services.async_call(
            "counter",
            service,
            {"entity_id": entity_id},
            True,
            Context(user_id=hass_admin_user.id),
        )
        return hass.states.get(entity_id)

    # Decrementing at the configured minimum is a no-op.
    state = await call("decrement")
    assert state is not None
    assert state.state == "0"

    # Incrementing still works normally.
    state = await call("increment")
    assert state is not None
    assert state.state == "1"
async def test_counter_max(hass, hass_admin_user):
    """Test that max works."""
    assert await async_setup_component(
        hass, "counter", {"counter": {"test": {"maximum": "0", "initial": "0"}}}
    )

    entity_id = "counter.test"
    state = hass.states.get(entity_id)
    assert state is not None
    assert state.state == "0"

    async def call(service):
        """Invoke a counter service and return the resulting state."""
        await hass.services.async_call(
            "counter",
            service,
            {"entity_id": entity_id},
            True,
            Context(user_id=hass_admin_user.id),
        )
        return hass.states.get(entity_id)

    # Incrementing at the configured maximum is a no-op.
    state = await call("increment")
    assert state is not None
    assert state.state == "0"

    # Decrementing still works (no minimum configured).
    state = await call("decrement")
    assert state is not None
    assert state.state == "-1"
async def test_configure(hass, hass_admin_user):
    """Test that setting values through configure works."""
    assert await async_setup_component(
        hass, "counter", {"counter": {"test": {"maximum": "10", "initial": "10"}}}
    )

    async def configure(**fields):
        """Call the configure service on counter.test, return its new state."""
        data = {"entity_id": "counter.test"}
        data.update(fields)
        await hass.services.async_call(
            "counter",
            "configure",
            data,
            True,
            Context(user_id=hass_admin_user.id),
        )
        return hass.states.get("counter.test")

    state = hass.states.get("counter.test")
    assert state is not None
    assert state.state == "10"
    assert state.attributes.get("maximum") == 10

    # update max -- value is clamped down to the new maximum
    state = await configure(maximum=0)
    assert state is not None
    assert state.state == "0"
    assert state.attributes.get("maximum") == 0

    # disable max
    state = await configure(maximum=None)
    assert state is not None
    assert state.state == "0"
    assert state.attributes.get("maximum") is None

    # update min -- value is clamped up to the new minimum
    assert state.attributes.get("minimum") is None
    state = await configure(minimum=5)
    assert state is not None
    assert state.state == "5"
    assert state.attributes.get("minimum") == 5

    # disable min
    state = await configure(minimum=None)
    assert state is not None
    assert state.state == "5"
    assert state.attributes.get("minimum") is None

    # update step
    assert state.attributes.get("step") == 1
    state = await configure(step=3)
    assert state is not None
    assert state.state == "5"
    assert state.attributes.get("step") == 3

    # update value
    state = await configure(value=6)
    assert state is not None
    assert state.state == "6"

    # update initial -- does not change the current value
    state = await configure(initial=5)
    assert state is not None
    assert state.state == "6"
    assert state.attributes.get("initial") == 5

    # update all fields at once
    state = await configure(step=5, minimum=0, maximum=9, value=5, initial=6)
    assert state is not None
    assert state.state == "5"
    assert state.attributes.get("step") == 5
    assert state.attributes.get("minimum") == 0
    assert state.attributes.get("maximum") == 9
    assert state.attributes.get("initial") == 6
|
{
"content_hash": "76ab9d06256ac78c00b3c36cb37fde15",
"timestamp": "",
"source": "github",
"line_count": 448,
"max_line_length": 87,
"avg_line_length": 26.770089285714285,
"alnum_prop": 0.6059367964646043,
"repo_name": "leppa/home-assistant",
"id": "35512129aedccf6c047f0d480322270b6e8f12ef",
"size": "11993",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "tests/components/counter/test_init.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "18957740"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012, Dongsheng Cai
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the Dongsheng Cai nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL DONGSHENG CAI BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import time
import xml.etree.ElementTree as ET
try:
from cStringIO import StringIO
except ImportError:
from io import StringIO
import eventlet
requests = eventlet.import_patched('requests.__init__')
# ET.register_namespace exists from ElementTree 1.3 (Python 2.7+); on older
# versions fall back to writing into the private namespace map directly.
try:
    register_namespace = ET.register_namespace
except AttributeError:
    def register_namespace(prefix, uri):
        # Same effect as ET.register_namespace on ElementTree < 1.3.
        ET._namespace_map[uri] = prefix
class WNSException(Exception):
    """Base class for all WNS-related errors."""


class WNSInvalidPushTypeException(WNSException):
    """Raised when an unsupported push notification type is requested."""

    def __init__(self, pntype):
        super(WNSInvalidPushTypeException, self).__init__(
            "WNS Invalid push notification type :" + pntype)
WNSACCESSTOKEN_URL = 'https://login.live.com/accesstoken.srf'
class WNSClient():
    """Windows Notification Service client.

    Lazily acquires (and refreshes) an OAuth access token, then hands each
    message to the sender class matching its notification type.
    """

    def __init__(self, params):
        self.clientid = params['wnsclientid']
        self.clientsecret = params['wnsclientsecret']
        self.timeout = params.get('timeout')
        self.tokenexpiry = None
        self.accesstoken = None

    def process(self, **kwargs):
        """Send one push message.

        Expects 'token' (the channel URI) and 'message' (the notification
        parameters; its optional 'type' selects toast/tile/badge/raw,
        defaulting to toast).
        """
        channel_uri = kwargs['token']
        wnsparams = kwargs['message']
        wnstype = wnsparams.get('type', 'toast')
        # Refresh the access token when missing or expired.
        now = int(time.time())
        if not self.accesstoken or now >= self.tokenexpiry:
            self.request_token()
        if wnstype == 'toast':
            wnsparams.setdefault('template', 'ToastText02')
            sender = WNSToast(accesstoken=self.accesstoken, timeout=self.timeout)
        elif wnstype == 'tile':
            wnsparams.setdefault('template', 'TileSquare150x150Text01')
            sender = WNSTile(accesstoken=self.accesstoken, timeout=self.timeout)
        elif wnstype == 'badge':
            wnsparams.setdefault('badge', {'value': None})
            sender = WNSBadge(accesstoken=self.accesstoken, timeout=self.timeout)
        elif wnstype == 'raw':
            wnsparams.setdefault('raw', 'raw notification')
            sender = WNSRaw(accesstoken=self.accesstoken, timeout=self.timeout)
        else:
            raise WNSInvalidPushTypeException(wnstype)
        return sender.send(channel_uri, wnsparams)

    def request_token(self):
        """Obtain a fresh access token from the Microsoft login endpoint."""
        payload = {'grant_type': 'client_credentials',
         'client_id': self.clientid,
         'client_secret': self.clientsecret,
         'scope': 'notify.windows.com'}
        response = requests.post(WNSACCESSTOKEN_URL, data=payload, timeout=self.timeout)
        if response.status_code != 200:
            raise WNSException(response._content)
        body = response.json()
        self.accesstoken = body['access_token']
        self.tokenexpiry = int(body['expires_in']) + int(time.time())
class WNSBase(object):
    """Shared plumbing for WNS notification senders.

    Holds the auth headers, serializes payload XML, and translates WNS HTTP
    responses into a status dict.  Subclasses set their notification type
    and implement prepare_payload().
    """

    HEADER_WNS_TYPE = 'X-WNS-Type'
    HEADER_WNS_REQUESTFORSTATUS = 'X-WNS-RequestForStatus'

    def __init__(self, accesstoken=None, timeout=None):
        self.accesstoken = accesstoken
        self.timeout = timeout
        # Bug fix: a 'Content-Length' of len(accesstoken) was sent here
        # before -- the token length, not the body length.  Dropped; the
        # HTTP library computes the correct Content-Length from the body.
        self.headers = {
            'Content-Type': 'text/xml',
            'Authorization': 'Bearer %s' % self.accesstoken,
        }

    def set_type(self, target):
        """Record the WNS notification type header (e.g. 'wns/toast')."""
        self.headers[self.HEADER_WNS_TYPE] = "wns/%s" % target

    def serialize_tree(self, tree):
        """Serialize an ElementTree to UTF-8 encoded bytes.

        Bug fix: the old code imported BytesIO but wrote into a StringIO;
        ElementTree.write() emits bytes when an encoding is given, which
        fails on Python 3.  Write into a BytesIO instead.
        """
        from io import BytesIO
        buf = BytesIO()
        tree.write(buf, encoding='utf-8')
        contents = buf.getvalue()
        buf.close()
        return contents

    def optional_attribute(self, element, attribute, payload_param, payload):
        """Copy payload[payload_param] onto element as *attribute*, if present.

        Bug fix: the attribute name was previously hard-coded to the literal
        string 'attribute' instead of using the parameter.
        """
        if payload_param in payload:
            element.attrib[attribute] = payload[payload_param]

    def optional_subelement(self, parent, element, payload_param, payload):
        """Append child *element* with text payload[payload_param], if present."""
        if payload_param in payload:
            el = ET.SubElement(parent, element)
            el.text = payload[payload_param]
            return el

    def prepare_payload(self, payload):
        """Build the request body for *payload*; implemented by subclasses."""
        raise NotImplementedError('Subclasses should override prepare_payload method')

    def parse_response(self, response):
        """Translate a WNS HTTP response into a status dict.

        Sets 'error' on failure, 'drop_subscription' when the channel URI is
        dead, and 'backoff_seconds' when the sender should retry later.
        """
        status = {
            'deviceconnectionstatus': response.headers.get('X-WNS-DeviceConnectionStatus', ''),
            'error_description': response.headers.get('X-WNS-Error-Description', ''),
            'msgid': response.headers.get('X-WNS-Msg-ID', ''),
            'status': response.headers.get('X-WNS-Status', '')
        }
        code = response.status_code
        status['http_status_code'] = code
        if code == 200:
            # Accepted, but WNS may still have dropped it (device offline).
            if status['status'] == 'dropped':
                status['error'] = 'dropped'
                status['backoff_seconds'] = 60
        elif code == 400:
            status['error'] = 'Bad Request - invalid payload or subscription URI'
        elif code == 401:
            status['error'] = 'Unauthorized - invalid token or subscription URI'
            status['drop_subscription'] = True
        elif code == 403:
            status['error'] = 'The cloud service is not authorized to send a ' \
                              'notification to this URI even though they are authenticated.'
        elif code == 404:
            status['error'] = 'The channel URI is not valid or is not recognized by WNS.'
            status['drop_subscription'] = True
        elif code == 405:
            status['error'] = 'Invalid Method'
        elif code == 406:
            status['error'] = 'Throttle limit exceeded'
        elif code == 410:
            status['error'] = 'Channel expired'
            status['drop_subscription'] = True
        elif code == 413:
            status['error'] = 'Payload exceeds size limit'
        elif code == 500:
            status['error'] = 'Internal Server Error'
            status['backoff_seconds'] = 60
        elif code == 503:
            status['error'] = 'Service Unavailable - try again later'
            status['backoff_seconds'] = 60
        else:
            status['error'] = 'Unexpected status'
        return status

    def send(self, uri, payload):
        """
        Send push message. Input parameters:

        uri - channel uri
        payload - message payload (see help for subclasses)
        accesstoken - token
        """
        data = self.prepare_payload(payload)
        response = requests.post(uri, headers=self.headers, data=data, timeout=self.timeout)
        return self.parse_response(response)
class WNSToast(WNSBase):
    """Sender for WNS 'toast' notifications."""

    def __init__(self, *args, **kwargs):
        super(WNSToast, self).__init__(*args, **kwargs)
        self.set_type('toast')

    def prepare_payload(self, payload):
        """Build the toast XML document from *payload*.

        Recognised keys: 'template', 'text' (list of strings), 'image'
        (list of image URIs).  Manual id counters are replaced by
        enumerate(..., 1); WNS template slot ids are 1-based.
        """
        root = ET.Element("toast")
        visual = ET.SubElement(root, 'visual')
        binding = ET.SubElement(visual, 'binding')
        if 'template' in payload:
            binding.attrib['template'] = payload['template']
        for slot, text in enumerate(payload.get('text', ()), 1):
            el = ET.SubElement(binding, 'text')
            el.text = text
            el.attrib['id'] = '%d' % slot
        for slot, image in enumerate(payload.get('image', ()), 1):
            el = ET.SubElement(binding, 'img')
            el.attrib['id'] = '%d' % slot
            el.attrib['src'] = '%s' % image
        return self.serialize_tree(ET.ElementTree(root))
class WNSRaw(WNSBase):
    """Sender for WNS 'raw' notifications (opaque application payloads)."""

    HEADER_CONTENT_TYPE = 'Content-Type'
    HEADER_X_NOTIFICATION = "X-NotificationClass"

    def __init__(self, *args, **kwargs):
        super(WNSRaw, self).__init__(*args, **kwargs)
        self.set_type('raw')

    def set_type(self, target):
        """Set the WNS type and the raw-specific transport headers."""
        super(WNSRaw, self).set_type(target)
        headers = self.headers
        headers[self.HEADER_CONTENT_TYPE] = "application/octet-stream"
        headers[self.HEADER_WNS_REQUESTFORSTATUS] = "true"
        headers[self.HEADER_X_NOTIFICATION] = "3"

    def prepare_payload(self, payload):
        """Raw notifications are passed through unchanged."""
        return payload['raw']
class WNSTile(WNSBase):
    """Sender for WNS 'tile' notifications."""

    def __init__(self, *args, **kwargs):
        super(WNSTile, self).__init__(*args, **kwargs)
        self.set_type('tile')

    def prepare_payload(self, payload):
        """Build the tile XML document from *payload*.

        Recognised keys: 'template', 'text' (list of strings), 'image'
        (list of image URIs).  Slot ids are 1-based in WNS templates.
        """
        root = ET.Element("tile")
        visual = ET.SubElement(root, 'visual')
        binding = ET.SubElement(visual, 'binding')
        if 'template' in payload:
            binding.attrib['template'] = payload['template']
        for slot, text in enumerate(payload.get('text', ()), 1):
            el = ET.SubElement(binding, 'text')
            el.text = text
            el.attrib['id'] = '%d' % slot
        for slot, image in enumerate(payload.get('image', ()), 1):
            el = ET.SubElement(binding, 'img')
            el.attrib['id'] = '%d' % slot
            el.attrib['src'] = '%s' % image
        return self.serialize_tree(ET.ElementTree(root))
class WNSBadge(WNSBase):
    """Sender for WNS 'badge' notifications (the small tile overlay)."""

    def __init__(self, *args, **kwargs):
        super(WNSBadge, self).__init__(*args, **kwargs)
        self.set_type('badge')

    def prepare_payload(self, payload):
        """Build the <badge value="..."/> XML document from *payload*.

        NOTE(review): payload['badge']['value'] must be a string here --
        WNSClient defaults it to None and ElementTree cannot serialize a
        None attribute value; confirm callers always supply a value.
        """
        root = ET.Element("badge")
        root.attrib['value'] = payload['badge']['value']
        return self.serialize_tree(ET.ElementTree(root))
|
{
"content_hash": "6e740aa62b5c6d4f2c377eeee261c49d",
"timestamp": "",
"source": "github",
"line_count": 290,
"max_line_length": 95,
"avg_line_length": 37.279310344827586,
"alnum_prop": 0.6081768569050041,
"repo_name": "jaydg/python-wns",
"id": "d4a21a9426bb4140a6b8dc52c221ddf860e7691a",
"size": "10811",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "wns/wnslib.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "13959"
}
],
"symlink_target": ""
}
|
"""The tests for the group cover platform."""
from datetime import timedelta
import pytest
from homeassistant.components.cover import (
ATTR_CURRENT_POSITION,
ATTR_CURRENT_TILT_POSITION,
ATTR_POSITION,
ATTR_TILT_POSITION,
DOMAIN,
)
from homeassistant.components.group.cover import DEFAULT_NAME
from homeassistant.const import (
ATTR_ASSUMED_STATE,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_SUPPORTED_FEATURES,
CONF_ENTITIES,
SERVICE_CLOSE_COVER,
SERVICE_CLOSE_COVER_TILT,
SERVICE_OPEN_COVER,
SERVICE_OPEN_COVER_TILT,
SERVICE_SET_COVER_POSITION,
SERVICE_SET_COVER_TILT_POSITION,
SERVICE_STOP_COVER,
SERVICE_STOP_COVER_TILT,
SERVICE_TOGGLE,
SERVICE_TOGGLE_COVER_TILT,
STATE_CLOSED,
STATE_CLOSING,
STATE_OPEN,
STATE_OPENING,
)
from homeassistant.setup import async_setup_component
import homeassistant.util.dt as dt_util
from tests.common import assert_setup_component, async_fire_time_changed
# Entity ids used throughout the tests below.
COVER_GROUP = "cover.cover_group"
DEMO_COVER = "cover.kitchen_window"
DEMO_COVER_POS = "cover.hall_window"
DEMO_COVER_TILT = "cover.living_room_window"
DEMO_TILT = "cover.tilt_demo"

# Demo platform plus a group over all four demo covers.
CONFIG_ALL = {
    DOMAIN: [
        {"platform": "demo"},
        {
            "platform": "group",
            CONF_ENTITIES: [DEMO_COVER, DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT],
        },
    ]
}

# Same as CONFIG_ALL but the group omits DEMO_COVER.
CONFIG_POS = {
    DOMAIN: [
        {"platform": "demo"},
        {
            "platform": "group",
            CONF_ENTITIES: [DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT],
        },
    ]
}

# Group only (no demo platform) -- member states are set by hand in tests.
CONFIG_ATTRIBUTES = {
    DOMAIN: {
        "platform": "group",
        CONF_ENTITIES: [DEMO_COVER, DEMO_COVER_POS, DEMO_COVER_TILT, DEMO_TILT],
    }
}
@pytest.fixture
async def setup_comp(hass, config_count):
    """Set up group cover component."""
    conf, expected_count = config_count
    # assert_setup_component verifies exactly expected_count platform
    # configs were accepted.
    with assert_setup_component(expected_count, DOMAIN):
        await async_setup_component(hass, DOMAIN, conf)
    await hass.async_block_till_done()
    await hass.async_start()
    await hass.async_block_till_done()
@pytest.mark.parametrize("config_count", [(CONFIG_ATTRIBUTES, 1)])
async def test_attributes(hass, setup_comp):
    """Test handling of state attributes.

    Member covers are injected with ``hass.states.async_set`` so the group's
    merged supported features, positions, and assumed-state flag can be
    verified step by step as members appear and disappear.
    """
    # No members set yet: group is closed with no features or positions.
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_CLOSED
    assert state.attributes[ATTR_FRIENDLY_NAME] == DEFAULT_NAME
    assert state.attributes[ATTR_ENTITY_ID] == [
        DEMO_COVER,
        DEMO_COVER_POS,
        DEMO_COVER_TILT,
        DEMO_TILT,
    ]
    assert ATTR_ASSUMED_STATE not in state.attributes
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
    assert ATTR_CURRENT_POSITION not in state.attributes
    assert ATTR_CURRENT_TILT_POSITION not in state.attributes
    # Add Entity that supports open / close / stop
    hass.states.async_set(DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 11})
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert ATTR_ASSUMED_STATE not in state.attributes
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 11
    assert ATTR_CURRENT_POSITION not in state.attributes
    assert ATTR_CURRENT_TILT_POSITION not in state.attributes
    # Add Entity that supports set_cover_position
    hass.states.async_set(
        DEMO_COVER_POS,
        STATE_OPEN,
        {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 70},
    )
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert ATTR_ASSUMED_STATE not in state.attributes
    # Member feature bitmasks are combined: 11 | 4 == 15.
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 15
    assert state.attributes[ATTR_CURRENT_POSITION] == 70
    assert ATTR_CURRENT_TILT_POSITION not in state.attributes
    # Add Entity that supports open tilt / close tilt / stop tilt
    hass.states.async_set(DEMO_TILT, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 112})
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert ATTR_ASSUMED_STATE not in state.attributes
    # 15 | 112 == 127.
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 127
    assert state.attributes[ATTR_CURRENT_POSITION] == 70
    assert ATTR_CURRENT_TILT_POSITION not in state.attributes
    # Add Entity that supports set_tilt_position
    hass.states.async_set(
        DEMO_COVER_TILT,
        STATE_OPEN,
        {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 60},
    )
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert ATTR_ASSUMED_STATE not in state.attributes
    # 127 | 128 == 255: all cover features present.
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 255
    assert state.attributes[ATTR_CURRENT_POSITION] == 70
    assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
    # ### Test assumed state ###
    # ##########################
    # For covers
    # NOTE(review): re-declaring DEMO_COVER with position-only support (4)
    # flips the group's assumed-state flag — presumably triggered by the now
    # mismatched member feature sets; confirm against group/cover.py.
    hass.states.async_set(
        DEMO_COVER, STATE_OPEN, {ATTR_SUPPORTED_FEATURES: 4, ATTR_CURRENT_POSITION: 100}
    )
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert state.attributes[ATTR_ASSUMED_STATE] is True
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 244
    assert state.attributes[ATTR_CURRENT_POSITION] == 100
    assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
    hass.states.async_remove(DEMO_COVER)
    hass.states.async_remove(DEMO_COVER_POS)
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert ATTR_ASSUMED_STATE not in state.attributes
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 240
    assert ATTR_CURRENT_POSITION not in state.attributes
    assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
    # For tilts
    hass.states.async_set(
        DEMO_TILT,
        STATE_OPEN,
        {ATTR_SUPPORTED_FEATURES: 128, ATTR_CURRENT_TILT_POSITION: 100},
    )
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert state.attributes[ATTR_ASSUMED_STATE] is True
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 128
    assert ATTR_CURRENT_POSITION not in state.attributes
    assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
    hass.states.async_remove(DEMO_COVER_TILT)
    hass.states.async_set(DEMO_TILT, STATE_CLOSED)
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_CLOSED
    assert ATTR_ASSUMED_STATE not in state.attributes
    assert state.attributes[ATTR_SUPPORTED_FEATURES] == 0
    assert ATTR_CURRENT_POSITION not in state.attributes
    assert ATTR_CURRENT_TILT_POSITION not in state.attributes
    # A member that itself reports assumed state propagates it to the group.
    hass.states.async_set(DEMO_TILT, STATE_CLOSED, {ATTR_ASSUMED_STATE: True})
    await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.attributes[ATTR_ASSUMED_STATE] is True
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_open_covers(hass, setup_comp):
    """Test that opening the group opens every member cover."""
    await hass.services.async_call(
        DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    # Advance simulated time until the demo covers finish travelling.
    for _step in range(10):
        async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
        await hass.async_block_till_done()

    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_OPEN
    assert group_state.attributes[ATTR_CURRENT_POSITION] == 100
    assert hass.states.get(DEMO_COVER).state == STATE_OPEN
    assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100
    assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_close_covers(hass, setup_comp):
    """Test that closing the group closes every member cover."""
    await hass.services.async_call(
        DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    # Advance simulated time until the demo covers finish travelling.
    for _step in range(10):
        async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
        await hass.async_block_till_done()

    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_CLOSED
    assert group_state.attributes[ATTR_CURRENT_POSITION] == 0
    assert hass.states.get(DEMO_COVER).state == STATE_CLOSED
    assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0
    assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_toggle_covers(hass, setup_comp):
    """Test that toggling the group flips all member covers both ways."""

    async def _settle():
        # Advance simulated time until the demo covers finish travelling.
        for _ in range(10):
            async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
            await hass.async_block_till_done()

    # Start covers in open state.
    await hass.services.async_call(
        DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    await _settle()
    assert hass.states.get(COVER_GROUP).state == STATE_OPEN

    # First toggle closes all covers.
    await hass.services.async_call(
        DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    await _settle()
    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_CLOSED
    assert group_state.attributes[ATTR_CURRENT_POSITION] == 0
    assert hass.states.get(DEMO_COVER).state == STATE_CLOSED
    assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 0
    assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 0

    # Second toggle opens them again.
    await hass.services.async_call(
        DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    await _settle()
    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_OPEN
    assert group_state.attributes[ATTR_CURRENT_POSITION] == 100
    assert hass.states.get(DEMO_COVER).state == STATE_OPEN
    assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 100
    assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 100
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_stop_covers(hass, setup_comp):
    """Test that stopping the group halts the members mid-travel."""
    await hass.services.async_call(
        DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    # Let the covers travel for one simulated second, then stop them.
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
    await hass.async_block_till_done()
    await hass.services.async_call(
        DOMAIN, SERVICE_STOP_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
    await hass.async_block_till_done()

    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_OPEN
    assert group_state.attributes[ATTR_CURRENT_POSITION] == 100
    assert hass.states.get(DEMO_COVER).state == STATE_OPEN
    assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 20
    assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 80
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_set_cover_position(hass, setup_comp):
    """Test that setting the group position moves all positional members."""
    await hass.services.async_call(
        DOMAIN,
        SERVICE_SET_COVER_POSITION,
        {ATTR_ENTITY_ID: COVER_GROUP, ATTR_POSITION: 50},
        blocking=True,
    )
    # Advance simulated time until the demo covers reach the target.
    for _step in range(4):
        async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
        await hass.async_block_till_done()

    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_OPEN
    assert group_state.attributes[ATTR_CURRENT_POSITION] == 50
    assert hass.states.get(DEMO_COVER).state == STATE_CLOSED
    assert hass.states.get(DEMO_COVER_POS).attributes[ATTR_CURRENT_POSITION] == 50
    assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_POSITION] == 50
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_open_tilts(hass, setup_comp):
    """Test that opening the group tilt fully opens the member tilt."""
    await hass.services.async_call(
        DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    # Advance simulated time until the tilt finishes travelling.
    for _step in range(5):
        async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
        await hass.async_block_till_done()

    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_OPEN
    assert group_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
    tilt_state = hass.states.get(DEMO_COVER_TILT)
    assert tilt_state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_close_tilts(hass, setup_comp):
    """Test that closing the group tilt fully closes the member tilt."""
    await hass.services.async_call(
        DOMAIN, SERVICE_CLOSE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    # Advance simulated time until the tilt finishes travelling.
    for _step in range(5):
        async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
        await hass.async_block_till_done()

    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_OPEN
    assert group_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
    tilt_state = hass.states.get(DEMO_COVER_TILT)
    assert tilt_state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_toggle_tilts(hass, setup_comp):
    """Test toggle tilt function.

    Tilts fully open, then verifies each SERVICE_TOGGLE_COVER_TILT call
    flips the tilt position between fully closed (0) and fully open (100).
    """
    # Start tilted open
    await hass.services.async_call(
        DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    # Advance simulated time until the tilt finishes travelling.
    for _ in range(10):
        future = dt_util.utcnow() + timedelta(seconds=1)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
    assert (
        hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 100
    )
    # Toggle will tilt closed
    await hass.services.async_call(
        DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    for _ in range(10):
        future = dt_util.utcnow() + timedelta(seconds=1)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0
    assert hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 0
    # Toggle again will tilt open
    await hass.services.async_call(
        DOMAIN, SERVICE_TOGGLE_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    for _ in range(10):
        future = dt_util.utcnow() + timedelta(seconds=1)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()
    state = hass.states.get(COVER_GROUP)
    assert state.state == STATE_OPEN
    assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100
    assert (
        hass.states.get(DEMO_COVER_TILT).attributes[ATTR_CURRENT_TILT_POSITION] == 100
    )
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_stop_tilts(hass, setup_comp):
    """Test that stopping the group tilt halts the member mid-travel."""
    await hass.services.async_call(
        DOMAIN, SERVICE_OPEN_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    # Let the tilt travel for one simulated second, then stop it.
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
    await hass.async_block_till_done()
    await hass.services.async_call(
        DOMAIN, SERVICE_STOP_COVER_TILT, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
    await hass.async_block_till_done()

    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_OPEN
    assert group_state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
    tilt_state = hass.states.get(DEMO_COVER_TILT)
    assert tilt_state.attributes[ATTR_CURRENT_TILT_POSITION] == 60
@pytest.mark.parametrize("config_count", [(CONFIG_ALL, 2)])
async def test_set_tilt_positions(hass, setup_comp):
    """Test that setting the group tilt position moves the member tilt."""
    await hass.services.async_call(
        DOMAIN,
        SERVICE_SET_COVER_TILT_POSITION,
        {ATTR_ENTITY_ID: COVER_GROUP, ATTR_TILT_POSITION: 80},
        blocking=True,
    )
    # Advance simulated time until the tilt reaches the target.
    for _step in range(3):
        async_fire_time_changed(hass, dt_util.utcnow() + timedelta(seconds=1))
        await hass.async_block_till_done()

    group_state = hass.states.get(COVER_GROUP)
    assert group_state.state == STATE_OPEN
    assert group_state.attributes[ATTR_CURRENT_TILT_POSITION] == 80
    tilt_state = hass.states.get(DEMO_COVER_TILT)
    assert tilt_state.attributes[ATTR_CURRENT_TILT_POSITION] == 80
@pytest.mark.parametrize("config_count", [(CONFIG_POS, 2)])
async def test_is_opening_closing(hass, setup_comp):
    """Test is_opening property.

    Verifies the group mirrors the transitional OPENING/CLOSING state of its
    members, both while the demo covers travel and when a member state is set
    manually.
    """
    await hass.services.async_call(
        DOMAIN, SERVICE_OPEN_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    await hass.async_block_till_done()
    # While members travel, the group reports OPENING as well.
    assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING
    assert hass.states.get(DEMO_COVER_TILT).state == STATE_OPENING
    assert hass.states.get(COVER_GROUP).state == STATE_OPENING
    # Advance simulated time until the covers finish opening.
    for _ in range(10):
        future = dt_util.utcnow() + timedelta(seconds=1)
        async_fire_time_changed(hass, future)
        await hass.async_block_till_done()
    await hass.services.async_call(
        DOMAIN, SERVICE_CLOSE_COVER, {ATTR_ENTITY_ID: COVER_GROUP}, blocking=True
    )
    assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING
    assert hass.states.get(DEMO_COVER_TILT).state == STATE_CLOSING
    assert hass.states.get(COVER_GROUP).state == STATE_CLOSING
    # Setting a single member to a transitional state is enough for the
    # group to report that state.
    hass.states.async_set(DEMO_COVER_POS, STATE_OPENING, {ATTR_SUPPORTED_FEATURES: 11})
    await hass.async_block_till_done()
    assert hass.states.get(DEMO_COVER_POS).state == STATE_OPENING
    assert hass.states.get(COVER_GROUP).state == STATE_OPENING
    hass.states.async_set(DEMO_COVER_POS, STATE_CLOSING, {ATTR_SUPPORTED_FEATURES: 11})
    await hass.async_block_till_done()
    assert hass.states.get(DEMO_COVER_POS).state == STATE_CLOSING
    assert hass.states.get(COVER_GROUP).state == STATE_CLOSING
|
{
"content_hash": "4b37d95cc9333ece9d80b4d9da93f115",
"timestamp": "",
"source": "github",
"line_count": 526,
"max_line_length": 88,
"avg_line_length": 35.77946768060836,
"alnum_prop": 0.6815090329436769,
"repo_name": "soldag/home-assistant",
"id": "2ffe02570c9fa1f7a96ed7cd0ac545f40c5ad2ba",
"size": "18820",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "tests/components/group/test_cover.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "19025087"
},
{
"name": "Shell",
"bytes": "6846"
}
],
"symlink_target": ""
}
|
# chardet release version string (single-source version declaration).
__version__ = '2.3.0'
from sys import version_info
def detect(aBuf):
    """Detect the character encoding of *aBuf* (a bytes object).

    Returns the UniversalDetector result dict. Raises ValueError when given
    a text (unicode/str) object instead of bytes.
    """
    # Short-circuiting keeps the Python-2-only `unicode` name from being
    # evaluated on Python 3.
    is_py2_text = version_info < (3, 0) and isinstance(aBuf, unicode)
    is_py3_text = version_info >= (3, 0) and not isinstance(aBuf, bytes)
    if is_py2_text or is_py3_text:
        raise ValueError('Expected a bytes object, not a unicode object')

    from . import universaldetector
    detector = universaldetector.UniversalDetector()
    detector.reset()
    detector.feed(aBuf)
    detector.close()
    return detector.result
|
{
"content_hash": "95322e695c162efaea5d240694a9f466",
"timestamp": "",
"source": "github",
"line_count": 15,
"max_line_length": 73,
"avg_line_length": 28.6,
"alnum_prop": 0.6386946386946387,
"repo_name": "Ritsyy/fjord",
"id": "f1f4235a8784ab8ed58b6d2e8c3fcfc2a7117676",
"size": "1295",
"binary": false,
"copies": "6",
"ref": "refs/heads/master",
"path": "vendor/packages/requests-2.7.0/requests/packages/chardet/__init__.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "158694"
},
{
"name": "HTML",
"bytes": "128135"
},
{
"name": "JavaScript",
"bytes": "302359"
},
{
"name": "Python",
"bytes": "884131"
},
{
"name": "Shell",
"bytes": "11743"
},
{
"name": "Smarty",
"bytes": "825"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function, unicode_literals
import os
from builtins import open
from future.utils import text_type
from pants.backend.graph_info.subsystems.cloc_binary import ClocBinary
from pants.base.workunit import WorkUnitLabel
from pants.engine.fs import FilesContent, PathGlobs, PathGlobsAndRoot
from pants.engine.isolated_process import ExecuteProcessRequest
from pants.task.console_task import ConsoleTask
from pants.util.contextutil import temporary_dir
class CountLinesOfCode(ConsoleTask):
  """Print counts of lines of code.

  Writes the source files of the requested targets to an argfile, runs the
  cloc perl script over them in an isolated process, and yields the report
  (and optionally the list of ignored files) line by line.
  """

  @classmethod
  def subsystem_dependencies(cls):
    deps = super(CountLinesOfCode, cls).subsystem_dependencies()
    return deps + (ClocBinary,)

  @classmethod
  def register_options(cls, register):
    super(CountLinesOfCode, cls).register_options(register)
    register(
      '--transitive',
      type=bool,
      fingerprint=True,
      default=True,
      help='Operate on the transitive dependencies of the specified targets. '
           'Unset to operate only on the specified targets.',
    )
    register(
      '--ignored',
      type=bool,
      fingerprint=True,
      help='Show information about files ignored by cloc.',
    )

  def console_output(self, targets):
    """Yield the cloc report lines for the sources of ``targets``."""
    if not self.get_options().transitive:
      targets = self.context.target_roots

    source_snapshots = tuple(
      tgt.sources_snapshot(scheduler=self.context._scheduler) for tgt in targets
    )
    source_files = {path for snapshot in source_snapshots for path in snapshot.files}

    # TODO: Work out a nice library-like utility for writing an argfile, as this will be common.
    with temporary_dir() as tmpdir:
      argfile_path = os.path.join(tmpdir, 'input_files_list')
      with open(argfile_path, 'w') as argfile:
        for path in sorted(source_files):
          argfile.write(path)
          argfile.write('\n')
      argfile_snapshot = self.context._scheduler.capture_snapshots((
        PathGlobsAndRoot(
          PathGlobs(('input_files_list',)),
          text_type(tmpdir),
        ),
      ))[0]

      cloc_path, cloc_snapshot = ClocBinary.global_instance().hackily_snapshot(self.context)

      # Merge sources, the cloc script, and the argfile into one input digest.
      merged_digest = self.context._scheduler.merge_directories(tuple(
        snapshot.directory_digest
        for snapshot in source_snapshots + (cloc_snapshot, argfile_snapshot)
      ))

      # The cloc script reaches into $PATH to look up perl. Let's assume it's in /usr/bin.
      req = ExecuteProcessRequest(
        argv=(
          '/usr/bin/perl',
          cloc_path,
          '--skip-uniqueness',
          '--ignored=ignored',
          '--list-file=input_files_list',
          '--report-file=report',
        ),
        input_files=merged_digest,
        output_files=('ignored', 'report'),
        description='cloc',
      )
      exec_result = self.context.execute_process_synchronously_without_raising(
        req, 'cloc', (WorkUnitLabel.TOOL,)
      )

      files_content_tuple = self.context._scheduler.product_request(
        FilesContent,
        [exec_result.output_directory_digest],
      )[0].dependencies
      output_by_path = {fc.path: fc.content.decode('utf-8') for fc in files_content_tuple}

      for line in output_by_path['report'].split('\n'):
        yield line

      if self.get_options().ignored:
        yield 'Ignored the following files:'
        for line in output_by_path['ignored'].split('\n'):
          yield line
|
{
"content_hash": "4c11edf0378f7b65a3832bcf21631515",
"timestamp": "",
"source": "github",
"line_count": 93,
"max_line_length": 112,
"avg_line_length": 35.98924731182796,
"alnum_prop": 0.6847923513594264,
"repo_name": "twitter/pants",
"id": "e1903d9dfd5ae7327ea8a2b1dc8dda4e53a2c36d",
"size": "3494",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/python/pants/backend/graph_info/tasks/cloc.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "655"
},
{
"name": "C++",
"bytes": "2010"
},
{
"name": "CSS",
"bytes": "9444"
},
{
"name": "Dockerfile",
"bytes": "5639"
},
{
"name": "GAP",
"bytes": "1283"
},
{
"name": "Gherkin",
"bytes": "919"
},
{
"name": "Go",
"bytes": "2765"
},
{
"name": "HTML",
"bytes": "85294"
},
{
"name": "Java",
"bytes": "498956"
},
{
"name": "JavaScript",
"bytes": "22906"
},
{
"name": "Python",
"bytes": "6700799"
},
{
"name": "Rust",
"bytes": "765598"
},
{
"name": "Scala",
"bytes": "89346"
},
{
"name": "Shell",
"bytes": "94395"
},
{
"name": "Thrift",
"bytes": "2953"
}
],
"symlink_target": ""
}
|
"""Common test objects."""
import copy
import json
from hatasmota.const import (
CONF_MAC,
CONF_OFFLINE,
CONF_ONLINE,
CONF_PREFIX,
PREFIX_CMND,
PREFIX_TELE,
)
from hatasmota.utils import (
config_get_state_offline,
config_get_state_online,
get_topic_tele_state,
get_topic_tele_will,
)
from homeassistant.components.tasmota.const import DEFAULT_PREFIX
from homeassistant.const import STATE_UNAVAILABLE
from tests.async_mock import ANY
from tests.common import async_fire_mqtt_message
# Baseline Tasmota discovery payload used as a fixture throughout these
# tests (short keys mirror what the firmware publishes on
# <discovery prefix>/<mac>/config).
DEFAULT_CONFIG = {
    "ip": "192.168.15.10",
    "dn": "Tasmota",
    "fn": ["Test", "Beer", "Milk", "Four", None],
    "hn": "tasmota_49A3BC-0956",
    "if": 0,  # iFan
    "lk": 1,  # RGB + white channels linked to a single light
    "mac": "00000049A3BC",
    "md": "Sonoff Basic",
    "ofln": "Offline",
    "onln": "Online",
    "state": ["OFF", "ON", "TOGGLE", "HOLD"],
    "sw": "8.4.0.2",
    "swn": [None, None, None, None, None],
    "t": "tasmota_49A3BC",
    "ft": "%topic%/%prefix%/",
    "tp": ["cmnd", "stat", "tele"],
    "rl": [0, 0, 0, 0, 0, 0, 0, 0],
    "swc": [-1, -1, -1, -1, -1, -1, -1, -1],
    "btn": [0, 0, 0, 0],
    "so": {
        "4": 0,  # Return MQTT response as RESULT or %COMMAND%
        "11": 0,  # Swap button single and double press functionality
        "13": 0,  # Allow immediate action on single button press
        "17": 1,  # Show Color string as hex or comma-separated
        "20": 0,  # Update of Dimmer/Color/CT without turning power on
        "30": 0,  # Enforce Home Assistant auto-discovery as light
        "68": 0,  # Multi-channel PWM instead of a single light
        "73": 0,  # Enable Buttons decoupling and send multi-press and hold MQTT messages
        "82": 0,  # Reduce the CT range from 153..500 to 200.380
        "114": 0,  # Enable sending switch MQTT messages
    },
    "ty": 0,  # Tuya MCU
    "lt_st": 0,
    "ver": 1,
}
# Older-format discovery payload (9.0.0.3 per the constant name): no "if"
# or "swn" fields and a slightly different set of setoptions.
DEFAULT_CONFIG_9_0_0_3 = {
    "ip": "192.168.15.10",
    "dn": "Tasmota",
    "fn": ["Test", "Beer", "Milk", "Four", None],
    "hn": "tasmota_49A3BC-0956",
    "lk": 1,  # RGB + white channels linked to a single light
    "mac": "00000049A3BC",
    "md": "Sonoff Basic",
    "ofln": "Offline",
    "onln": "Online",
    "state": ["OFF", "ON", "TOGGLE", "HOLD"],
    "sw": "8.4.0.2",
    "t": "tasmota_49A3BC",
    "ft": "%topic%/%prefix%/",
    "tp": ["cmnd", "stat", "tele"],
    "rl": [0, 0, 0, 0, 0, 0, 0, 0],
    "swc": [-1, -1, -1, -1, -1, -1, -1, -1],
    "btn": [0, 0, 0, 0],
    "so": {
        "11": 0,  # Swap button single and double press functionality
        "13": 0,  # Allow immediate action on single button press
        "17": 1,  # Show Color string as hex or comma-separated
        "20": 0,  # Update of Dimmer/Color/CT without turning power on
        "30": 0,  # Enforce Home Assistant auto-discovery as light
        "68": 0,  # Multi-channel PWM instead of a single light
        "73": 0,  # Enable Buttons decoupling and send multi-press and hold MQTT messages
        "80": 0,  # Blinds and shutters support
        "82": 0,  # Reduce the CT range from 153..500 to 200.380
    },
    "ty": 0,  # Tuya MCU
    "lt_st": 0,
    "ver": 1,
}
async def help_test_availability_when_connection_lost(
    hass,
    mqtt_client_mock,
    mqtt_mock,
    domain,
    config,
    sensor_config=None,
    entity_id="test",
):
    """Test availability after MQTT disconnection.

    This is a test helper for the TasmotaAvailability mixin.
    """
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/sensors",
            json.dumps(sensor_config),
        )
        await hass.async_block_till_done()

    entity = f"{domain}.{entity_id}"
    will_topic = get_topic_tele_will(config)
    online_payload = config_get_state_online(config)

    # Device announces itself online via its LWT topic.
    async_fire_mqtt_message(hass, will_topic, online_payload)
    assert hass.states.get(entity).state != STATE_UNAVAILABLE

    # Losing the MQTT connection marks the entity unavailable.
    mqtt_mock.connected = False
    await hass.async_add_executor_job(mqtt_client_mock.on_disconnect, None, None, 0)
    for _ in range(3):
        await hass.async_block_till_done()
    assert hass.states.get(entity).state == STATE_UNAVAILABLE

    # Reconnecting alone does not restore availability.
    mqtt_mock.connected = True
    await hass.async_add_executor_job(mqtt_client_mock.on_connect, None, None, None, 0)
    for _ in range(3):
        await hass.async_block_till_done()
    assert hass.states.get(entity).state == STATE_UNAVAILABLE

    # Receiving the LWT "online" payload again does.
    async_fire_mqtt_message(hass, will_topic, online_payload)
    assert hass.states.get(entity).state != STATE_UNAVAILABLE
async def help_test_availability(
    hass,
    mqtt_mock,
    domain,
    config,
    sensor_config=None,
    entity_id="test",
):
    """Test availability.

    This is a test helper for the TasmotaAvailability mixin.
    """
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/sensors",
            json.dumps(sensor_config),
        )
        await hass.async_block_till_done()

    entity = f"{domain}.{entity_id}"
    will_topic = get_topic_tele_will(config)

    # The entity starts unavailable until the device announces itself.
    assert hass.states.get(entity).state == STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, will_topic, config_get_state_online(config))
    assert hass.states.get(entity).state != STATE_UNAVAILABLE

    async_fire_mqtt_message(hass, will_topic, config_get_state_offline(config))
    assert hass.states.get(entity).state == STATE_UNAVAILABLE
async def help_test_availability_discovery_update(
    hass,
    mqtt_mock,
    domain,
    config,
    sensor_config=None,
    entity_id="test",
):
    """Test update of discovered TasmotaAvailability.

    This is a test helper for the TasmotaAvailability mixin.
    """
    # customize availability topic
    # Two discovery payloads that differ only in the telemetry prefix and
    # the online/offline payload strings.
    config1 = copy.deepcopy(config)
    config1[CONF_PREFIX][PREFIX_TELE] = "tele1"
    config1[CONF_OFFLINE] = "offline1"
    config1[CONF_ONLINE] = "online1"
    config2 = copy.deepcopy(config)
    config2[CONF_PREFIX][PREFIX_TELE] = "tele2"
    config2[CONF_OFFLINE] = "offline2"
    config2[CONF_ONLINE] = "online2"
    data1 = json.dumps(config1)
    data2 = json.dumps(config2)
    availability_topic1 = get_topic_tele_will(config1)
    availability_topic2 = get_topic_tele_will(config2)
    # Sanity: the two configs must yield distinct topics and payloads for
    # the subscription-switch checks below to be meaningful.
    assert availability_topic1 != availability_topic2
    offline1 = config_get_state_offline(config1)
    offline2 = config_get_state_offline(config2)
    assert offline1 != offline2
    online1 = config_get_state_online(config1)
    online2 = config_get_state_online(config2)
    assert online1 != online2
    async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config1[CONF_MAC]}/config", data1)
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/sensors",
            json.dumps(sensor_config),
        )
        await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state.state == STATE_UNAVAILABLE
    async_fire_mqtt_message(hass, availability_topic1, online1)
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state.state != STATE_UNAVAILABLE
    async_fire_mqtt_message(hass, availability_topic1, offline1)
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state.state == STATE_UNAVAILABLE
    # Change availability settings
    async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config2[CONF_MAC]}/config", data2)
    await hass.async_block_till_done()
    # Verify we are no longer subscribing to the old topic or payload
    async_fire_mqtt_message(hass, availability_topic1, online1)
    async_fire_mqtt_message(hass, availability_topic1, online2)
    async_fire_mqtt_message(hass, availability_topic2, online1)
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state.state == STATE_UNAVAILABLE
    # Verify we are subscribing to the new topic
    async_fire_mqtt_message(hass, availability_topic2, online2)
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state.state != STATE_UNAVAILABLE
async def help_test_availability_poll_state(
    hass,
    mqtt_client_mock,
    mqtt_mock,
    domain,
    config,
    poll_topic,
    poll_payload,
    sensor_config=None,
):
    """Test polling of state when device is available.

    This is a test helper for the TasmotaAvailability mixin.
    """
    async_fire_mqtt_message(
        hass,
        f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config",
        json.dumps(config),
    )
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/sensors",
            json.dumps(sensor_config),
        )
        await hass.async_block_till_done()
    mqtt_mock.async_publish.reset_mock()

    will_topic = get_topic_tele_will(config)
    online_payload = config_get_state_online(config)

    # Device comes online -> the integration polls once for fresh state.
    async_fire_mqtt_message(hass, will_topic, online_payload)
    for _ in range(3):
        await hass.async_block_till_done()
    mqtt_mock.async_publish.assert_called_once_with(poll_topic, poll_payload, 0, False)
    mqtt_mock.async_publish.reset_mock()

    # MQTT connection lost: no poll is attempted.
    mqtt_mock.connected = False
    await hass.async_add_executor_job(mqtt_client_mock.on_disconnect, None, None, 0)
    for _ in range(3):
        await hass.async_block_till_done()
    assert not mqtt_mock.async_publish.called

    # Reconnecting alone does not trigger a poll either.
    mqtt_mock.connected = True
    await hass.async_add_executor_job(mqtt_client_mock.on_connect, None, None, None, 0)
    for _ in range(3):
        await hass.async_block_till_done()
    assert not mqtt_mock.async_publish.called

    # Device announces itself online again -> poll exactly once more.
    async_fire_mqtt_message(hass, will_topic, online_payload)
    for _ in range(3):
        await hass.async_block_till_done()
    mqtt_mock.async_publish.assert_called_once_with(poll_topic, poll_payload, 0, False)
async def help_test_discovery_removal(
    hass,
    mqtt_mock,
    caplog,
    domain,
    config1,
    config2,
    sensor_config1=None,
    sensor_config2=None,
    entity_id="test",
    name="Test",
):
    """Test removal of discovered entity.

    ``config2`` must share ``config1``'s MAC; publishing it should remove the
    entity while the device registry entry remains.
    """
    device_reg = await hass.helpers.device_registry.async_get_registry()
    entity_reg = await hass.helpers.entity_registry.async_get_registry()
    data1 = json.dumps(config1)
    data2 = json.dumps(config2)
    assert config1[CONF_MAC] == config2[CONF_MAC]
    async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config1[CONF_MAC]}/config", data1)
    await hass.async_block_till_done()
    if sensor_config1:
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config1[CONF_MAC]}/sensors",
            json.dumps(sensor_config1),
        )
        await hass.async_block_till_done()
    # Verify device and entity registry entries are created
    device_entry = device_reg.async_get_device(set(), {("mac", config1[CONF_MAC])})
    assert device_entry is not None
    entity_entry = entity_reg.async_get(f"{domain}.{entity_id}")
    assert entity_entry is not None
    # Verify state is added
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state is not None
    assert state.name == name
    async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config2[CONF_MAC]}/config", data2)
    await hass.async_block_till_done()
    if sensor_config1:
        # NOTE(review): this guard is on sensor_config1, not sensor_config2 —
        # presumably intentional so that when the first discovery carried
        # sensors, the second sensors payload (possibly empty) is still
        # published to exercise sensor removal. Confirm before "fixing".
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config2[CONF_MAC]}/sensors",
            json.dumps(sensor_config2),
        )
        await hass.async_block_till_done()
    # Verify entity registry entries are cleared
    device_entry = device_reg.async_get_device(set(), {("mac", config2[CONF_MAC])})
    assert device_entry is not None
    entity_entry = entity_reg.async_get(f"{domain}.{entity_id}")
    assert entity_entry is None
    # Verify state is removed
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state is None
async def help_test_discovery_update_unchanged(
    hass,
    mqtt_mock,
    caplog,
    domain,
    config,
    discovery_update,
    sensor_config=None,
    entity_id="test",
    name="Test",
):
    """Verify discovery updates only fire for genuinely changed payloads.

    Publishes the same discovery payload twice (no update expected), then a
    modified payload (update expected), checking the ``discovery_update``
    mock after each phase. Test helper for the MqttDiscoveryUpdate mixin.
    """
    unchanged_config = copy.deepcopy(config)
    changed_config = copy.deepcopy(config)
    changed_config[CONF_PREFIX][PREFIX_CMND] = "cmnd2"
    changed_config[CONF_PREFIX][PREFIX_TELE] = "tele2"
    unchanged_payload = json.dumps(unchanged_config)
    changed_payload = json.dumps(changed_config)
    config_topic = f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config"
    sensors_topic = f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/sensors"
    # Initial discovery.
    async_fire_mqtt_message(hass, config_topic, unchanged_payload)
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(hass, sensors_topic, json.dumps(sensor_config))
        await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state is not None
    assert state.name == name
    # Re-send the identical payload: no discovery update may be triggered.
    async_fire_mqtt_message(hass, config_topic, unchanged_payload)
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(hass, sensors_topic, json.dumps(sensor_config))
        await hass.async_block_till_done()
    assert not discovery_update.called
    # A modified payload must trigger the discovery update.
    async_fire_mqtt_message(hass, config_topic, changed_payload)
    await hass.async_block_till_done()
    assert discovery_update.called
async def help_test_discovery_device_remove(
    hass, mqtt_mock, domain, unique_id, config, sensor_config=None
):
    """Verify the domain's entity is removed when its device is removed."""
    device_reg = await hass.helpers.device_registry.async_get_registry()
    entity_reg = await hass.helpers.entity_registry.async_get_registry()
    config = copy.deepcopy(config)
    discovery_payload = json.dumps(config)
    config_topic = f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config"
    mac_identifier = {("mac", config[CONF_MAC])}
    # Discover the device (plus the optional sensor list).
    async_fire_mqtt_message(hass, config_topic, discovery_payload)
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/sensors",
            json.dumps(sensor_config),
        )
        await hass.async_block_till_done()
    assert device_reg.async_get_device(set(), mac_identifier) is not None
    assert entity_reg.async_get_entity_id(domain, "tasmota", unique_id)
    # An empty config payload removes the device and its entities.
    async_fire_mqtt_message(hass, config_topic, "")
    await hass.async_block_till_done()
    assert device_reg.async_get_device(set(), mac_identifier) is None
    assert not entity_reg.async_get_entity_id(domain, "tasmota", unique_id)
async def help_test_entity_id_update_subscriptions(
    hass, mqtt_mock, domain, config, topics=None, sensor_config=None, entity_id="test"
):
    """Test MQTT subscriptions are managed when entity_id is updated.

    Discovers the entity, records the subscribe calls, renames the entity in
    the registry, then asserts the same topics are re-subscribed for the new
    entity_id.
    """
    entity_reg = await hass.helpers.entity_registry.async_get_registry()
    config = copy.deepcopy(config)
    data = json.dumps(config)
    # Drop subscribe calls made before discovery so call counts are exact.
    mqtt_mock.async_subscribe.reset_mock()
    async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config", data)
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/sensors",
            json.dumps(sensor_config),
        )
        await hass.async_block_till_done()
    # Default expectation: state + availability (will) topics.
    if not topics:
        topics = [get_topic_tele_state(config), get_topic_tele_will(config)]
    assert len(topics) > 0
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state is not None
    assert mqtt_mock.async_subscribe.call_count == len(topics)
    for topic in topics:
        mqtt_mock.async_subscribe.assert_any_call(topic, ANY, ANY, ANY)
    mqtt_mock.async_subscribe.reset_mock()
    # Rename the entity; subscriptions must follow the new entity_id.
    entity_reg.async_update_entity(
        f"{domain}.{entity_id}", new_entity_id=f"{domain}.milk"
    )
    await hass.async_block_till_done()
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state is None
    state = hass.states.get(f"{domain}.milk")
    assert state is not None
    for topic in topics:
        mqtt_mock.async_subscribe.assert_any_call(topic, ANY, ANY, ANY)
async def help_test_entity_id_update_discovery_update(
    hass, mqtt_mock, domain, config, sensor_config=None, entity_id="test"
):
    """Test MQTT discovery update after entity_id is updated.

    After renaming the entity, a discovery payload with a changed tele prefix
    must still be applied to the renamed entity (availability follows the new
    will topic).
    """
    entity_reg = await hass.helpers.entity_registry.async_get_registry()
    config = copy.deepcopy(config)
    data = json.dumps(config)
    topic = get_topic_tele_will(config)
    async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config", data)
    await hass.async_block_till_done()
    if sensor_config:
        async_fire_mqtt_message(
            hass,
            f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/sensors",
            json.dumps(sensor_config),
        )
        await hass.async_block_till_done()
    # Availability toggles with online/offline will messages.
    async_fire_mqtt_message(hass, topic, config_get_state_online(config))
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state.state != STATE_UNAVAILABLE
    async_fire_mqtt_message(hass, topic, config_get_state_offline(config))
    state = hass.states.get(f"{domain}.{entity_id}")
    assert state.state == STATE_UNAVAILABLE
    # Rename the entity in the registry.
    entity_reg.async_update_entity(
        f"{domain}.{entity_id}", new_entity_id=f"{domain}.milk"
    )
    await hass.async_block_till_done()
    assert hass.states.get(f"{domain}.milk")
    # Publish an updated discovery payload with a new tele prefix.
    assert config[CONF_PREFIX][PREFIX_TELE] != "tele2"
    config[CONF_PREFIX][PREFIX_TELE] = "tele2"
    data = json.dumps(config)
    async_fire_mqtt_message(hass, f"{DEFAULT_PREFIX}/{config[CONF_MAC]}/config", data)
    await hass.async_block_till_done()
    # The update must not create a second entity.
    assert len(hass.states.async_entity_ids(domain)) == 1
    # Availability now follows the will topic under the new prefix.
    topic = get_topic_tele_will(config)
    async_fire_mqtt_message(hass, topic, config_get_state_online(config))
    state = hass.states.get(f"{domain}.milk")
    assert state.state != STATE_UNAVAILABLE
|
{
"content_hash": "12778654518d007eb28b3dff7a09e2b6",
"timestamp": "",
"source": "github",
"line_count": 592,
"max_line_length": 89,
"avg_line_length": 32.66891891891892,
"alnum_prop": 0.637693898655636,
"repo_name": "tboyce021/home-assistant",
"id": "04346f915c44acc9d8f837131f94c341ba032357",
"size": "19340",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/components/tasmota/test_common.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1488"
},
{
"name": "Python",
"bytes": "28861968"
},
{
"name": "Shell",
"bytes": "4815"
}
],
"symlink_target": ""
}
|
import logging
# NOTE(review): not referenced below in this module — possibly used by importers.
logger = logging.getLogger(__name__)
from bottle import request
# Dedicated logger for request parameters and response bodies.
request_logger = logging.getLogger('request.params')
def logplugin(callback):
    """Bottle plugin: log request params and the response body of each route.

    Also rewrites ``request.remote_addr`` from the X-Forwarded-For header so
    handlers behind a reverse proxy see the originating client address.
    """
    import functools

    @functools.wraps(callback)  # preserve the wrapped route's name/docstring
    def wrapper(*args, **kwargs):
        request_logger.info(request.params)
        # Bug fix: WSGI exposes HTTP request headers in environ as
        # 'HTTP_<NAME>' (PEP 3333), so the original key "X_FORWARDED_FOR"
        # never existed and the fallback was always used.
        # NOTE(review): X-Forwarded-For may contain a comma-separated chain,
        # and some bottle versions expose remote_addr as a property — confirm
        # this assignment takes effect in the deployed bottle version.
        request.remote_addr = request.environ.get(
            "HTTP_X_FORWARDED_FOR", request.remote_addr
        )
        body = callback(*args, **kwargs)
        request_logger.info(body)
        return body
    return wrapper
|
{
"content_hash": "400abee50c35e185fcb8626dd06b88d6",
"timestamp": "",
"source": "github",
"line_count": 18,
"max_line_length": 89,
"avg_line_length": 24.72222222222222,
"alnum_prop": 0.6831460674157304,
"repo_name": "mengzhuo/zaquming_web",
"id": "901c4cd1cc0141f25b31f3cfca6f300c629997e9",
"size": "485",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "plugins.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1186"
},
{
"name": "Python",
"bytes": "3933"
},
{
"name": "Smarty",
"bytes": "2665"
}
],
"symlink_target": ""
}
|
"""
REST API Documentation for the NRS TFRS Credit Trading Application
The Transportation Fuels Reporting System is being designed to streamline compliance reporting for transportation fuel suppliers in accordance with the Renewable & Low Carbon Fuel Requirements Regulation.
OpenAPI spec version: v1
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import datetime
from django.db import models
from django.utils import timezone
class FuelSupplierType(models.Model):
    """Code-table record describing a category of fuel supplier."""
    # Display name/code of the type.
    theType = models.CharField(max_length=255)
    # Optional human-readable description.
    description = models.CharField(max_length=255, blank=True, null=True)
    # Presumably the date range during which the type is in effect — confirm.
    effectiveDate = models.DateField()
    expirationDate = models.DateField()
    # Sort key for presenting types in UI lists.
    displayOrder = models.IntegerField()
|
{
"content_hash": "27fb65f0c88eac6e7951cfdbaed5f1f3",
"timestamp": "",
"source": "github",
"line_count": 34,
"max_line_length": 208,
"avg_line_length": 37.411764705882355,
"alnum_prop": 0.7421383647798742,
"repo_name": "Kiesum/tfrs-1",
"id": "4a9782379bd70c60d6cb5af4ec4006cb485e1669",
"size": "1272",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "APISpec/gen/models/FuelSupplierType.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "6813"
},
{
"name": "CSS",
"bytes": "482690"
},
{
"name": "Groovy",
"bytes": "5034"
},
{
"name": "HTML",
"bytes": "235627"
},
{
"name": "JavaScript",
"bytes": "272050"
},
{
"name": "Python",
"bytes": "597343"
},
{
"name": "Shell",
"bytes": "10011"
}
],
"symlink_target": ""
}
|
from dragonfly import (Grammar, AppContext, MappingRule, Dictation, Key, Text, Integer, Mimic)
# Grammar is only active while a window whose title contains "jade" has focus.
context = AppContext(title = "jade")
grammar = Grammar("jade", context=context)
# Chained mimic that disables auto-capitalization and auto-spacing for the
# dictation following an inserted snippet.
noSpaceNoCaps = Mimic("\\no-caps-on") + Mimic("\\no-space-on")
rules = MappingRule(
    name = "jade",
    mapping = {
        # Spoken command -> emitted Jade/Pug snippet.
        "heading [<n>]": Text("h%(n)d "),
        "span [<n>]": Text(".span%(n)d"),
        "paragraph": Text("p ") + noSpaceNoCaps,
        "link": Text("link") + Key("tab") + noSpaceNoCaps,
        "attribute": Text("attribute") + Key("tab") + noSpaceNoCaps,
        "Eckelberry": Text("echo_var") + Key("tab") + noSpaceNoCaps,
        "row fluid": Text(".row-fluid ") + noSpaceNoCaps,
        "row": Text(".row") + noSpaceNoCaps,
        "container": Text(".container") + noSpaceNoCaps,
        "unordered list": Text("ul") + noSpaceNoCaps,
        "list item": Text("li") + noSpaceNoCaps,
        "image": Text("image") + Key("tab") + noSpaceNoCaps,
        "equal": Text("=") + noSpaceNoCaps,
    },
    extras = [
        # Free dictation and a numeric slot used by "heading"/"span".
        Dictation("text"),
        Integer("n", 0, 20000),
    ],
    defaults = {
        "n" : 1
    }
)
grammar.add_rule(rules)
grammar.load()
def unload():
    """Unload the grammar (if loaded) and drop the module-level reference."""
    global grammar
    if grammar:
        grammar.unload()
    grammar = None
|
{
"content_hash": "ae3e67d5a8c442ad248cef89a62c85c7",
"timestamp": "",
"source": "github",
"line_count": 41,
"max_line_length": 94,
"avg_line_length": 29.926829268292682,
"alnum_prop": 0.5753871230643847,
"repo_name": "simianhacker/code-by-voice",
"id": "56d2d5a48a77192256135edecc3e18dc6b39d401",
"size": "1227",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "macros/_jade.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "427"
},
{
"name": "Python",
"bytes": "89834"
}
],
"symlink_target": ""
}
|
import argparse
import os
import json
from chrome_telemetry_build import android_browser_types
from core import path_util
from core import bot_platforms
# Swarming dimension keys allowed in a builder's dimension sets.
_VALID_SWARMING_DIMENSIONS = {
    'gpu', 'device_ids', 'os', 'pool', 'perf_tests', 'perf_tests_with_args',
    'cpu', 'device_os', 'device_status', 'device_type', 'device_os_flavor',
    'id', 'mac_model', 'synthetic_product_name'
}
# Pools accepted for builders without an explicit _VALID_PERF_POOLS entry.
_DEFAULT_VALID_PERF_POOLS = {
    'chrome.tests.perf',
    'chrome.tests.perf-webview',
    'chrome.tests.perf-weblayer',
    'chrome.tests.perf-fyi',
    'chrome.tests.perf-webview-fyi',
}
# Per-builder overrides of the allowed swarming pools.
_VALID_PERF_POOLS = {
    'android-builder-perf': {'chrome.tests'},
    'android_arm64-builder-perf': {'chrome.tests'},
    'android-pixel4a_power-perf': {'chrome.tests.pinpoint'},
    'chromecast-linux-builder-perf': {'chrome.tests'},
    'chromeos-kevin-perf-fyi': {'chrome.tests'},
    'chromeos-amd64-generic-lacros-builder-perf': {'chrome.tests'},
    'fuchsia-perf-fyi': {'chrome.tests'},
    'fuchsia-perf-atlas-fyi': {'chrome.tests'},
    'fuchsia-perf-sherlock-fyi': {'chrome.tests'},
    'linux-builder-perf': {'chrome.tests'},
    'mac-arm-builder-perf': {'chrome.tests'},
    'mac-builder-perf': {'chrome.tests'},
    'win32-builder-perf': {'chrome.tests'},
    'win64-builder-perf': {'chrome.tests'},
}
# --browser values accepted on WebView builders.
_VALID_WEBVIEW_BROWSERS = {
    'android-webview',
    'android-webview-google',
    'android-webview-trichrome-google-bundle',
}
# Isolate names treated as performance test suites; these get extra
# sharding and browser-flag validation.
_PERFORMANCE_TEST_SUITES = {
    'performance_test_suite',
    'performance_test_suite_eve',
    'performance_webview_test_suite',
}
# Every Android browser flavor has its own suffixed performance suite target.
for suffix in android_browser_types.TELEMETRY_ANDROID_BROWSER_TARGET_SUFFIXES:
    _PERFORMANCE_TEST_SUITES.add('performance_test_suite' + suffix)
def _ValidateSwarmingDimension(builder_name, swarming_dimensions):
    """Check every swarming dimension dict against the known keys and pools.

    Raises ValueError on an unknown dimension key, a disallowed pool, or an
    Android dimension set missing its required device keys.
    """
    for dim_set in swarming_dimensions:
        for key, value in dim_set.items():
            if key not in _VALID_SWARMING_DIMENSIONS:
                raise ValueError('Invalid swarming dimension in %s: %s' % (
                    builder_name, key))
            if key == 'pool' and value not in _VALID_PERF_POOLS.get(
                    builder_name, _DEFAULT_VALID_PERF_POOLS):
                raise ValueError('Invalid perf pool %s in %s' % (value, builder_name))
            if key == 'os' and value == 'Android':
                # Android bots must pin device type, OS version and flavor.
                required_keys = ('device_type', 'device_os', 'device_os_flavor')
                if not all(required in dim_set for required in required_keys):
                    raise ValueError(
                        'Invalid android dimensions %s in %s' % (value, builder_name))
def _ParseShardMapFileName(args):
parser = argparse.ArgumentParser()
parser.add_argument('--test-shard-map-filename', dest='shard_file')
options, _ = parser.parse_known_args(args)
return options.shard_file
def _ParseBrowserFlags(args):
parser = argparse.ArgumentParser()
parser.add_argument('--browser')
parser.add_argument('--webview-embedder-apk', action='append')
options, _ = parser.parse_known_args(args)
return options
# Directory holding the shard map JSON files, next to this script.
_SHARD_MAP_DIR = os.path.join(os.path.dirname(__file__), 'shard_maps')
def _ValidateShardingData(builder_name, test_config):
    """Validate the shard map referenced by a sharded test config.

    Ensures a builder running more than one shard names a shard map file,
    that the file exists, and that its shard keys are exactly
    '0'..str(num_shards - 1).

    Raises:
        ValueError: on any inconsistency.
    """
    num_shards = test_config['swarming'].get('shards', 1)
    if num_shards == 1:
        # Unsharded suites do not need a shard map.
        return
    shard_file_name = _ParseShardMapFileName(test_config['args'])
    if not shard_file_name:
        raise ValueError('Must specify the shard map for case num shard >= 2')
    shard_file_path = os.path.join(_SHARD_MAP_DIR, shard_file_name)
    if not os.path.exists(shard_file_path):
        raise ValueError(
            "shard test file %s in config of builder %s does not exist" % (
                repr(shard_file_name), repr(builder_name)))
    with open(shard_file_path) as f:
        shard_map_data = json.load(f)
    # 'extra_infos' is metadata, not a shard entry.
    shard_map_data.pop('extra_infos', None)
    shard_keys = set(shard_map_data.keys())
    expected_shard_keys = {str(i) for i in range(num_shards)}
    if shard_keys != expected_shard_keys:
        # Bug fix: the message previously read "the expected expected number".
        raise ValueError(
            'The shard configuration of %s does not match the expected '
            'number of shards (%d) in config of builder %s' % (
                repr(shard_file_name), num_shards, repr(builder_name)))
def _ValidateBrowserType(builder_name, test_config):
    """Check that the --browser flag matches the builder's platform name."""
    flags = _ParseBrowserFlags(test_config['args'])
    browser = flags.browser
    # Order matters: WebView builders also contain "android" in their names.
    if 'WebView' in builder_name or 'webview' in builder_name:
        if browser not in _VALID_WEBVIEW_BROWSERS:
            raise ValueError('%s must use one of the following browsers: %s' %
                             (builder_name, ', '.join(_VALID_WEBVIEW_BROWSERS)))
    elif 'Android' in builder_name or 'android' in builder_name:
        android_browsers = ('android-chromium', 'android-chrome',
                            'android-chrome-bundle', 'android-chrome-64-bundle',
                            'android-trichrome-bundle', 'exact')
        if browser not in android_browsers:
            raise ValueError('The browser type for %s must be one of %s' % (
                builder_name, ', '.join(android_browsers)))
    elif 'chromeos' in builder_name:
        if browser != 'cros-chrome':
            raise ValueError("%s must use 'cros-chrome' browser type" %
                             builder_name)
    elif 'lacros' in builder_name:
        if browser != 'lacros-chrome':
            raise ValueError("%s must use 'lacros-chrome' browser type" %
                             builder_name)
    elif builder_name in ('win-10-perf', 'Win 7 Nvidia GPU Perf',
                          'win-10_laptop_low_end-perf_HP-Candidate',
                          'win-10_laptop_low_end-perf', 'win-10_amd-perf',
                          'win-10_amd_laptop-perf'):
        # These Windows builders run the 64-bit release browser.
        if browser != 'release_x64':
            raise ValueError("%s must use 'release_x64' browser type" %
                             builder_name)
    else:  # The rest must be desktop/laptop builders
        if browser != 'release':
            raise ValueError("%s must use 'release' browser type" %
                             builder_name)
def ValidateTestingBuilder(builder_name, builder_data):
    """Validate one testing-builder entry from the waterfall JSON."""
    isolated_scripts = builder_data['isolated_scripts']
    test_names = []
    for script_config in isolated_scripts:
        test_names.append(script_config['name'])
        _ValidateSwarmingDimension(
            builder_name,
            swarming_dimensions=script_config['swarming'].get('dimension_sets', {}))
        # Perf suites get extra sharding and browser-flag validation.
        if script_config['isolate_name'] in _PERFORMANCE_TEST_SUITES:
            _ValidateShardingData(builder_name, script_config)
            _ValidateBrowserType(builder_name, script_config)
    # Perf suites must be scheduled last so they don't starve other steps.
    runs_perf_suite = any(suite in test_names for suite in _PERFORMANCE_TEST_SUITES)
    if runs_perf_suite and test_names[-1] not in _PERFORMANCE_TEST_SUITES:
        raise ValueError(
            'performance_test_suite-based targets must run at the end of builder '
            '%s to avoid starving other test step (see crbug.com/873389). '
            'Instead found %s' % (repr(builder_name), test_names[-1]))
def _IsBuilderName(name):
return not name.startswith('AAA')
def _IsTestingBuilder(builder_name, builder_data):
del builder_name # unused
return 'isolated_scripts' in builder_data
def ValidatePerfConfigFile(file_handle, is_main_perf_waterfall):
    """Validate one chromium.perf*.json waterfall config file.

    Args:
        file_handle: open file object containing the waterfall JSON.
        is_main_perf_waterfall: when True, additionally check that the set of
            perf testing builders matches core.bot_platforms.

    Raises:
        ValueError: if a builder entry is invalid, or (main waterfall only)
            the builder set disagrees with bot_platforms.
    """
    perf_data = json.load(file_handle)
    perf_testing_builder_names = set()
    for key, value in perf_data.items():
        if not _IsBuilderName(key):
            continue
        if _IsTestingBuilder(builder_name=key, builder_data=value):
            ValidateTestingBuilder(builder_name=key, builder_data=value)
            try:
                trigger_script = value['isolated_scripts'][-1]['trigger_script'][
                    'script']
            except (KeyError, IndexError):
                # Bug fix: also catch IndexError — an empty isolated_scripts
                # list previously crashed here instead of being skipped.
                continue
            if trigger_script == '//testing/trigger_scripts/perf_device_trigger.py':
                perf_testing_builder_names.add(key)
    if (is_main_perf_waterfall and
            perf_testing_builder_names != bot_platforms.OFFICIAL_PLATFORM_NAMES):
        raise ValueError(
            'Found mismatches between actual perf waterfall builders and platforms '
            'in core.bot_platforms. Please update the platforms in '
            'bot_platforms.py.\nPlatforms should be added to core.bot_platforms:%s'
            '\nPlatforms should be removed from core.bot_platforms:%s' % (
                perf_testing_builder_names - bot_platforms.OFFICIAL_PLATFORM_NAMES,
                bot_platforms.OFFICIAL_PLATFORM_NAMES - perf_testing_builder_names))
def main(args):
    """Validate every perf waterfall config file under testing/buildbot."""
    del args  # unused
    buildbot_dir = os.path.join(
        path_util.GetChromiumSrcDir(), 'testing', 'buildbot')
    # (file name, is_main_perf_waterfall) — validated in this order.
    waterfall_configs = (
        ('chromium.perf.fyi.json', False),
        ('chromium.perf.json', True),
        ('chromium.perf.calibration.json', False),
    )
    for file_name, is_main in waterfall_configs:
        with open(os.path.join(buildbot_dir, file_name)) as f:
            ValidatePerfConfigFile(f, is_main)
|
{
"content_hash": "81680606314f0d4375eb5c2ef856d5b4",
"timestamp": "",
"source": "github",
"line_count": 222,
"max_line_length": 80,
"avg_line_length": 40.08108108108108,
"alnum_prop": 0.6574511126095752,
"repo_name": "scheib/chromium",
"id": "780019cb0354c0c0a48b990123a06c1a369b1723",
"size": "9061",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tools/perf/core/perf_json_config_validator.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
"""Provides target dimension object."""
from __future__ import absolute_import
from warnings import warn
from ..errors import ClientError
from ..entity import Entity, SubEntity
from .targetvalue import TargetValue
class TargetDimension(SubEntity):
    """TargetDimension object. Used for most current targeting settings.

    Wraps a strategy's include/exclude lists of TargetValue entities and
    provides helpers to add/remove values and persist the dimension.
    """
    collection = 'target_dimensions'
    resource = 'target_dimension'
    _relations = {
        'strategy', 'target_value',
    }
    _pull = {
        '_type': None,
        'exclude': None,
        'include': None,
    }
    _push = _pull

    def __init__(self, session, properties=None, **kwargs):
        super(TargetDimension, self).__init__(session, properties, **kwargs)
        # Bypass Entity's __setattr__ so 'environment' is stored as a plain
        # attribute rather than being treated as an entity property.
        super(Entity, self).__setattr__('environment',
                                        kwargs.get('environment'))
        self._deserialize_targets()

    def _deserialize_targets(self):
        """Convert raw include/exclude dicts into TargetValue instances."""
        self.include, self.exclude = list(self.include), list(self.exclude)
        for index, ent_dict in enumerate(self.exclude):
            self.exclude[index] = TargetValue(self.session,
                                              properties=ent_dict,
                                              environment=self.environment)
        for index, ent_dict in enumerate(self.include):
            self.include[index] = TargetValue(self.session,
                                              properties=ent_dict,
                                              environment=self.environment)

    def save(self, data=None, **kwargs):
        """Saves the TargetDimension object.

        data: optional dict of properties
        """
        if 'obj' in kwargs:
            warn('The obj flag is deprecated: please discontinue use.',
                 DeprecationWarning, stacklevel=2)
        if data is None:
            data = {}
        data.update({
            # Serialize TargetValue instances back to bare IDs.
            'exclude': [location.id if isinstance(location, TargetValue)
                        else location for location in self.exclude],
            'include': [location.id if isinstance(location, TargetValue)
                        else location for location in self.include],
            # TargetDimension doesn't have a version associated.
            # But we want to use .save, rather than ._post.
            # As such, we need to have a version number included.
            # Setting it to None will make _validate_form_post yank it from the body
            'version': None,
        })
        super(TargetDimension, self).save(data=data)
        self._deserialize_targets()

    def add(self, group, target):
        """Add target value by ID or instance to group"""
        url = ['target_values', 0]
        if isinstance(target, TargetValue):
            group.append(target)
            # Bug fix: return after appending. Previously control fell
            # through to the iterability check below and raised ClientError
            # even though the value had already been appended.
            return
        if isinstance(target, int):
            target = [target, ]
        if hasattr(target, '__iter__'):
            for child_id in target:
                # Fetch each target value from the API by ID.
                url[1] = str(child_id)
                entities, _ = super(TargetDimension, self)._get(
                    self._get_service_path(), '/'.join(url))
                group.append(TargetValue(self.session,
                                         properties=next(entities),
                                         environment=self.environment))
        else:
            raise ClientError('add_to target should be an int or iterator')

    def add_to(self, group, target):
        """Alias for add to retain compatibility"""
        warn('Deprecated; use `add\' method', DeprecationWarning)
        return self.add(group, target)

    def remove(self, group, target):
        """Remove target value by ID or instance from group"""
        target_values = dict((target_value.id, target_value)
                             for target_value in group)
        if isinstance(target, list):
            for child_id in target:
                try:
                    group.remove(target_values[child_id])
                except (KeyError, ValueError):
                    # Bug fix: a missing ID raises KeyError from the dict
                    # lookup (not ValueError), which previously escaped
                    # instead of becoming the intended ClientError.
                    raise ClientError('Target value with ID {} not in '
                                      'given group.'.format(child_id))
        if isinstance(target, int):
            try:
                group.remove(target_values[target])
            except (KeyError, ValueError):
                raise ClientError('Target value with ID {} not in '
                                  'given group.'.format(target))

    def remove_from(self, group, target):
        """Alias for remove to retain compatibility"""
        warn('Deprecated; use `remove\' method', DeprecationWarning)
        return self.remove(group, target)
|
{
"content_hash": "716d3dd42d979c68b6bcec1939d28dc9",
"timestamp": "",
"source": "github",
"line_count": 112,
"max_line_length": 89,
"avg_line_length": 41.214285714285715,
"alnum_prop": 0.5457105719237435,
"repo_name": "MediaMath/t1-python",
"id": "6da7cccacf9064b9c987d76af9c7b40d7f31d4a3",
"size": "4640",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "terminalone/models/targetdimension.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "238783"
}
],
"symlink_target": ""
}
|
import os
import tempfile
from django import forms
from django.core.files.storage import FileSystemStorage
from django.forms.formsets import formset_factory
from django.http import HttpResponse
from django.template import Template, Context
from django.contrib.auth.models import User
from django.contrib.formtools.wizard.views import NamedUrlWizardView
# Scratch directory for uploaded test files; honours Django's test temp dir
# when DJANGO_TEST_TEMP_DIR is set.
temp_storage_location = tempfile.mkdtemp(dir=os.environ.get('DJANGO_TEST_TEMP_DIR'))
temp_storage = FileSystemStorage(location=temp_storage_location)
class Page1(forms.Form):
    """First wizard step: name, user choice and a tri-state boolean."""
    name = forms.CharField(max_length=100)
    user = forms.ModelChoiceField(queryset=User.objects.all())
    thirsty = forms.NullBooleanField()
class Page2(forms.Form):
    """Second wizard step: address fields plus a file upload."""
    address1 = forms.CharField(max_length=100)
    address2 = forms.CharField(max_length=100)
    file1 = forms.FileField()
class Page3(forms.Form):
    """Third wizard step: a single free-form field (also used in a formset)."""
    random_crap = forms.CharField(max_length=100)
# Fourth step repeats Page3 as a formset with two extra blank forms.
Page4 = formset_factory(Page3, extra=2)
class ContactWizard(NamedUrlWizardView):
    """Wizard under test; renders all cleaned data through an empty template."""
    # Uploaded files go to the test's temporary storage.
    file_storage = temp_storage

    def done(self, form_list, **kwargs):
        # Expose per-form and combined cleaned data in the template context.
        c = Context({
            'form_list': [x.cleaned_data for x in form_list],
            'all_cleaned_data': self.get_all_cleaned_data()
        })
        for form in self.form_list.keys():
            c[form] = self.get_cleaned_data_for_step(form)
        # Unknown step name — NOTE(review): presumably exercises the
        # "step does not exist" path of get_cleaned_data_for_step.
        c['this_will_fail'] = self.get_cleaned_data_for_step('this_will_fail')
        return HttpResponse(Template('').render(c))
class SessionContactWizard(ContactWizard):
    """Wizard variant persisting its state in the session."""
    storage_name = 'django.contrib.formtools.wizard.storage.session.SessionStorage'


class CookieContactWizard(ContactWizard):
    """Wizard variant persisting its state in a cookie."""
    storage_name = 'django.contrib.formtools.wizard.storage.cookie.CookieStorage'
|
{
"content_hash": "3bca2c2765ed56298364958c91c5e9a8",
"timestamp": "",
"source": "github",
"line_count": 51,
"max_line_length": 84,
"avg_line_length": 33.411764705882355,
"alnum_prop": 0.732981220657277,
"repo_name": "ericholscher/django",
"id": "c89f51e179cb041ebe084683fbfaee85c657bb00",
"size": "1704",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "django/contrib/formtools/tests/wizard/namedwizardtests/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "51177"
},
{
"name": "JavaScript",
"bytes": "102377"
},
{
"name": "Python",
"bytes": "9011891"
},
{
"name": "Shell",
"bytes": "12137"
}
],
"symlink_target": ""
}
|
import os
import numpy as np
from singa import device
from singa import tensor
from singa import sonnx
from singa import autograd
import onnx
import sys
# Make the sibling utils module (one directory up) importable.
sys.path.append(os.path.dirname(__file__) + '/..')
from utils import download_model, check_exist_or_download
import logging
logging.basicConfig(level=logging.INFO, format='%(asctime)-15s %(message)s')
from transformers import RobertaTokenizer
# Shared tokenizer instance; downloads the vocabulary on first use.
tokenizer = RobertaTokenizer.from_pretrained('roberta-base')
def preprocess():
    """Tokenize the fixed demo sentence into a (1, seq_len) float32 array."""
    sample_text = "This film is so good"
    token_ids = tokenizer.encode(sample_text, add_special_tokens=True)
    # The model input needs a leading batch dimension and float32 dtype.
    return np.asarray(token_ids, dtype=np.float32).reshape([1, -1])
class MyModel(sonnx.SONNXModel):
    """Thin SONNX wrapper that returns only the first model output."""
    def __init__(self, onnx_model):
        super(MyModel, self).__init__(onnx_model)

    def forward(self, *x):
        # The ONNX graph yields a list of outputs; only the first is needed
        # (presumably the classification logits — confirm against the model).
        y = super(MyModel, self).forward(*x)
        return y[0]

    def train_one_batch(self, x, y):
        # Inference-only example; training is intentionally a no-op.
        pass
if __name__ == "__main__":
    # Pretrained RoBERTa sequence-classification model from the ONNX model zoo.
    url = 'https://media.githubusercontent.com/media/onnx/models/master/text/machine_comprehension/roberta/model/roberta-sequence-classification-9.tar.gz'
    download_dir = '/tmp/'
    model_path = os.path.join(download_dir, 'roberta-sequence-classification-9', 'roberta-sequence-classification-9.onnx')
    logging.info("onnx load model...")
    download_model(url)
    onnx_model = onnx.load(model_path)
    # inference
    logging.info("preprocessing...")
    input_ids = preprocess()
    logging.info("model compling...")
    dev = device.get_default_device()
    x = tensor.Tensor(device=dev, data=input_ids)
    model = MyModel(onnx_model)
    # verifty the test
    # from utils import load_dataset
    # sg_ir = sonnx.prepare(onnx_model) # run without graph
    # inputs, ref_outputs = load_dataset(
    #     os.path.join('/tmp', 'roberta-sst-9', 'test_data_set_0'))
    # outputs = sg_ir.run(inputs)
    # for ref_o, o in zip(ref_outputs, outputs):
    #     np.testing.assert_almost_equal(ref_o, o, 4)
    logging.info("model running...")
    y = model.forward(x)
    # Take the logits of the last row, softmax, and rank classes descending.
    y = autograd.reshape(y, y.shape[-2:])[-1, :]
    y = tensor.softmax(y)
    y = tensor.to_numpy(y)[0]
    y = np.argsort(y)[::-1]
    # Class index 0 is treated as the negative sentiment label here.
    if(y[0] == 0):
        print("Prediction: negative")
    else:
        print("Prediction: positive")
|
{
"content_hash": "94c64874a6c742da862a9d932cc067ab",
"timestamp": "",
"source": "github",
"line_count": 76,
"max_line_length": 154,
"avg_line_length": 29.894736842105264,
"alnum_prop": 0.6584507042253521,
"repo_name": "apache/incubator-singa",
"id": "b6b02ed9ee394f846ebf83d5f8f439078afe7150",
"size": "3062",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "examples/onnx/ro_bert_a/ro_bert_a.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "C",
"bytes": "179197"
},
{
"name": "C++",
"bytes": "2270126"
},
{
"name": "CMake",
"bytes": "35412"
},
{
"name": "Cuda",
"bytes": "23993"
},
{
"name": "Dockerfile",
"bytes": "19274"
},
{
"name": "Java",
"bytes": "2578"
},
{
"name": "Python",
"bytes": "450209"
},
{
"name": "Shell",
"bytes": "11607"
}
],
"symlink_target": ""
}
|
import json
import struct
import re
import base64
import httplib
import sys
ERR_SLEEP = 15  # NOTE(review): not referenced in this script's visible code
MAX_NONCE = 1000000L  # NOTE(review): not referenced in this script's visible code
settings = {}  # key=value pairs parsed from the config file at startup
class AnoncoinRPC:
    # Minimal JSON-RPC 1.1 client for an anoncoind node (Python 2).
    OBJID = 1  # request-id counter

    def __init__(self, host, port, username, password):
        # Precompute the HTTP basic-auth header from the RPC credentials.
        authpair = "%s:%s" % (username, password)
        self.authhdr = "Basic %s" % (base64.b64encode(authpair))
        # Persistent connection, 30-second timeout.
        self.conn = httplib.HTTPConnection(host, port, False, 30)

    def rpc(self, method, params=None):
        # Issue one JSON-RPC call; returns the result value, the server's
        # error object, or None on transport/parse problems.
        self.OBJID += 1
        obj = { 'version' : '1.1',
            'method' : method,
            'id' : self.OBJID }
        if params is None:
            obj['params'] = []
        else:
            obj['params'] = params
        self.conn.request('POST', '/', json.dumps(obj),
            { 'Authorization' : self.authhdr,
              'Content-type' : 'application/json' })
        resp = self.conn.getresponse()
        if resp is None:
            print "JSON-RPC: no response"
            return None
        body = resp.read()
        resp_obj = json.loads(body)
        if resp_obj is None:
            print "JSON-RPC: cannot JSON-decode body"
            return None
        # A non-null 'error' field takes precedence over any result.
        if 'error' in resp_obj and resp_obj['error'] != None:
            return resp_obj['error']
        if 'result' not in resp_obj:
            print "JSON-RPC: no result in object"
            return None
        return resp_obj['result']

    def getblock(self, hash, verbose=True):
        # verbose=False returns the raw hex-serialized block.
        return self.rpc('getblock', [hash, verbose])

    def getblockhash(self, index):
        return self.rpc('getblockhash', [index])
def getblock(rpc, settings, n):
    # Fetch the block at height n and return its raw serialized bytes.
    hash = rpc.getblockhash(n)
    hexdata = rpc.getblock(hash, False)
    data = hexdata.decode('hex')  # Python 2 hex-string decode
    return data
def get_blocks(settings):
    # Dump blocks min_height..max_height to the output file in the
    # bootstrap.dat framing: netmagic + little-endian length + raw block.
    rpc = AnoncoinRPC(settings['host'], settings['port'],
                settings['rpcuser'], settings['rpcpassword'])
    # NOTE(review): opened in append mode — re-running appends duplicate
    # blocks to an existing file rather than overwriting it.
    outf = open(settings['output'], 'ab')
    for height in xrange(settings['min_height'], settings['max_height']+1):
        data = getblock(rpc, settings, height)
        outhdr = settings['netmagic']
        outhdr += struct.pack("<i", len(data))
        outf.write(outhdr)
        outf.write(data)
        # Progress indicator every 1000 blocks.
        if (height % 1000) == 0:
            sys.stdout.write("Wrote block " + str(height) + "\n")
if __name__ == '__main__':
    if len(sys.argv) != 2:
        print "Usage: linearize.py CONFIG-FILE"
        sys.exit(1)

    # Parse the simple key=value config file into the settings dict.
    f = open(sys.argv[1])
    for line in f:
        # skip comment lines
        m = re.search('^\s*#', line)
        if m:
            continue
        # parse key=value lines
        m = re.search('^(\w+)\s*=\s*(\S.*)$', line)
        if m is None:
            continue
        settings[m.group(1)] = m.group(2)
    f.close()

    # Fill in defaults for anything the config file omitted.
    if 'netmagic' not in settings:
        settings['netmagic'] = 'facabada'
    if 'output' not in settings:
        settings['output'] = 'bootstrap.dat'
    if 'host' not in settings:
        settings['host'] = '127.0.0.1'
    if 'port' not in settings:
        settings['port'] = 9376
    if 'min_height' not in settings:
        settings['min_height'] = 0
    if 'max_height' not in settings:
        settings['max_height'] = 301666
    if 'rpcuser' not in settings or 'rpcpassword' not in settings:
        print "Missing username and/or password in cfg file"
        sys.exit(1)

    # Normalize string settings to their binary/int forms.
    settings['netmagic'] = settings['netmagic'].decode('hex')
    settings['port'] = int(settings['port'])
    settings['min_height'] = int(settings['min_height'])
    settings['max_height'] = int(settings['max_height'])

    get_blocks(settings)
|
{
"content_hash": "04a4015f7f492687460789bc3a1e3bd6",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 72,
"avg_line_length": 25.508333333333333,
"alnum_prop": 0.6524011760862464,
"repo_name": "GroundRod/anoncoin",
"id": "71efd49fbe7e142d2eb9b483f484979335d64abf",
"size": "3406",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "contrib/linearize/linearize.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "74673"
},
{
"name": "C++",
"bytes": "4412812"
},
{
"name": "CMake",
"bytes": "6390"
},
{
"name": "CSS",
"bytes": "1127"
},
{
"name": "Groff",
"bytes": "18043"
},
{
"name": "HTML",
"bytes": "50621"
},
{
"name": "Makefile",
"bytes": "69213"
},
{
"name": "Objective-C",
"bytes": "2216"
},
{
"name": "Objective-C++",
"bytes": "12884"
},
{
"name": "Protocol Buffer",
"bytes": "4622"
},
{
"name": "Python",
"bytes": "133893"
},
{
"name": "QMake",
"bytes": "2006"
},
{
"name": "Shell",
"bytes": "50962"
}
],
"symlink_target": ""
}
|
"""Test the config manager."""
import asyncio
from datetime import timedelta
import logging
from unittest.mock import AsyncMock, Mock, patch
import pytest
from homeassistant import config_entries, data_entry_flow, loader
from homeassistant.components.hassio import HassioServiceInfo
from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, EVENT_HOMEASSISTANT_STOP
from homeassistant.core import CoreState, callback
from homeassistant.data_entry_flow import RESULT_TYPE_ABORT, BaseServiceInfo
from homeassistant.exceptions import (
ConfigEntryAuthFailed,
ConfigEntryNotReady,
HomeAssistantError,
)
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from homeassistant.setup import async_setup_component
from homeassistant.util import dt
from tests.common import (
MockConfigEntry,
MockEntity,
MockModule,
MockPlatform,
async_fire_time_changed,
mock_coro,
mock_entity_platform,
mock_integration,
mock_registry,
)
@pytest.fixture(autouse=True)
def mock_handlers():
    """Register a minimal config flow for the "comp" and "test" domains.

    Autouse so every test in this module has a flow handler available;
    the handler only implements a reauth step that shows a form.
    """
    class MockFlowHandler(config_entries.ConfigFlow):
        """Define a mock flow handler."""
        VERSION = 1
        async def async_step_reauth(self, data):
            """Mock Reauth."""
            return self.async_show_form(step_id="reauth")
    # patch.dict restores the HANDLERS registry after each test.
    with patch.dict(
        config_entries.HANDLERS, {"comp": MockFlowHandler, "test": MockFlowHandler}
    ):
        yield
@pytest.fixture
def manager(hass):
    """Fixture of a loaded config manager with no entries."""
    manager = config_entries.ConfigEntries(hass, {})
    manager._entries = {}
    # Prevent the store from installing a save-on-stop listener that would
    # leak scheduled callbacks between tests.
    manager._store._async_ensure_stop_listener = lambda: None
    hass.config_entries = manager
    return manager
async def test_call_setup_entry(hass):
    """Test we call <component>.setup_entry."""
    entry = MockConfigEntry(domain="comp")
    entry.add_to_hass(hass)
    assert not entry.supports_unload
    mock_setup_entry = AsyncMock(return_value=True)
    mock_migrate_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Component advertises unload support, so the loaded entry must too.
    with patch("homeassistant.config_entries.support_entry_unload", return_value=True):
        result = await async_setup_component(hass, "comp", {})
        await hass.async_block_till_done()
    assert result
    # Entry version already matches the handler, so no migration runs.
    assert len(mock_migrate_entry.mock_calls) == 0
    assert len(mock_setup_entry.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
    assert entry.supports_unload
async def test_call_setup_entry_without_reload_support(hass):
    """Test we call <component>.setup_entry and the <component> does not support unloading."""
    entry = MockConfigEntry(domain="comp")
    entry.add_to_hass(hass)
    assert not entry.supports_unload
    mock_setup_entry = AsyncMock(return_value=True)
    mock_migrate_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Same as test_call_setup_entry but the component does NOT support unload.
    with patch("homeassistant.config_entries.support_entry_unload", return_value=False):
        result = await async_setup_component(hass, "comp", {})
        await hass.async_block_till_done()
    assert result
    assert len(mock_migrate_entry.mock_calls) == 0
    assert len(mock_setup_entry.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
    # Entry still loads, but is flagged as not unloadable.
    assert not entry.supports_unload
async def test_call_async_migrate_entry(hass):
    """Test we call <component>.async_migrate_entry when version mismatch."""
    entry = MockConfigEntry(domain="comp")
    assert not entry.supports_unload
    # Stored version 2 differs from the handler's VERSION (1), forcing migration.
    entry.version = 2
    entry.add_to_hass(hass)
    mock_migrate_entry = AsyncMock(return_value=True)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    with patch("homeassistant.config_entries.support_entry_unload", return_value=True):
        result = await async_setup_component(hass, "comp", {})
        await hass.async_block_till_done()
    assert result
    # Successful migration is followed by normal setup.
    assert len(mock_migrate_entry.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
    assert entry.supports_unload
async def test_call_async_migrate_entry_failure_false(hass):
    """Test migration fails if returns false."""
    entry = MockConfigEntry(domain="comp")
    entry.version = 2
    entry.add_to_hass(hass)
    assert not entry.supports_unload
    # Migration reports failure by returning False.
    mock_migrate_entry = AsyncMock(return_value=False)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    result = await async_setup_component(hass, "comp", {})
    assert result
    assert len(mock_migrate_entry.mock_calls) == 1
    # Setup must be skipped when migration fails.
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state is config_entries.ConfigEntryState.MIGRATION_ERROR
    assert not entry.supports_unload
async def test_call_async_migrate_entry_failure_exception(hass):
    """Test migration fails if exception raised."""
    entry = MockConfigEntry(domain="comp")
    entry.version = 2
    entry.add_to_hass(hass)
    assert not entry.supports_unload
    # Migration blows up with an arbitrary exception.
    mock_migrate_entry = AsyncMock(side_effect=Exception)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    result = await async_setup_component(hass, "comp", {})
    assert result
    assert len(mock_migrate_entry.mock_calls) == 1
    # The exception is contained: component setup still succeeds, entry errors out.
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state is config_entries.ConfigEntryState.MIGRATION_ERROR
    assert not entry.supports_unload
async def test_call_async_migrate_entry_failure_not_bool(hass):
    """Test migration fails if boolean not returned."""
    entry = MockConfigEntry(domain="comp")
    entry.version = 2
    entry.add_to_hass(hass)
    assert not entry.supports_unload
    # Returning None (not a bool) is treated as a failed migration.
    mock_migrate_entry = AsyncMock(return_value=None)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_migrate_entry=mock_migrate_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    result = await async_setup_component(hass, "comp", {})
    assert result
    assert len(mock_migrate_entry.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state is config_entries.ConfigEntryState.MIGRATION_ERROR
    assert not entry.supports_unload
async def test_call_async_migrate_entry_failure_not_supported(hass):
    """Test migration fails if async_migrate_entry not implemented."""
    entry = MockConfigEntry(domain="comp")
    entry.version = 2
    entry.add_to_hass(hass)
    assert not entry.supports_unload
    mock_setup_entry = AsyncMock(return_value=True)
    # MockModule deliberately lacks async_migrate_entry.
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    result = await async_setup_component(hass, "comp", {})
    assert result
    # Version mismatch with no migration hook means the entry cannot load.
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state is config_entries.ConfigEntryState.MIGRATION_ERROR
    assert not entry.supports_unload
async def test_remove_entry(hass, manager):
    """Test that we can remove an entry.

    End-to-end: set up an entry with a light platform and one entity, then
    remove it and verify unload, the remove callback, the entity state and
    the entity-registry entry are all cleaned up.
    """
    async def mock_setup_entry(hass, entry):
        """Mock setting up entry."""
        hass.config_entries.async_setup_platforms(entry, ["light"])
        return True
    async def mock_unload_entry(hass, entry):
        """Mock unloading an entry."""
        result = await hass.config_entries.async_unload_platforms(entry, ["light"])
        assert result
        return result
    mock_remove_entry = AsyncMock(return_value=None)
    entity = MockEntity(unique_id="1234", name="Test Entity")
    async def mock_setup_entry_platform(hass, entry, async_add_entities):
        """Mock setting up platform."""
        async_add_entities([entity])
    mock_integration(
        hass,
        MockModule(
            "test",
            async_setup_entry=mock_setup_entry,
            async_unload_entry=mock_unload_entry,
            async_remove_entry=mock_remove_entry,
        ),
    )
    mock_entity_platform(
        hass, "light.test", MockPlatform(async_setup_entry=mock_setup_entry_platform)
    )
    mock_entity_platform(hass, "config_flow.test", None)
    # Surround the target entry with entries of another domain to verify
    # only the requested one is removed.
    MockConfigEntry(domain="test_other", entry_id="test1").add_to_manager(manager)
    entry = MockConfigEntry(domain="test", entry_id="test2")
    entry.add_to_manager(manager)
    MockConfigEntry(domain="test_other", entry_id="test3").add_to_manager(manager)
    # Check all config entries exist
    assert [item.entry_id for item in manager.async_entries()] == [
        "test1",
        "test2",
        "test3",
    ]
    # Setup entry
    await entry.async_setup(hass)
    await hass.async_block_till_done()
    # Check entity state got added
    assert hass.states.get("light.test_entity") is not None
    assert len(hass.states.async_all()) == 1
    # Check entity got added to entity registry
    ent_reg = er.async_get(hass)
    assert len(ent_reg.entities) == 1
    entity_entry = list(ent_reg.entities.values())[0]
    assert entity_entry.config_entry_id == entry.entry_id
    # Remove entry
    result = await manager.async_remove("test2")
    await hass.async_block_till_done()
    # Check that unload went well and so no need to restart
    assert result == {"require_restart": False}
    # Check the remove callback was invoked.
    assert mock_remove_entry.call_count == 1
    # Check that config entry was removed.
    assert [item.entry_id for item in manager.async_entries()] == ["test1", "test3"]
    # Check that entity state has been removed
    assert hass.states.get("light.test_entity") is None
    assert len(hass.states.async_all()) == 0
    # Check that entity registry entry has been removed
    entity_entry_list = list(ent_reg.entities.values())
    assert not entity_entry_list
async def test_remove_entry_cancels_reauth(hass, manager):
    """Tests that removing a config entry, also aborts existing reauth flows."""
    entry = MockConfigEntry(title="test_title", domain="test")
    # ConfigEntryAuthFailed during setup triggers an automatic reauth flow.
    mock_setup_entry = AsyncMock(side_effect=ConfigEntryAuthFailed())
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)
    entry.add_to_hass(hass)
    await entry.async_setup(hass)
    await hass.async_block_till_done()
    flows = hass.config_entries.flow.async_progress_by_handler("test")
    assert len(flows) == 1
    assert flows[0]["context"]["entry_id"] == entry.entry_id
    assert flows[0]["context"]["source"] == config_entries.SOURCE_REAUTH
    assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR
    # Removing the entry must abort the in-progress reauth flow.
    await manager.async_remove(entry.entry_id)
    flows = hass.config_entries.flow.async_progress_by_handler("test")
    assert len(flows) == 0
async def test_remove_entry_handles_callback_error(hass, manager):
    """Test that exceptions in the remove callback are handled."""
    # NOTE(review): despite the name/docstring, mock_remove_entry does not
    # raise here (return_value=None, no side_effect) — the test only proves
    # the callback is invoked. Consider side_effect=Exception to actually
    # exercise error handling; verify against the intended behavior first.
    mock_setup_entry = AsyncMock(return_value=True)
    mock_unload_entry = AsyncMock(return_value=True)
    mock_remove_entry = AsyncMock(return_value=None)
    mock_integration(
        hass,
        MockModule(
            "test",
            async_setup_entry=mock_setup_entry,
            async_unload_entry=mock_unload_entry,
            async_remove_entry=mock_remove_entry,
        ),
    )
    entry = MockConfigEntry(domain="test", entry_id="test1")
    entry.add_to_manager(manager)
    # Check all config entries exist
    assert [item.entry_id for item in manager.async_entries()] == ["test1"]
    # Setup entry
    await entry.async_setup(hass)
    await hass.async_block_till_done()
    # Remove entry
    result = await manager.async_remove("test1")
    await hass.async_block_till_done()
    # Check that unload went well and so no need to restart
    assert result == {"require_restart": False}
    # Check the remove callback was invoked.
    assert mock_remove_entry.call_count == 1
    # Check that config entry was removed.
    assert [item.entry_id for item in manager.async_entries()] == []
async def test_remove_entry_raises(hass, manager):
    """Test if a component raises while removing entry."""
    async def mock_unload_entry(hass, entry):
        """Mock unload entry function."""
        raise Exception("BROKEN")
    mock_integration(hass, MockModule("comp", async_unload_entry=mock_unload_entry))
    MockConfigEntry(domain="test", entry_id="test1").add_to_manager(manager)
    # Only the LOADED entry will actually have unload attempted.
    MockConfigEntry(
        domain="comp", entry_id="test2", state=config_entries.ConfigEntryState.LOADED
    ).add_to_manager(manager)
    MockConfigEntry(domain="test", entry_id="test3").add_to_manager(manager)
    assert [item.entry_id for item in manager.async_entries()] == [
        "test1",
        "test2",
        "test3",
    ]
    result = await manager.async_remove("test2")
    # Failed unload means a restart is required, but the entry is still removed.
    assert result == {"require_restart": True}
    assert [item.entry_id for item in manager.async_entries()] == ["test1", "test3"]
async def test_remove_entry_if_not_loaded(hass, manager):
    """Test that we can remove an entry that is not loaded."""
    mock_unload_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_unload_entry=mock_unload_entry))
    MockConfigEntry(domain="test", entry_id="test1").add_to_manager(manager)
    MockConfigEntry(domain="comp", entry_id="test2").add_to_manager(manager)
    MockConfigEntry(domain="test", entry_id="test3").add_to_manager(manager)
    assert [item.entry_id for item in manager.async_entries()] == [
        "test1",
        "test2",
        "test3",
    ]
    result = await manager.async_remove("test2")
    assert result == {"require_restart": False}
    assert [item.entry_id for item in manager.async_entries()] == ["test1", "test3"]
    # Entry was never loaded, so unload must not be called.
    assert len(mock_unload_entry.mock_calls) == 0
async def test_remove_entry_if_integration_deleted(hass, manager):
    """Test that we can remove an entry when the integration is deleted."""
    # Note: the "comp" integration is deliberately NOT registered via
    # mock_integration, simulating an integration removed from disk.
    mock_unload_entry = AsyncMock(return_value=True)
    MockConfigEntry(domain="test", entry_id="test1").add_to_manager(manager)
    MockConfigEntry(domain="comp", entry_id="test2").add_to_manager(manager)
    MockConfigEntry(domain="test", entry_id="test3").add_to_manager(manager)
    assert [item.entry_id for item in manager.async_entries()] == [
        "test1",
        "test2",
        "test3",
    ]
    result = await manager.async_remove("test2")
    assert result == {"require_restart": False}
    assert [item.entry_id for item in manager.async_entries()] == ["test1", "test3"]
    assert len(mock_unload_entry.mock_calls) == 0
async def test_add_entry_calls_setup_entry(hass, manager):
    """Test we call setup_config_entry."""
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            return self.async_create_entry(title="title", data={"token": "supersecret"})
    # "beer": 5 is an unrelated junk registration to show it is ignored.
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert len(mock_setup_entry.mock_calls) == 1
    # Verify setup was called with hass and an entry carrying the flow's data.
    p_hass, p_entry = mock_setup_entry.mock_calls[0][1]
    assert p_hass is hass
    assert p_entry.data == {"token": "supersecret"}
async def test_entries_gets_entries(manager):
    """Test entries are filtered by domain."""
    # An entry in an unrelated domain that must not appear in the result.
    MockConfigEntry(domain="test").add_to_manager(manager)
    expected = []
    for _ in range(2):
        matching = MockConfigEntry(domain="test2")
        matching.add_to_manager(manager)
        expected.append(matching)
    # Only the "test2" entries are returned, in insertion order.
    assert manager.async_entries("test2") == expected
async def test_domains_gets_domains_uniques(manager):
    """Test we only return each domain once."""
    # Register duplicates out of order; async_domains must deduplicate
    # while preserving first-seen order.
    for domain in ("test", "test2", "test2", "test", "test3"):
        MockConfigEntry(domain=domain).add_to_manager(manager)
    assert manager.async_domains() == ["test", "test2", "test3"]
async def test_domains_gets_domains_excludes_ignore_and_disabled(manager):
    """Test we only return each domain once."""
    MockConfigEntry(domain="test").add_to_manager(manager)
    MockConfigEntry(domain="test2").add_to_manager(manager)
    MockConfigEntry(domain="test2").add_to_manager(manager)
    # One ignored and one disabled entry, to exercise the filter flags.
    MockConfigEntry(
        domain="ignored", source=config_entries.SOURCE_IGNORE
    ).add_to_manager(manager)
    MockConfigEntry(domain="test3").add_to_manager(manager)
    MockConfigEntry(
        domain="disabled", disabled_by=config_entries.ConfigEntryDisabler.USER
    ).add_to_manager(manager)
    # Default excludes both ignored and disabled domains.
    assert manager.async_domains() == ["test", "test2", "test3"]
    assert manager.async_domains(include_ignore=False) == ["test", "test2", "test3"]
    assert manager.async_domains(include_disabled=False) == ["test", "test2", "test3"]
    assert manager.async_domains(include_ignore=False, include_disabled=False) == [
        "test",
        "test2",
        "test3",
    ]
    # Each flag independently re-admits its category, in insertion order.
    assert manager.async_domains(include_ignore=True) == [
        "test",
        "test2",
        "ignored",
        "test3",
    ]
    assert manager.async_domains(include_disabled=True) == [
        "test",
        "test2",
        "test3",
        "disabled",
    ]
    assert manager.async_domains(include_ignore=True, include_disabled=True) == [
        "test",
        "test2",
        "ignored",
        "test3",
        "disabled",
    ]
async def test_saving_and_loading(hass):
    """Test that config entries are persisted and loaded back intact.

    Creates two entries (one via the HANDLERS registry, one via a patched
    handler lookup), updates preference flags, lets the debounced save run,
    then loads the store into a fresh manager and compares every persisted
    field, including order.
    """
    # Use AsyncMock like the rest of this module instead of the deprecated
    # mock_coro helper wrapped in a lambda.
    mock_integration(
        hass, MockModule("test", async_setup_entry=AsyncMock(return_value=True))
    )
    mock_entity_platform(hass, "config_flow.test", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 5
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("unique")
            return self.async_create_entry(title="Test Title", data={"token": "abcd"})
    with patch.dict(config_entries.HANDLERS, {"test": TestFlow}):
        await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_USER}
        )
    class Test2Flow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 3
        async def async_step_user(self, user_input=None):
            """Test user step."""
            return self.async_create_entry(
                title="Test 2 Title", data={"username": "bla"}
            )
    with patch("homeassistant.config_entries.HANDLERS.get", return_value=Test2Flow):
        await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_USER}
        )
    assert len(hass.config_entries.async_entries()) == 2
    entry_1 = hass.config_entries.async_entries()[0]
    hass.config_entries.async_update_entry(
        entry_1,
        pref_disable_polling=True,
        pref_disable_new_entities=True,
    )
    # To trigger the call_later that debounces the store save
    async_fire_time_changed(hass, dt.utcnow() + timedelta(seconds=1))
    # To execute the save
    await hass.async_block_till_done()
    # Now load written data in a new config manager
    manager = config_entries.ConfigEntries(hass, {})
    await manager.async_initialize()
    assert len(manager.async_entries()) == 2
    # Ensure same order and that every persisted field round-trips.
    for orig, loaded in zip(
        hass.config_entries.async_entries(), manager.async_entries()
    ):
        assert orig.version == loaded.version
        assert orig.domain == loaded.domain
        assert orig.title == loaded.title
        assert orig.data == loaded.data
        assert orig.source == loaded.source
        assert orig.unique_id == loaded.unique_id
        assert orig.pref_disable_new_entities == loaded.pref_disable_new_entities
        assert orig.pref_disable_polling == loaded.pref_disable_polling
async def test_forward_entry_sets_up_component(hass):
    """Test we setup the component entry is forwarded to."""
    entry = MockConfigEntry(domain="original")
    mock_original_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass, MockModule("original", async_setup_entry=mock_original_setup_entry)
    )
    mock_forwarded_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass, MockModule("forwarded", async_setup_entry=mock_forwarded_setup_entry)
    )
    await hass.config_entries.async_forward_entry_setup(entry, "forwarded")
    # Only the forwarded-to component's setup_entry runs; the owner's does not.
    assert len(mock_original_setup_entry.mock_calls) == 0
    assert len(mock_forwarded_setup_entry.mock_calls) == 1
async def test_forward_entry_does_not_setup_entry_if_setup_fails(hass):
    """Test we do not set up entry if component setup fails."""
    entry = MockConfigEntry(domain="original")
    # The target component's async_setup fails...
    mock_setup = AsyncMock(return_value=False)
    mock_setup_entry = AsyncMock()
    mock_integration(
        hass,
        MockModule(
            "forwarded", async_setup=mock_setup, async_setup_entry=mock_setup_entry
        ),
    )
    await hass.config_entries.async_forward_entry_setup(entry, "forwarded")
    # ...so its entry setup must not be attempted.
    assert len(mock_setup.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 0
async def test_discovery_notification(hass):
    """Test that we create/dismiss a notification when source is discovery."""
    mock_integration(hass, MockModule("test"))
    mock_entity_platform(hass, "config_flow.test", None)
    with patch.dict(config_entries.HANDLERS):
        class TestFlow(config_entries.ConfigFlow, domain="test"):
            """Test flow."""
            VERSION = 5
            async def async_step_discovery(self, discovery_info):
                """Test discovery step."""
                return self.async_show_form(step_id="discovery_confirm")
            async def async_step_discovery_confirm(self, discovery_info):
                """Test discovery confirm step."""
                return self.async_create_entry(
                    title="Test Title", data={"token": "abcd"}
                )
        # Start first discovery flow to assert that reconfigure notification fires
        flow1 = await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_DISCOVERY}
        )
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is not None
        # Start a second discovery flow so we can finish the first and assert that
        # the discovery notification persists until the second one is complete
        flow2 = await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_DISCOVERY}
        )
        flow1 = await hass.config_entries.flow.async_configure(flow1["flow_id"], {})
        assert flow1["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        await hass.async_block_till_done()
        # Notification stays while flow2 is still in progress.
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is not None
        flow2 = await hass.config_entries.flow.async_configure(flow2["flow_id"], {})
        assert flow2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        await hass.async_block_till_done()
        # All discovery flows finished: notification dismissed.
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is None
async def test_reauth_notification(hass):
    """Test that we create/dismiss a notification when source is reauth."""
    mock_integration(hass, MockModule("test"))
    mock_entity_platform(hass, "config_flow.test", None)
    with patch.dict(config_entries.HANDLERS):
        class TestFlow(config_entries.ConfigFlow, domain="test"):
            """Test flow."""
            VERSION = 5
            async def async_step_user(self, user_input):
                """Test user step."""
                return self.async_show_form(step_id="user_confirm")
            async def async_step_user_confirm(self, user_input):
                """Test user confirm step."""
                return self.async_show_form(step_id="user_confirm")
            async def async_step_reauth(self, user_input):
                """Test reauth step."""
                return self.async_show_form(step_id="reauth_confirm")
            async def async_step_reauth_confirm(self, user_input):
                """Test reauth confirm step."""
                return self.async_abort(reason="test")
        # Start user flow to assert that reconfigure notification doesn't fire
        await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_reconfigure")
        assert state is None
        # Start first reauth flow to assert that reconfigure notification fires
        flow1 = await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_REAUTH}
        )
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_reconfigure")
        assert state is not None
        # Start a second reauth flow so we can finish the first and assert that
        # the reconfigure notification persists until the second one is complete
        flow2 = await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_REAUTH}
        )
        flow1 = await hass.config_entries.flow.async_configure(flow1["flow_id"], {})
        assert flow1["type"] == data_entry_flow.RESULT_TYPE_ABORT
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_reconfigure")
        assert state is not None
        flow2 = await hass.config_entries.flow.async_configure(flow2["flow_id"], {})
        assert flow2["type"] == data_entry_flow.RESULT_TYPE_ABORT
        await hass.async_block_till_done()
        # All reauth flows finished: notification dismissed.
        state = hass.states.get("persistent_notification.config_entry_reconfigure")
        assert state is None
async def test_discovery_notification_not_created(hass):
    """Test that we not create a notification when discovery is aborted."""
    mock_integration(hass, MockModule("test"))
    mock_entity_platform(hass, "config_flow.test", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 5
        async def async_step_discovery(self, discovery_info):
            """Test discovery step aborting immediately."""
            return self.async_abort(reason="test")
    with patch.dict(config_entries.HANDLERS, {"test": TestFlow}):
        await hass.config_entries.flow.async_init(
            "test", context={"source": config_entries.SOURCE_DISCOVERY}
        )
    await hass.async_block_till_done()
    # Aborted discovery must leave no persistent notification behind.
    state = hass.states.get("persistent_notification.config_entry_discovery")
    assert state is None
async def test_loading_default_config(hass):
    """Test loading the default config."""
    entries_manager = config_entries.ConfigEntries(hass, {})
    # Pretend the storage file does not exist; initialization must succeed
    # and simply yield an empty set of entries.
    missing_store = patch("homeassistant.util.json.open", side_effect=FileNotFoundError)
    with missing_store:
        await entries_manager.async_initialize()
    assert not entries_manager.async_entries()
async def test_updating_entry_data(manager):
    """Test that we can update an entry data."""
    entry = MockConfigEntry(
        domain="test",
        data={"first": True},
        state=config_entries.ConfigEntryState.SETUP_ERROR,
    )
    entry.add_to_manager(manager)
    # No-op update returns False and leaves data untouched.
    assert manager.async_update_entry(entry) is False
    assert entry.data == {"first": True}
    # A real change returns True and replaces the data mapping.
    assert manager.async_update_entry(entry, data={"second": True}) is True
    assert entry.data == {"second": True}
async def test_updating_entry_system_options(manager):
    """Test that we can update an entry's preference flags."""
    entry = MockConfigEntry(
        domain="test",
        data={"first": True},
        state=config_entries.ConfigEntryState.SETUP_ERROR,
        pref_disable_new_entities=True,
    )
    entry.add_to_manager(manager)
    assert entry.pref_disable_new_entities is True
    assert entry.pref_disable_polling is False
    # Flip both preference flags in one update call.
    manager.async_update_entry(
        entry, pref_disable_new_entities=False, pref_disable_polling=True
    )
    assert entry.pref_disable_new_entities is False
    assert entry.pref_disable_polling is True
async def test_update_entry_options_and_trigger_listener(hass, manager):
    """Test that we can update entry options and trigger listener."""
    entry = MockConfigEntry(domain="test", options={"first": True})
    entry.add_to_manager(manager)
    async def update_listener(hass, entry):
        """Assert the listener observes the already-updated options."""
        assert entry.options == {"second": True}
    entry.add_update_listener(update_listener)
    assert manager.async_update_entry(entry, options={"second": True}) is True
    assert entry.options == {"second": True}
async def test_setup_raise_not_ready(hass, caplog):
    """Test a setup raising not ready."""
    entry = MockConfigEntry(title="test_title", domain="test")
    mock_setup_entry = AsyncMock(
        side_effect=ConfigEntryNotReady("The internet connection is offline")
    )
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)
    with patch("homeassistant.config_entries.async_call_later") as mock_call:
        await entry.async_setup(hass)
    assert len(mock_call.mock_calls) == 1
    assert (
        "Config entry 'test_title' for test integration not ready yet: The internet connection is offline"
        in caplog.text
    )
    # Grab the retry that was scheduled via async_call_later.
    p_hass, p_wait_time, p_setup = mock_call.mock_calls[0][1]
    assert p_hass is hass
    # First retry is scheduled 5 seconds out.
    assert p_wait_time == 5
    assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY
    assert entry.reason == "The internet connection is offline"
    # Make setup succeed and run the scheduled retry manually.
    mock_setup_entry.side_effect = None
    mock_setup_entry.return_value = True
    await p_setup(None)
    assert entry.state is config_entries.ConfigEntryState.LOADED
    assert entry.reason is None
async def test_setup_raise_not_ready_from_exception(hass, caplog):
    """Test a setup raising not ready from another exception."""
    entry = MockConfigEntry(title="test_title", domain="test")
    # Chain a cause onto a bare ConfigEntryNotReady; the log message should
    # fall back to the cause's text.
    original_exception = HomeAssistantError("The device dropped the connection")
    config_entry_exception = ConfigEntryNotReady()
    config_entry_exception.__cause__ = original_exception
    mock_setup_entry = AsyncMock(side_effect=config_entry_exception)
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)
    with patch("homeassistant.config_entries.async_call_later") as mock_call:
        await entry.async_setup(hass)
    assert len(mock_call.mock_calls) == 1
    assert (
        "Config entry 'test_title' for test integration not ready yet: The device dropped the connection"
        in caplog.text
    )
async def test_setup_retrying_during_unload(hass):
    """Test if we unload an entry that is in retry mode."""
    entry = MockConfigEntry(domain="test")
    mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady)
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)
    with patch("homeassistant.config_entries.async_call_later") as mock_call:
        await entry.async_setup(hass)
    assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY
    # mock_call.return_value stands in for the cancel callback returned by
    # async_call_later; it has not been invoked yet.
    assert len(mock_call.return_value.mock_calls) == 0
    await entry.async_unload(hass)
    assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
    # Unloading a retrying entry must cancel the pending retry.
    assert len(mock_call.return_value.mock_calls) == 1
async def test_setup_retrying_during_unload_before_started(hass):
    """Test if we unload an entry that is in retry mode before started."""
    entry = MockConfigEntry(domain="test")
    hass.state = CoreState.starting
    initial_listeners = hass.bus.async_listeners()[EVENT_HOMEASSISTANT_STARTED]
    mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady)
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)
    await entry.async_setup(hass)
    await hass.async_block_till_done()
    assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY
    # Before startup completes, the retry is registered as a STARTED listener
    # instead of a timer.
    assert (
        hass.bus.async_listeners()[EVENT_HOMEASSISTANT_STARTED] == initial_listeners + 1
    )
    await entry.async_unload(hass)
    await hass.async_block_till_done()
    assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
    # Unload must remove that listener again.
    assert (
        hass.bus.async_listeners()[EVENT_HOMEASSISTANT_STARTED] == initial_listeners + 0
    )
async def test_create_entry_options(hass):
    """Test a config entry being created with options."""
    async def mock_async_setup(hass, config):
        """Mock setup that kicks off an import flow carrying data + option."""
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                "comp",
                context={"source": config_entries.SOURCE_IMPORT},
                data={"data": "data", "option": "option"},
            )
        )
        return True
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_import(self, user_input):
            """Test import step creating entry, with options."""
            return self.async_create_entry(
                title="title",
                data={"example": user_input["data"]},
                options={"example": user_input["option"]},
            )
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        assert await async_setup_component(hass, "comp", {})
        await hass.async_block_till_done()
        assert len(async_setup_entry.mock_calls) == 1
        entries = hass.config_entries.async_entries("comp")
        assert len(entries) == 1
        # Data and options are stored separately on the created entry.
        assert entries[0].data == {"example": "data"}
        assert entries[0].options == {"example": "option"}
async def test_entry_options(hass, manager):
    """Test that we can set options on an entry."""
    entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
    entry.add_to_manager(manager)
    class TestFlow:
        """Test flow."""
        @staticmethod
        @callback
        def async_get_options_flow(config_entry):
            """Test options flow."""
            class OptionsFlowHandler(data_entry_flow.FlowHandler):
                """Test options flow handler."""
            return OptionsFlowHandler()
    config_entries.HANDLERS["test"] = TestFlow()
    flow = await manager.options.async_create_flow(
        entry.entry_id, context={"source": "test"}, data=None
    )
    flow.handler = entry.entry_id  # Used to keep reference to config entry
    # Finishing the options flow writes its data into entry.options only.
    await manager.options.async_finish_flow(
        flow,
        {"data": {"second": True}, "type": data_entry_flow.RESULT_TYPE_CREATE_ENTRY},
    )
    assert entry.data == {"first": True}
    assert entry.options == {"second": True}
async def test_entry_options_abort(hass, manager):
    """Test that we can abort options flow."""
    entry = MockConfigEntry(domain="test", data={"first": True}, options=None)
    entry.add_to_manager(manager)
    class TestFlow:
        """Test flow."""
        @staticmethod
        @callback
        def async_get_options_flow(config_entry):
            """Test options flow."""
            class OptionsFlowHandler(data_entry_flow.FlowHandler):
                """Test options flow handler."""
            return OptionsFlowHandler()
    config_entries.HANDLERS["test"] = TestFlow()
    flow = await manager.options.async_create_flow(
        entry.entry_id, context={"source": "test"}, data=None
    )
    flow.handler = entry.entry_id  # Used to keep reference to config entry
    # Aborting must still produce a truthy finish result without touching options.
    assert await manager.options.async_finish_flow(
        flow, {"type": data_entry_flow.RESULT_TYPE_ABORT, "reason": "test"}
    )
async def test_entry_setup_succeed(hass, manager):
    """Test that setting up a not-yet-loaded entry succeeds."""
    setup_mock = AsyncMock(return_value=True)
    setup_entry_mock = AsyncMock(return_value=True)
    entry = MockConfigEntry(
        domain="comp", state=config_entries.ConfigEntryState.NOT_LOADED
    )
    entry.add_to_hass(hass)
    mock_integration(
        hass,
        MockModule(
            "comp", async_setup=setup_mock, async_setup_entry=setup_entry_mock
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Setup must report success, invoke both component hooks exactly once,
    # and leave the entry in the LOADED state.
    assert await manager.async_setup(entry.entry_id)
    assert len(setup_mock.mock_calls) == 1
    assert len(setup_entry_mock.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ConfigEntryState.LOADED,
        config_entries.ConfigEntryState.SETUP_ERROR,
        config_entries.ConfigEntryState.MIGRATION_ERROR,
        config_entries.ConfigEntryState.SETUP_RETRY,
        config_entries.ConfigEntryState.FAILED_UNLOAD,
    ),
)
async def test_entry_setup_invalid_state(hass, manager, state):
    """Test that we cannot setup an entry with invalid state."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    mock_setup = AsyncMock(return_value=True)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule("comp", async_setup=mock_setup, async_setup_entry=mock_setup_entry),
    )
    # Setup from any non-NOT_LOADED state must raise without calling any
    # component hooks or changing the entry's state.
    with pytest.raises(config_entries.OperationNotAllowed):
        assert await manager.async_setup(entry.entry_id)
    assert len(mock_setup.mock_calls) == 0
    assert len(mock_setup_entry.mock_calls) == 0
    assert entry.state is state
async def test_entry_unload_succeed(hass, manager):
    """Test that unloading a loaded entry succeeds."""
    unload_mock = AsyncMock(return_value=True)
    entry = MockConfigEntry(domain="comp", state=config_entries.ConfigEntryState.LOADED)
    entry.add_to_hass(hass)
    mock_integration(hass, MockModule("comp", async_unload_entry=unload_mock))
    # Unload must report success, call the unload hook once, and reset state.
    assert await manager.async_unload(entry.entry_id)
    assert len(unload_mock.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ConfigEntryState.NOT_LOADED,
        config_entries.ConfigEntryState.SETUP_ERROR,
        config_entries.ConfigEntryState.SETUP_RETRY,
    ),
)
async def test_entry_unload_failed_to_load(hass, manager, state):
    """Test that unloading an entry that never loaded succeeds as a no-op."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_unload_entry=async_unload_entry))
    assert await manager.async_unload(entry.entry_id)
    # The component's unload hook must not run for an entry that wasn't loaded.
    assert len(async_unload_entry.mock_calls) == 0
    assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ConfigEntryState.MIGRATION_ERROR,
        config_entries.ConfigEntryState.FAILED_UNLOAD,
    ),
)
async def test_entry_unload_invalid_state(hass, manager, state):
    """Test that we cannot unload an entry with invalid state."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_unload_entry=async_unload_entry))
    # Unloading from an error state must raise, call no hooks, and keep state.
    with pytest.raises(config_entries.OperationNotAllowed):
        assert await manager.async_unload(entry.entry_id)
    assert len(async_unload_entry.mock_calls) == 0
    assert entry.state is state
async def test_entry_reload_succeed(hass, manager):
    """Test that we can reload an entry."""
    entry = MockConfigEntry(domain="comp", state=config_entries.ConfigEntryState.LOADED)
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Reload of a loaded entry = one unload followed by one full setup.
    assert await manager.async_reload(entry.entry_id)
    assert len(async_unload_entry.mock_calls) == 1
    assert len(async_setup.mock_calls) == 1
    assert len(async_setup_entry.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ConfigEntryState.NOT_LOADED,
        config_entries.ConfigEntryState.SETUP_ERROR,
        config_entries.ConfigEntryState.SETUP_RETRY,
    ),
)
async def test_entry_reload_not_loaded(hass, manager, state):
    """Test that reloading an entry that is not loaded skips the unload step."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    assert await manager.async_reload(entry.entry_id)
    # Nothing to unload for a not-loaded entry; setup still runs once.
    assert len(async_unload_entry.mock_calls) == 0
    assert len(async_setup.mock_calls) == 1
    assert len(async_setup_entry.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
@pytest.mark.parametrize(
    "state",
    (
        config_entries.ConfigEntryState.MIGRATION_ERROR,
        config_entries.ConfigEntryState.FAILED_UNLOAD,
    ),
)
async def test_entry_reload_error(hass, manager, state):
    """Test that we cannot reload an entry in an invalid state."""
    entry = MockConfigEntry(domain="comp", state=state)
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
        ),
    )
    # Reloading from MIGRATION_ERROR / FAILED_UNLOAD must raise and leave the
    # entry untouched: no hooks called, state unchanged.
    with pytest.raises(config_entries.OperationNotAllowed):
        assert await manager.async_reload(entry.entry_id)
    assert len(async_unload_entry.mock_calls) == 0
    assert len(async_setup.mock_calls) == 0
    assert len(async_setup_entry.mock_calls) == 0
    # Identity comparison for enum members, consistent with the sibling tests.
    assert entry.state is state
async def test_entry_disable_succeed(hass, manager):
    """Test that we can disable an entry."""
    entry = MockConfigEntry(domain="comp", state=config_entries.ConfigEntryState.LOADED)
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Disable: entry is unloaded once and no setup runs.
    assert await manager.async_set_disabled_by(
        entry.entry_id, config_entries.ConfigEntryDisabler.USER
    )
    assert len(async_unload_entry.mock_calls) == 1
    assert len(async_setup.mock_calls) == 0
    assert len(async_setup_entry.mock_calls) == 0
    assert entry.state is config_entries.ConfigEntryState.NOT_LOADED
    # Enable: setup runs once more; no additional unload.
    assert await manager.async_set_disabled_by(entry.entry_id, None)
    assert len(async_unload_entry.mock_calls) == 1
    assert len(async_setup.mock_calls) == 1
    assert len(async_setup_entry.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
async def test_entry_disable_without_reload_support(hass, manager):
    """Test that we can disable an entry without reload support."""
    entry = MockConfigEntry(domain="comp", state=config_entries.ConfigEntryState.LOADED)
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    # No async_unload_entry hook: the component can't be unloaded cleanly.
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Disable: reports failure and leaves the entry in FAILED_UNLOAD.
    assert not await manager.async_set_disabled_by(
        entry.entry_id, config_entries.ConfigEntryDisabler.USER
    )
    assert len(async_setup.mock_calls) == 0
    assert len(async_setup_entry.mock_calls) == 0
    assert entry.state is config_entries.ConfigEntryState.FAILED_UNLOAD
    # Enable: re-enabling from FAILED_UNLOAD is rejected outright.
    with pytest.raises(config_entries.OperationNotAllowed):
        await manager.async_set_disabled_by(entry.entry_id, None)
    assert len(async_setup.mock_calls) == 0
    assert len(async_setup_entry.mock_calls) == 0
    assert entry.state is config_entries.ConfigEntryState.FAILED_UNLOAD
async def test_entry_enable_without_reload_support(hass, manager):
    """Test that we can enable an entry without reload support."""
    entry = MockConfigEntry(
        domain="comp", disabled_by=config_entries.ConfigEntryDisabler.USER
    )
    entry.add_to_hass(hass)
    async_setup = AsyncMock(return_value=True)
    async_setup_entry = AsyncMock(return_value=True)
    # No async_unload_entry hook: enabling works, later disabling can't unload.
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=async_setup_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Enable: setup runs and the entry loads.
    assert await manager.async_set_disabled_by(entry.entry_id, None)
    assert len(async_setup.mock_calls) == 1
    assert len(async_setup_entry.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
    # Disable: fails because the component cannot be unloaded.
    assert not await manager.async_set_disabled_by(
        entry.entry_id, config_entries.ConfigEntryDisabler.USER
    )
    assert len(async_setup.mock_calls) == 1
    assert len(async_setup_entry.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.FAILED_UNLOAD
async def test_init_custom_integration(hass):
    """Test initializing flow for custom integration."""
    # Minimal custom-integration manifest: no config flow is registered for it.
    integration = loader.Integration(
        hass,
        "custom_components.hue",
        None,
        {"name": "Hue", "dependencies": [], "requirements": [], "domain": "hue"},
    )
    # Starting a flow for a handler this integration doesn't provide must raise.
    with pytest.raises(data_entry_flow.UnknownHandler), patch(
        "homeassistant.loader.async_get_integration",
        return_value=integration,
    ):
        await hass.config_entries.flow.async_init("bla")
async def test_support_entry_unload(hass):
    """Test unloading entry."""
    # "light" exposes an unload hook; "auth" does not.
    assert await config_entries.support_entry_unload(hass, "light")
    assert not await config_entries.support_entry_unload(hass, "auth")
async def test_reload_entry_entity_registry_ignores_no_entry(hass):
    """Test reloading entry in entity registry skips if no config entry linked."""
    handler = config_entries.EntityRegistryDisabledHandler(hass)
    registry = mock_registry(hass)
    # Test we ignore entities without config entry
    entry = registry.async_get_or_create("light", "hue", "123")
    registry.async_update_entity(
        entry.entity_id, disabled_by=er.RegistryEntryDisabler.USER
    )
    await hass.async_block_till_done()
    # Handler must not record a change nor schedule a delayed reload.
    assert not handler.changed
    assert handler._remove_call_later is None
async def test_reload_entry_entity_registry_works(hass):
    """Test we schedule an entry to be reloaded if disabled_by is updated."""
    handler = config_entries.EntityRegistryDisabledHandler(hass)
    handler.async_setup()
    registry = mock_registry(hass)
    config_entry = MockConfigEntry(
        domain="comp", state=config_entries.ConfigEntryState.LOADED
    )
    config_entry.supports_unload = True
    config_entry.add_to_hass(hass)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_unload_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=mock_setup_entry,
            async_unload_entry=mock_unload_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Only changing disabled_by should update trigger
    entity_entry = registry.async_get_or_create(
        "light", "hue", "123", config_entry=config_entry
    )
    registry.async_update_entity(entity_entry.entity_id, name="yo")
    await hass.async_block_till_done()
    assert not handler.changed
    assert handler._remove_call_later is None
    # Disable entity, we should not do anything, only act when enabled.
    registry.async_update_entity(
        entity_entry.entity_id, disabled_by=er.RegistryEntryDisabler.USER
    )
    await hass.async_block_till_done()
    assert not handler.changed
    assert handler._remove_call_later is None
    # Enable entity, check we are reloading config entry.
    registry.async_update_entity(entity_entry.entity_id, disabled_by=None)
    await hass.async_block_till_done()
    assert handler.changed == {config_entry.entry_id}
    assert handler._remove_call_later is not None
    # Advance past the debounce delay so the scheduled reload fires.
    async_fire_time_changed(
        hass,
        dt.utcnow() + timedelta(seconds=config_entries.RELOAD_AFTER_UPDATE_DELAY + 1),
    )
    await hass.async_block_till_done()
    # The reload unloaded the config entry exactly once.
    assert len(mock_unload_entry.mock_calls) == 1
async def test_unique_id_persisted(hass, manager):
    """Test that a unique ID is stored in the config entry."""
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            return self.async_create_entry(title="mock-title", data={})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
    # The entry handed to async_setup_entry carries the flow's unique ID.
    assert len(mock_setup_entry.mock_calls) == 1
    p_hass, p_entry = mock_setup_entry.mock_calls[0][1]
    assert p_hass is hass
    assert p_entry.unique_id == "mock-unique-id"
async def test_unique_id_existing_entry(hass, manager):
    """Test that we remove an entry if there already is an entry with unique ID."""
    hass.config.components.add("comp")
    MockConfigEntry(
        domain="comp",
        state=config_entries.ConfigEntryState.LOADED,
        unique_id="mock-unique-id",
    ).add_to_hass(hass)
    async_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)
    async_remove_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup_entry=async_setup_entry,
            async_unload_entry=async_unload_entry,
            async_remove_entry=async_remove_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            # async_set_unique_id returns the already-registered entry, if any.
            existing_entry = await self.async_set_unique_id("mock-unique-id")
            assert existing_entry is not None
            return self.async_create_entry(title="mock-title", data={"via": "flow"})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
    assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # The old entry was unloaded and removed; only the new one remains.
    entries = hass.config_entries.async_entries("comp")
    assert len(entries) == 1
    assert entries[0].data == {"via": "flow"}
    assert len(async_setup_entry.mock_calls) == 1
    assert len(async_unload_entry.mock_calls) == 1
    assert len(async_remove_entry.mock_calls) == 1
async def test_entry_id_existing_entry(hass, manager):
    """Test that we throw when the entry id collides."""
    collide_entry_id = "collide"
    hass.config.components.add("comp")
    MockConfigEntry(
        entry_id=collide_entry_id,
        domain="comp",
        state=config_entries.ConfigEntryState.LOADED,
        unique_id="mock-unique-id",
    ).add_to_hass(hass)
    mock_integration(
        hass,
        MockModule("comp"),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            return self.async_create_entry(title="mock-title", data={"via": "flow"})
    # Force the generated entry_id to collide with the existing entry's id.
    with pytest.raises(HomeAssistantError), patch.dict(
        config_entries.HANDLERS, {"comp": TestFlow}
    ), patch(
        "homeassistant.config_entries.uuid_util.random_uuid_hex",
        return_value=collide_entry_id,
    ):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
async def test_unique_id_update_existing_entry_without_reload(hass, manager):
    """Test that we update an entry if there already is an entry with unique ID."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        data={"additional": "data", "host": "0.0.0.0"},
        unique_id="mock-unique-id",
        state=config_entries.ConfigEntryState.LOADED,
    )
    entry.add_to_hass(hass)
    mock_integration(
        hass,
        MockModule("comp"),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            # Apply data updates to the existing entry but do not reload it.
            await self._abort_if_unique_id_configured(
                updates={"host": "1.1.1.1"}, reload_on_update=False
            )
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch(
        "homeassistant.config_entries.ConfigEntries.async_reload"
    ) as async_reload:
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    # Flow aborts, updates are merged into existing data, no reload occurs.
    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    assert entry.data["host"] == "1.1.1.1"
    assert entry.data["additional"] == "data"
    assert len(async_reload.mock_calls) == 0
async def test_unique_id_update_existing_entry_with_reload(hass, manager):
    """Test that we update an entry if there already is an entry with unique ID and we reload on changes."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        data={"additional": "data", "host": "0.0.0.0"},
        unique_id="mock-unique-id",
        state=config_entries.ConfigEntryState.LOADED,
    )
    entry.add_to_hass(hass)
    mock_integration(
        hass,
        MockModule("comp"),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    # Mutable so the second phase below can change the update payload.
    updates = {"host": "1.1.1.1"}
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            await self._abort_if_unique_id_configured(
                updates=updates, reload_on_update=True
            )
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch(
        "homeassistant.config_entries.ConfigEntries.async_reload"
    ) as async_reload:
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    # Loaded entry + data change => abort, update, and reload once.
    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    assert entry.data["host"] == "1.1.1.1"
    assert entry.data["additional"] == "data"
    assert len(async_reload.mock_calls) == 1
    # Test we don't reload if entry not started
    updates["host"] = "2.2.2.2"
    entry.state = config_entries.ConfigEntryState.NOT_LOADED
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch(
        "homeassistant.config_entries.ConfigEntries.async_reload"
    ) as async_reload:
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    assert entry.data["host"] == "2.2.2.2"
    assert entry.data["additional"] == "data"
    assert len(async_reload.mock_calls) == 0
async def test_unique_id_not_update_existing_entry(hass, manager):
    """Test that we do not update an entry if existing entry has the data."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        data={"additional": "data", "host": "0.0.0.0"},
        unique_id="mock-unique-id",
    )
    entry.add_to_hass(hass)
    mock_integration(
        hass,
        MockModule("comp"),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            # The "update" matches the stored data, so nothing should change.
            await self._abort_if_unique_id_configured(
                updates={"host": "0.0.0.0"}, reload_on_update=True
            )
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch(
        "homeassistant.config_entries.ConfigEntries.async_reload"
    ) as async_reload:
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    # Abort without touching data and without reloading the entry.
    assert result["type"] == RESULT_TYPE_ABORT
    assert result["reason"] == "already_configured"
    assert entry.data["host"] == "0.0.0.0"
    assert entry.data["additional"] == "data"
    assert len(async_reload.mock_calls) == 0
async def test_unique_id_in_progress(hass, manager):
    """Test that we abort if there is already a flow in progress with same unique id."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            return self.async_show_form(step_id="discovery")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        # Will be canceled
        result2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
    # The second flow aborts because the first holds the same unique ID.
    assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT
    assert result2["reason"] == "already_in_progress"
async def test_finish_flow_aborts_progress(hass, manager):
    """Test that when finishing a flow, we abort other flows in progress with unique ID."""
    mock_integration(
        hass,
        MockModule("comp", async_setup_entry=AsyncMock(return_value=True)),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            # raise_on_progress=False lets a second flow claim the same ID.
            await self.async_set_unique_id("mock-unique-id", raise_on_progress=False)
            if user_input is None:
                return self.async_show_form(step_id="discovery")
            return self.async_create_entry(title="yo", data={})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        # Will finish and cancel other one.
        result2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}, data={}
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # Completing the second flow also aborted the first pending one.
    assert len(hass.config_entries.flow.async_progress()) == 0
async def test_unique_id_ignore(hass, manager):
    """Test that we can ignore flows that are in progress and have a unique ID."""
    async_setup_entry = AsyncMock(return_value=False)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user flow."""
            await self.async_set_unique_id("mock-unique-id")
            return self.async_show_form(step_id="discovery")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        result2 = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_IGNORE},
            data={"unique_id": "mock-unique-id", "title": "Ignored Title"},
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    # assert len(hass.config_entries.flow.async_progress()) == 0
    # We should never set up an ignored entry.
    assert len(async_setup_entry.mock_calls) == 0
    # The ignore flow still creates an entry recording the ignored source.
    entry = hass.config_entries.async_entries("comp")[0]
    assert entry.source == "ignore"
    assert entry.unique_id == "mock-unique-id"
    assert entry.title == "Ignored Title"
async def test_manual_add_overrides_ignored_entry(hass, manager):
    """Test that we can ignore manually add entry, overriding ignored entry."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        data={"additional": "data", "host": "0.0.0.0"},
        unique_id="mock-unique-id",
        state=config_entries.ConfigEntryState.LOADED,
        source=config_entries.SOURCE_IGNORE,
    )
    entry.add_to_hass(hass)
    mock_integration(
        hass,
        MockModule("comp"),
    )
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            await self.async_set_unique_id("mock-unique-id")
            # NOTE(review): not awaited here, unlike the other tests in this
            # file — confirm _abort_if_unique_id_configured is a plain callback
            # in this codebase version.
            self._abort_if_unique_id_configured(
                updates={"host": "1.1.1.1"}, reload_on_update=False
            )
            return self.async_show_form(step_id="step2")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}), patch(
        "homeassistant.config_entries.ConfigEntries.async_reload"
    ) as async_reload:
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    # A manual (user) flow proceeds past the ignored entry instead of aborting.
    assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
    assert entry.data["host"] == "1.1.1.1"
    assert entry.data["additional"] == "data"
    assert len(async_reload.mock_calls) == 0
async def test_manual_add_overrides_ignored_entry_singleton(hass, manager):
    """Test that we can ignore manually add entry, overriding ignored entry."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        state=config_entries.ConfigEntryState.LOADED,
        source=config_entries.SOURCE_IGNORE,
    )
    entry.add_to_hass(hass)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_user(self, user_input=None):
            """Test user step."""
            # For a user flow, _async_current_entries skips ignored entries,
            # so the singleton guard does not trip here.
            if self._async_current_entries():
                return self.async_abort(reason="single_instance_allowed")
            return self.async_create_entry(title="title", data={"token": "supersecret"})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
    assert len(mock_setup_entry.mock_calls) == 1
    p_hass, p_entry = mock_setup_entry.mock_calls[0][1]
    assert p_hass is hass
    assert p_entry.data == {"token": "supersecret"}
async def test__async_current_entries_does_not_skip_ignore_non_user(hass, manager):
    """Test that _async_current_entries does not skip ignore by default for non user step."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        state=config_entries.ConfigEntryState.LOADED,
        source=config_entries.SOURCE_IGNORE,
    )
    entry.add_to_hass(hass)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_import(self, user_input=None):
            """Test not the user step."""
            if self._async_current_entries():
                return self.async_abort(reason="single_instance_allowed")
            return self.async_create_entry(title="title", data={"token": "supersecret"})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_IMPORT}
        )
        await hass.async_block_till_done()
    # The import flow saw the ignored entry and aborted, so no setup ran.
    assert len(mock_setup_entry.mock_calls) == 0
async def test__async_current_entries_explicit_skip_ignore(hass, manager):
    """Test that _async_current_entries can explicitly exclude ignored entries."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        state=config_entries.ConfigEntryState.LOADED,
        source=config_entries.SOURCE_IGNORE,
    )
    entry.add_to_hass(hass)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_import(self, user_input=None):
            """Test not the user step."""
            # include_ignore=False hides the ignored entry, so no abort.
            if self._async_current_entries(include_ignore=False):
                return self.async_abort(reason="single_instance_allowed")
            return self.async_create_entry(title="title", data={"token": "supersecret"})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_IMPORT}
        )
        await hass.async_block_till_done()
    assert len(mock_setup_entry.mock_calls) == 1
    p_hass, p_entry = mock_setup_entry.mock_calls[0][1]
    assert p_hass is hass
    assert p_entry.data == {"token": "supersecret"}
async def test__async_current_entries_explicit_include_ignore(hass, manager):
    """Test that _async_current_entries can explicitly include ignore."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(
        domain="comp",
        state=config_entries.ConfigEntryState.LOADED,
        source=config_entries.SOURCE_IGNORE,
    )
    entry.add_to_hass(hass)
    mock_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_import(self, user_input=None):
            """Test not the user step."""
            # include_ignore=True surfaces the ignored entry, forcing an abort.
            if self._async_current_entries(include_ignore=True):
                return self.async_abort(reason="single_instance_allowed")
            return self.async_create_entry(title="title", data={"token": "supersecret"})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}):
        await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_IMPORT}
        )
        await hass.async_block_till_done()
    assert len(mock_setup_entry.mock_calls) == 0
async def test_unignore_step_form(hass, manager):
    """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them."""
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_unignore(self, user_input):
            """Test unignore step."""
            unique_id = user_input["unique_id"]
            await self.async_set_unique_id(unique_id)
            return self.async_show_form(step_id="discovery")
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_IGNORE},
            data={"unique_id": "mock-unique-id", "title": "Ignored Title"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.source == "ignore"
        assert entry.unique_id == "mock-unique-id"
        assert entry.domain == "comp"
        assert entry.title == "Ignored Title"
        # Removing the ignored entry schedules the unignore step.
        await manager.async_remove(entry.entry_id)
        # Right after removal there shouldn't be an entry or active flows
        assert len(hass.config_entries.async_entries("comp")) == 0
        assert len(hass.config_entries.flow.async_progress_by_handler("comp")) == 0
        # But after a 'tick' the unignore step has run and we can see an active flow again.
        await hass.async_block_till_done()
        assert len(hass.config_entries.flow.async_progress_by_handler("comp")) == 1
        # and still not config entries
        assert len(hass.config_entries.async_entries("comp")) == 0
async def test_unignore_create_entry(hass, manager):
    """Test that we can ignore flows that are in progress and have a unique ID, then rediscover them."""
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)
    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""
        VERSION = 1
        async def async_step_unignore(self, user_input):
            """Test unignore step."""
            unique_id = user_input["unique_id"]
            await self.async_set_unique_id(unique_id)
            return self.async_create_entry(title="yo", data={})
    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_IGNORE},
            data={"unique_id": "mock-unique-id", "title": "Ignored Title"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.source == "ignore"
        assert entry.unique_id == "mock-unique-id"
        assert entry.domain == "comp"
        assert entry.title == "Ignored Title"
        # Removing the ignored entry schedules the unignore step.
        await manager.async_remove(entry.entry_id)
        # Right after removal there shouldn't be an entry or flow
        assert len(hass.config_entries.flow.async_progress_by_handler("comp")) == 0
        assert len(hass.config_entries.async_entries("comp")) == 0
        # But after a 'tick' the unignore step has run and we can see a config entry.
        await hass.async_block_till_done()
        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.source == config_entries.SOURCE_UNIGNORE
        assert entry.unique_id == "mock-unique-id"
        assert entry.title == "yo"
        # And still no active flow
        assert len(hass.config_entries.flow.async_progress_by_handler("comp")) == 0
async def test_unignore_default_impl(hass, manager):
    """Test that rediscovery is a no-op by default."""
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        # No async_step_unignore override: the base-class default applies.
        VERSION = 1

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_IGNORE},
            data={"unique_id": "mock-unique-id", "title": "Ignored Title"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY

        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.source == "ignore"
        assert entry.unique_id == "mock-unique-id"
        assert entry.domain == "comp"
        assert entry.title == "Ignored Title"

        # Removing the ignored entry must trigger neither a new entry nor a new flow.
        await manager.async_remove(entry.entry_id)
        await hass.async_block_till_done()
        assert len(hass.config_entries.async_entries("comp")) == 0
        assert len(hass.config_entries.flow.async_progress()) == 0
async def test_partial_flows_hidden(hass, manager):
    """Test that flows that don't have a cur_step and haven't finished initing are hidden."""
    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(hass, MockModule("comp", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)

    # A flag to test our assertion that `async_step_discovery` was called and is in its blocked state
    # This simulates if the step was e.g. doing network i/o
    discovery_started = asyncio.Event()

    # A flag to allow `async_step_discovery` to resume after we have verified the uninited flow is not
    # visible and has not triggered a discovery alert. This lets us control when the mocked network
    # i/o is complete.
    pause_discovery = asyncio.Event()

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            discovery_started.set()
            # Block here until the test releases us, simulating slow i/o.
            await pause_discovery.wait()
            return self.async_show_form(step_id="someform")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Start a config entry flow and wait for it to be blocked
        init_task = asyncio.ensure_future(
            manager.flow.async_init(
                "comp",
                context={"source": config_entries.SOURCE_DISCOVERY},
                data={"unique_id": "mock-unique-id"},
            )
        )
        await discovery_started.wait()

        # While it's blocked it shouldn't be visible or trigger discovery notifications
        assert len(hass.config_entries.flow.async_progress()) == 0
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is None

        # Let the flow init complete
        pause_discovery.set()

        # When it's complete it should now be visible in async_progress and have triggered
        # discovery notifications
        result = await init_task
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
        assert len(hass.config_entries.flow.async_progress()) == 1
        await hass.async_block_till_done()
        state = hass.states.get("persistent_notification.config_entry_discovery")
        assert state is not None
async def test_async_setup_init_entry(hass):
    """Test a config entry being initialized during integration setup."""

    async def mock_async_setup(hass, config):
        """Mock setup."""
        # Kick off an import flow from within the integration's own setup.
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                "comp",
                context={"source": config_entries.SOURCE_IMPORT},
                data={},
            )
        )
        return True

    async_setup_entry = AsyncMock(return_value=True)
    mock_integration(
        hass,
        MockModule(
            "comp", async_setup=mock_async_setup, async_setup_entry=async_setup_entry
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_import(self, user_input):
            """Test import step creating entry."""
            return self.async_create_entry(title="title", data={})

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        assert await async_setup_component(hass, "comp", {})
        await hass.async_block_till_done()
        # The imported entry must be set up exactly once and end up loaded.
        assert len(async_setup_entry.mock_calls) == 1

        entries = hass.config_entries.async_entries("comp")
        assert len(entries) == 1
        assert entries[0].state is config_entries.ConfigEntryState.LOADED
async def test_async_setup_update_entry(hass):
    """Test a config entry being updated during integration setup."""
    entry = MockConfigEntry(domain="comp", data={"value": "initial"})
    entry.add_to_hass(hass)

    async def mock_async_setup(hass, config):
        """Mock setup."""
        hass.async_create_task(
            hass.config_entries.flow.async_init(
                "comp",
                context={"source": config_entries.SOURCE_IMPORT},
                data={},
            )
        )
        return True

    async def mock_async_setup_entry(hass, entry):
        """Mock setting up an entry."""
        # By the time the entry is set up, the import flow must have updated it.
        assert entry.data["value"] == "updated"
        return True

    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=mock_async_setup,
            async_setup_entry=mock_async_setup_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_import(self, user_input):
            """Test import step updating existing entry."""
            # async_update_entry returns True when something actually changed.
            assert (
                self.hass.config_entries.async_update_entry(
                    entry, data={"value": "updated"}
                )
                is True
            )
            return self.async_abort(reason="yo")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        assert await async_setup_component(hass, "comp", {})

        entries = hass.config_entries.async_entries("comp")
        assert len(entries) == 1
        assert entries[0].state is config_entries.ConfigEntryState.LOADED
        assert entries[0].data == {"value": "updated"}
@pytest.mark.parametrize(
    "discovery_source",
    (
        (config_entries.SOURCE_DISCOVERY, {}),
        (config_entries.SOURCE_SSDP, BaseServiceInfo()),
        (config_entries.SOURCE_USB, BaseServiceInfo()),
        (config_entries.SOURCE_HOMEKIT, BaseServiceInfo()),
        (config_entries.SOURCE_DHCP, BaseServiceInfo()),
        (config_entries.SOURCE_ZEROCONF, BaseServiceInfo()),
        (config_entries.SOURCE_HASSIO, HassioServiceInfo(config={})),
    ),
)
async def test_flow_with_default_discovery(hass, manager, discovery_source):
    """Test that finishing a default discovery flow removes the unique ID in the entry."""
    # NOTE: each ``discovery_source`` parameter is a ``(source, data)`` tuple.
    mock_integration(
        hass,
        MockModule("comp", async_setup_entry=AsyncMock(return_value=True)),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_user(self, user_input=None):
            """Test user step."""
            if user_input is None:
                return self.async_show_form(step_id="user")
            return self.async_create_entry(title="yo", data={})

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        result = await manager.flow.async_init(
            "comp", context={"source": discovery_source[0]}, data=discovery_source[1]
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM

        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1
        # While in progress the flow carries the shared default-discovery placeholder ID.
        assert (
            flows[0]["context"]["unique_id"]
            == config_entries.DEFAULT_DISCOVERY_UNIQUE_ID
        )

        # Finish flow
        result2 = await manager.flow.async_configure(
            result["flow_id"], user_input={"fake": "data"}
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY

        assert len(hass.config_entries.flow.async_progress()) == 0

        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.title == "yo"
        assert entry.source == discovery_source[0]
        # The placeholder unique ID must not leak into the created entry.
        assert entry.unique_id is None
async def test_flow_with_default_discovery_with_unique_id(hass, manager):
    """Test discovery flow using the default discovery is ignored when unique ID is set."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            await self.async_set_unique_id("mock-unique-id")
            # This call should make no difference, as a unique ID is set
            await self._async_handle_discovery_without_unique_id()
            return self.async_show_form(step_id="mock")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM

        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1
        # The explicit unique ID wins over the default-discovery placeholder.
        assert flows[0]["context"]["unique_id"] == "mock-unique-id"
async def test_default_discovery_abort_existing_entries(hass, manager):
    """Test that a flow without discovery implementation aborts when a config entry exists."""
    hass.config.components.add("comp")
    entry = MockConfigEntry(domain="comp", data={}, unique_id="mock-unique-id")
    entry.add_to_hass(hass)

    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        # No async_step_discovery override: the inherited default handling applies.
        VERSION = 1

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}
        )
        # Aborts because "comp" already has a config entry.
        assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
        assert result["reason"] == "already_configured"
async def test_default_discovery_in_progress(hass, manager):
    """Test that a flow using default discovery can only be triggered once."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            await self.async_set_unique_id(discovery_info.get("unique_id"))
            await self._async_handle_discovery_without_unique_id()
            return self.async_show_form(step_id="mock")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_DISCOVERY},
            data={"unique_id": "mock-unique-id"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM

        # Second discovery without a unique ID
        result2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
        )
        # Aborted: a default-discovery flow for this handler is already running.
        assert result2["type"] == data_entry_flow.RESULT_TYPE_ABORT

        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1
        assert flows[0]["context"]["unique_id"] == "mock-unique-id"
async def test_default_discovery_abort_on_new_unique_flow(hass, manager):
    """Test that a flow using default discovery is aborted when a second flow with unique ID is created."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_discovery(self, discovery_info):
            """Test discovery step."""
            await self.async_set_unique_id(discovery_info.get("unique_id"))
            await self._async_handle_discovery_without_unique_id()
            return self.async_show_form(step_id="mock")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # First discovery with default, no unique ID
        result2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_FORM

        # Second discovery brings in a unique ID
        result = await manager.flow.async_init(
            "comp",
            context={"source": config_entries.SOURCE_DISCOVERY},
            data={"unique_id": "mock-unique-id"},
        )
        assert result["type"] == data_entry_flow.RESULT_TYPE_FORM

        # Ensure the first one is cancelled and we end up with just the last one
        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1
        assert flows[0]["context"]["unique_id"] == "mock-unique-id"
async def test_default_discovery_abort_on_user_flow_complete(hass, manager):
    """Test that a flow using default discovery is aborted when a second flow completes."""
    mock_integration(hass, MockModule("comp"))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_user(self, user_input=None):
            """Test user step."""
            if user_input is None:
                return self.async_show_form(step_id="user")
            return self.async_create_entry(title="title", data={"token": "supersecret"})

        async def async_step_discovery(self, discovery_info=None):
            """Test discovery step."""
            await self._async_handle_discovery_without_unique_id()
            return self.async_show_form(step_id="mock")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # First discovery with default, no unique ID
        flow1 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_DISCOVERY}, data={}
        )
        assert flow1["type"] == data_entry_flow.RESULT_TYPE_FORM

        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1

        # User sets up a manual flow
        flow2 = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        assert flow2["type"] == data_entry_flow.RESULT_TYPE_FORM

        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 2

        # Complete the manual flow
        result = await hass.config_entries.flow.async_configure(flow2["flow_id"], {})
        assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY

        # Ensure the first flow is gone now
        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 0
async def test_flow_same_device_multiple_sources(hass, manager):
    """Test discovery of the same devices from multiple discovery sources."""
    mock_integration(
        hass,
        MockModule("comp", async_setup_entry=AsyncMock(return_value=True)),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_zeroconf(self, discovery_info=None):
            """Test zeroconf step."""
            return await self._async_discovery_handler(discovery_info)

        async def async_step_homekit(self, discovery_info=None):
            """Test homekit step."""
            return await self._async_discovery_handler(discovery_info)

        async def _async_discovery_handler(self, discovery_info=None):
            """Test any discovery handler."""
            await self.async_set_unique_id("thisid")
            self._abort_if_unique_id_configured()
            # Yield to the event loop so the three flows genuinely race each other.
            await asyncio.sleep(0.1)
            return await self.async_step_link()

        async def async_step_link(self, user_input=None):
            """Test a link step."""
            if user_input is None:
                return self.async_show_form(step_id="link")
            return self.async_create_entry(title="title", data={"token": "supersecret"})

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow}):
        # Create one to be in progress
        flow1 = manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_ZEROCONF}
        )
        flow2 = manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_ZEROCONF}
        )
        flow3 = manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_HOMEKIT}
        )
        result1, result2, result3 = await asyncio.gather(flow1, flow2, flow3)

        # Deduplication by unique ID leaves exactly one surviving flow.
        flows = hass.config_entries.flow.async_progress()
        assert len(flows) == 1
        assert flows[0]["context"]["unique_id"] == "thisid"

        # Finish flow
        result2 = await manager.flow.async_configure(
            flows[0]["flow_id"], user_input={"fake": "data"}
        )
        assert result2["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY

        assert len(hass.config_entries.flow.async_progress()) == 0

        entry = hass.config_entries.async_entries("comp")[0]
        assert entry.title == "title"
        # Which discovery source wins the race is nondeterministic.
        assert entry.source in {
            config_entries.SOURCE_ZEROCONF,
            config_entries.SOURCE_HOMEKIT,
        }
        assert entry.unique_id == "thisid"
async def test_updating_entry_with_and_without_changes(manager):
    """Test that we can update an entry data."""
    entry = MockConfigEntry(
        domain="test",
        data={"first": True},
        title="thetitle",
        options={"option": True},
        unique_id="abc123",
        state=config_entries.ConfigEntryState.SETUP_ERROR,
    )
    entry.add_to_manager(manager)

    # No keyword arguments -> nothing changes -> returns False.
    assert manager.async_update_entry(entry) is False

    for change in (
        {"data": {"second": True, "third": 456}},
        {"data": {"second": True}},
        {"options": {"hello": True}},
        {"pref_disable_new_entities": True},
        {"pref_disable_polling": True},
        {"title": "sometitle"},
        {"unique_id": "abcd1234"},
    ):
        # First application changes the entry (True); repeating it is a no-op (False).
        assert manager.async_update_entry(entry, **change) is True
        assert manager.async_update_entry(entry, **change) is False
async def test_entry_reload_calls_on_unload_listeners(hass, manager):
    """Test reload calls the on unload listeners."""
    entry = MockConfigEntry(domain="comp", state=config_entries.ConfigEntryState.LOADED)
    entry.add_to_hass(hass)

    async_setup = AsyncMock(return_value=True)
    mock_setup_entry = AsyncMock(return_value=True)
    async_unload_entry = AsyncMock(return_value=True)

    mock_integration(
        hass,
        MockModule(
            "comp",
            async_setup=async_setup,
            async_setup_entry=mock_setup_entry,
            async_unload_entry=async_unload_entry,
        ),
    )
    mock_entity_platform(hass, "config_flow.comp", None)

    mock_unload_callback = Mock()
    entry.async_on_unload(mock_unload_callback)

    assert await manager.async_reload(entry.entry_id)
    assert len(async_unload_entry.mock_calls) == 1
    assert len(mock_setup_entry.mock_calls) == 1
    # The registered on-unload callback fires during the reload's unload phase.
    assert len(mock_unload_callback.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED

    assert await manager.async_reload(entry.entry_id)
    assert len(async_unload_entry.mock_calls) == 2
    assert len(mock_setup_entry.mock_calls) == 2
    # Since we did not register another async_on_unload it should
    # have only been called once
    assert len(mock_unload_callback.mock_calls) == 1
    assert entry.state is config_entries.ConfigEntryState.LOADED
async def test_setup_raise_auth_failed(hass, caplog):
    """Test a setup raising ConfigEntryAuthFailed."""
    entry = MockConfigEntry(title="test_title", domain="test")

    mock_setup_entry = AsyncMock(
        side_effect=ConfigEntryAuthFailed("The password is no longer valid")
    )
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)

    await entry.async_setup(hass)
    await hass.async_block_till_done()
    assert "could not authenticate: The password is no longer valid" in caplog.text

    assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR
    assert entry.reason == "The password is no longer valid"
    # The auth failure must have started exactly one reauth flow for this entry.
    flows = hass.config_entries.flow.async_progress()
    assert len(flows) == 1
    assert flows[0]["context"]["entry_id"] == entry.entry_id
    assert flows[0]["context"]["source"] == config_entries.SOURCE_REAUTH
    assert flows[0]["context"]["title_placeholders"] == {"name": "test_title"}

    caplog.clear()
    # Reset state so async_setup runs again from scratch.
    entry.state = config_entries.ConfigEntryState.NOT_LOADED
    entry.reason = None
    await entry.async_setup(hass)
    await hass.async_block_till_done()
    assert "could not authenticate: The password is no longer valid" in caplog.text

    # Verify multiple ConfigEntryAuthFailed does not generate a second flow
    assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR
    flows = hass.config_entries.flow.async_progress()
    assert len(flows) == 1
async def test_setup_raise_auth_failed_from_first_coordinator_update(hass, caplog):
    """Test async_config_entry_first_refresh raises ConfigEntryAuthFailed."""
    entry = MockConfigEntry(title="test_title", domain="test")

    async def async_setup_entry(hass, entry):
        """Mock setup entry with a simple coordinator."""

        async def _async_update_data():
            raise ConfigEntryAuthFailed("The password is no longer valid")

        coordinator = DataUpdateCoordinator(
            hass,
            logging.getLogger(__name__),
            name="any",
            update_method=_async_update_data,
            update_interval=timedelta(seconds=1000),
        )

        # The first refresh happens inside setup, so the auth failure
        # propagates out of async_setup_entry.
        await coordinator.async_config_entry_first_refresh()
        return True

    mock_integration(hass, MockModule("test", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)

    await entry.async_setup(hass)
    await hass.async_block_till_done()
    assert "could not authenticate: The password is no longer valid" in caplog.text

    assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR
    flows = hass.config_entries.flow.async_progress()
    assert len(flows) == 1
    assert flows[0]["context"]["entry_id"] == entry.entry_id
    assert flows[0]["context"]["source"] == config_entries.SOURCE_REAUTH

    caplog.clear()
    entry.state = config_entries.ConfigEntryState.NOT_LOADED
    await entry.async_setup(hass)
    await hass.async_block_till_done()
    assert "could not authenticate: The password is no longer valid" in caplog.text

    # Verify multiple ConfigEntryAuthFailed does not generate a second flow
    assert entry.state is config_entries.ConfigEntryState.SETUP_ERROR
    flows = hass.config_entries.flow.async_progress()
    assert len(flows) == 1
async def test_setup_raise_auth_failed_from_future_coordinator_update(hass, caplog):
    """Test a coordinator raises ConfigEntryAuthFailed in the future."""
    entry = MockConfigEntry(title="test_title", domain="test")

    async def async_setup_entry(hass, entry):
        """Mock setup entry with a simple coordinator."""

        async def _async_update_data():
            raise ConfigEntryAuthFailed("The password is no longer valid")

        coordinator = DataUpdateCoordinator(
            hass,
            logging.getLogger(__name__),
            name="any",
            update_method=_async_update_data,
            update_interval=timedelta(seconds=1000),
        )

        # Unlike async_config_entry_first_refresh, async_refresh does not
        # propagate the failure, so setup succeeds and the entry is LOADED.
        await coordinator.async_refresh()
        return True

    mock_integration(hass, MockModule("test", async_setup_entry=async_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)

    await entry.async_setup(hass)
    await hass.async_block_till_done()
    assert "Authentication failed while fetching" in caplog.text
    assert "The password is no longer valid" in caplog.text

    assert entry.state is config_entries.ConfigEntryState.LOADED
    flows = hass.config_entries.flow.async_progress()
    assert len(flows) == 1
    assert flows[0]["context"]["entry_id"] == entry.entry_id
    assert flows[0]["context"]["source"] == config_entries.SOURCE_REAUTH

    caplog.clear()
    entry.state = config_entries.ConfigEntryState.NOT_LOADED
    await entry.async_setup(hass)
    await hass.async_block_till_done()
    assert "Authentication failed while fetching" in caplog.text
    assert "The password is no longer valid" in caplog.text

    # Verify multiple ConfigEntryAuthFailed does not generate a second flow
    assert entry.state is config_entries.ConfigEntryState.LOADED
    flows = hass.config_entries.flow.async_progress()
    assert len(flows) == 1
async def test_initialize_and_shutdown(hass):
    """Test we call the shutdown function at stop."""
    manager = config_entries.ConfigEntries(hass, {})

    with patch.object(manager, "_async_shutdown") as mock_async_shutdown:
        await manager.async_initialize()
        # Firing the stop event must invoke the manager's shutdown hook.
        hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
        await hass.async_block_till_done()
        assert mock_async_shutdown.called
async def test_setup_retrying_during_shutdown(hass):
    """Test if we shutdown an entry that is in retry mode."""
    entry = MockConfigEntry(domain="test")

    mock_setup_entry = AsyncMock(side_effect=ConfigEntryNotReady)
    mock_integration(hass, MockModule("test", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.test", None)

    with patch("homeassistant.helpers.event.async_call_later") as mock_call:
        await entry.async_setup(hass)

        assert entry.state is config_entries.ConfigEntryState.SETUP_RETRY
        # The scheduled retry callback has not fired yet...
        assert len(mock_call.return_value.mock_calls) == 0

        hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
        await hass.async_block_till_done()

        # ...shutdown must have cancelled it so it never fires...
        assert len(mock_call.return_value.mock_calls) == 0

        # ...even after the retry interval would have elapsed.
        async_fire_time_changed(hass, dt.utcnow() + timedelta(hours=4))
        await hass.async_block_till_done()

        assert len(mock_call.return_value.mock_calls) == 0
@pytest.mark.parametrize(
    "matchers, reason",
    [
        ({}, "already_configured"),
        ({"host": "3.3.3.3"}, "no_match"),
        ({"vendor": "no_match"}, "no_match"),
        ({"host": "3.4.5.6"}, "already_configured"),
        ({"host": "3.4.5.6", "ip": "3.4.5.6"}, "no_match"),
        ({"host": "3.4.5.6", "ip": "1.2.3.4"}, "already_configured"),
        ({"host": "3.4.5.6", "ip": "1.2.3.4", "port": 23}, "already_configured"),
        (
            {"host": "9.9.9.9", "ip": "6.6.6.6", "port": 12, "vendor": "zoo"},
            "already_configured",
        ),
        ({"vendor": "zoo"}, "already_configured"),
        ({"ip": "9.9.9.9"}, "already_configured"),
        ({"ip": "7.7.7.7"}, "no_match"),  # ignored
        ({"vendor": "data"}, "no_match"),
        (
            {"vendor": "options"},
            "already_configured",
        ),  # ensure options takes precedence over data
    ],
)
async def test__async_abort_entries_match(hass, manager, matchers, reason):
    """Test aborting if matching config entries exist."""
    MockConfigEntry(
        domain="comp", data={"ip": "1.2.3.4", "host": "4.5.6.7", "port": 23}
    ).add_to_hass(hass)
    MockConfigEntry(
        domain="comp", data={"ip": "9.9.9.9", "host": "4.5.6.7", "port": 23}
    ).add_to_hass(hass)
    MockConfigEntry(
        domain="comp", data={"ip": "1.2.3.4", "host": "3.4.5.6", "port": 23}
    ).add_to_hass(hass)
    # Ignored entries are skipped by the matcher (see the "7.7.7.7" param above).
    MockConfigEntry(
        domain="comp",
        source=config_entries.SOURCE_IGNORE,
        data={"ip": "7.7.7.7", "host": "4.5.6.7", "port": 23},
    ).add_to_hass(hass)
    MockConfigEntry(
        domain="comp",
        data={"ip": "6.6.6.6", "host": "9.9.9.9", "port": 12},
        options={"vendor": "zoo"},
    ).add_to_hass(hass)
    MockConfigEntry(
        domain="comp",
        data={"vendor": "data"},
        options={"vendor": "options"},
    ).add_to_hass(hass)

    mock_setup_entry = AsyncMock(return_value=True)

    mock_integration(hass, MockModule("comp", async_setup_entry=mock_setup_entry))
    mock_entity_platform(hass, "config_flow.comp", None)

    class TestFlow(config_entries.ConfigFlow):
        """Test flow."""

        VERSION = 1

        async def async_step_user(self, user_input=None):
            """Test user step."""
            # Aborts with "already_configured" when an entry matches;
            # otherwise falls through to the explicit "no_match" abort.
            self._async_abort_entries_match(matchers)
            return self.async_abort(reason="no_match")

    with patch.dict(config_entries.HANDLERS, {"comp": TestFlow, "beer": 5}):
        result = await manager.flow.async_init(
            "comp", context={"source": config_entries.SOURCE_USER}
        )
        await hass.async_block_till_done()
        assert result["type"] == RESULT_TYPE_ABORT
        assert result["reason"] == reason
async def test_loading_old_data(hass, hass_storage):
    """Test automatically migrating old data."""
    hass_storage[config_entries.STORAGE_KEY] = {
        "version": 1,
        "data": {
            "entries": [
                {
                    "version": 5,
                    "domain": "my_domain",
                    "entry_id": "mock-id",
                    "data": {"my": "data"},
                    "source": "user",
                    "title": "Mock title",
                    # Legacy key expected to migrate to pref_disable_new_entities.
                    "system_options": {"disable_new_entities": True},
                }
            ]
        },
    }
    manager = config_entries.ConfigEntries(hass, {})
    await manager.async_initialize()

    entries = manager.async_entries()
    assert len(entries) == 1
    entry = entries[0]
    assert entry.version == 5
    assert entry.domain == "my_domain"
    assert entry.entry_id == "mock-id"
    assert entry.title == "Mock title"
    assert entry.data == {"my": "data"}
    assert entry.pref_disable_new_entities is True
async def test_deprecated_disabled_by_str_ctor(hass, caplog):
    """Test deprecated str disabled_by constructor enumizes and logs a warning."""
    # Passing the raw string value instead of the enum member still works...
    entry = MockConfigEntry(disabled_by=config_entries.ConfigEntryDisabler.USER.value)
    assert entry.disabled_by is config_entries.ConfigEntryDisabler.USER
    # ...but emits a deprecation warning.
    assert " str for config entry disabled_by. This is deprecated " in caplog.text
async def test_deprecated_disabled_by_str_set(hass, manager, caplog):
    """Test deprecated str set disabled_by enumizes and logs a warning."""
    entry = MockConfigEntry()
    entry.add_to_manager(manager)
    # Setting disabled_by with the raw string value is coerced to the enum...
    assert await manager.async_set_disabled_by(
        entry.entry_id, config_entries.ConfigEntryDisabler.USER.value
    )
    assert entry.disabled_by is config_entries.ConfigEntryDisabler.USER
    # ...and logs a deprecation warning.
    assert " str for config entry disabled_by. This is deprecated " in caplog.text
|
{
"content_hash": "47a03dfe6ccb72a576c6d99aac038ae6",
"timestamp": "",
"source": "github",
"line_count": 3014,
"max_line_length": 108,
"avg_line_length": 35.6413404114134,
"alnum_prop": 0.6476173631345242,
"repo_name": "rohitranjan1991/home-assistant",
"id": "b62e9bffbce40512622f9a468368d998c27d0099",
"size": "107423",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "tests/test_config_entries.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1017265"
},
{
"name": "Python",
"bytes": "1051086"
},
{
"name": "Shell",
"bytes": "3946"
}
],
"symlink_target": ""
}
|
from enum import Enum
import yaml
import os
import datetime
import glob
import time
import threading
from abc import ABC, abstractmethod
import json
import requests
import traceback
from nameko.rpc import rpc
from nameko.timer import timer
from nameko.events import EventDispatcher, event_handler
from nameko.dependency_providers import Config
from nameko.standalone.events import event_dispatcher
from nameko.events import event_handler
from AutoReef.common import rabbit_config, load_config
import RPi.GPIO as GPIO
import Adafruit_CharLCD as LCD
"""
Relays / GPIO
"""
class RelayMode(Enum):
    """Electrical wiring mode of a relay channel (see Relay.on/off)."""

    NORMAL_OPEN = 1  # driving the pin high switches the device on
    NORMAL_CLOSED = 2  # driving the pin low switches the device on
class State(Enum):
    """Logical on/off state of a GPIO-backed device."""

    HIGH = 1
    LOW = 0
    ERROR = 2  # unexpected GPIO read value
class DeviceType(Enum):
    """Kinds of aquarium equipment a relay channel can drive."""

    HEATER = 1
    PUMP = 2
    SKIMMER = 3
    LIGHT = 4
    # 5 is unused — presumably a removed device type; TODO confirm.
    FAN = 6
# TODO: Really need to move this somewhere else
class LogLevel(Enum):
    """Severity levels attached to dispatched log events (see Relay._log)."""

    DEBUG = 1
    INFO = 2
    WARN = 3
    CRIT = 4
    ERROR = 5
ERROR = 5
class GPIODevice:
    """Thin wrapper around a single GPIO pin, addressed by BCM number."""

    name = None
    pin = None
    direction = None

    @property
    def state(self):
        """Return the raw pin level as a State member (ERROR on unexpected reads)."""
        level = GPIO.input(self.pin)
        if level == 1:
            return State.HIGH
        if level == 0:
            return State.LOW
        return State.ERROR

    def __init__(self, pin, direction, name=None):
        """Configure *pin* for *direction* (BCM numbering, warnings suppressed)."""
        self.name = name
        self.pin = pin
        self.direction = direction
        GPIO.setmode(GPIO.BCM)
        GPIO.setwarnings(False)
        GPIO.setup(self.pin, self.direction)

    def on(self):
        """Drive the pin high."""
        GPIO.output(self.pin, GPIO.HIGH)

    def off(self):
        """Drive the pin low."""
        GPIO.output(self.pin, GPIO.LOW)
class Relay(GPIODevice):
    """A GPIO-driven relay channel with wiring-mode aware on/off logic.

    State changes are reported as nameko events ("Relay"/"event_log").
    Relay definitions are loaded from the YAML file named by ``config_file``.
    """

    relay_mode = RelayMode.NORMAL_OPEN
    device_type = None
    # YAML file holding relay definitions (name, pin, relay_mode, device_type).
    config_file = "relayConfig.yaml"

    @property
    def state(self):
        """Logical device state, translating the pin level through the wiring mode."""
        val = GPIO.input(self.pin)
        if val == 0 and self.relay_mode == RelayMode.NORMAL_OPEN:
            return State.LOW
        elif val == 1 and self.relay_mode == RelayMode.NORMAL_OPEN:
            return State.HIGH
        elif val == 0 and self.relay_mode == RelayMode.NORMAL_CLOSED:
            return State.HIGH
        elif val == 1 and self.relay_mode == RelayMode.NORMAL_CLOSED:
            return State.LOW
        else:
            return State.ERROR

    def __init__(self, pin, relay_mode, device_type=None, name=None):
        """Configure the relay on *pin* as a GPIO output."""
        self.relay_mode = relay_mode
        self.device_type = device_type
        super().__init__(pin, GPIO.OUT, name)

    def on(self):
        """Switch the device on (no-op when already on); logs the transition."""
        if self.state != State.HIGH:
            try:
                if self.relay_mode == RelayMode.NORMAL_OPEN:
                    super().on()
                else:
                    super().off()
                self._log(LogLevel.INFO, "Turned on")
            # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
            # are no longer swallowed.
            except Exception:
                self._log(LogLevel.ERROR, "Unable to turn on")

    def off(self):
        """Switch the device off (no-op when already off); logs the transition."""
        if self.state != State.LOW:
            try:
                if self.relay_mode == RelayMode.NORMAL_OPEN:
                    super().off()
                else:
                    super().on()
                self._log(LogLevel.INFO, "Turned off")
            # Narrowed from a bare ``except:`` for the same reason as in ``on``.
            except Exception:
                self._log(LogLevel.ERROR, "Unable to turn off")

    def _log(self, log_level, message):
        """Dispatch a timestamped log event for this relay over the message bus."""
        time = datetime.datetime.now()
        payload = {
            "time": str(time),
            "log_level": str(log_level),
            "device_type": str(self.device_type),
            "name": self.name,
            "message": message,
        }
        dispatch = event_dispatcher(rabbit_config)
        dispatch("Relay", "event_log", payload)

    @classmethod
    def load_all(cls):
        """Build Relay objects for every entry in the config file."""
        config = load_config(cls.config_file)
        return cls._config_to_obj(config)

    @classmethod
    def load_by_name(cls, name):
        """Build Relay objects for config entries whose name matches *name*."""
        config = load_config(cls.config_file)
        matches = [x for x in config if x["name"] == name]
        return cls._config_to_obj(matches)

    @classmethod
    def load_by_type(cls, device_type):
        """Build Relay objects for entries matching *device_type*.

        The comparison is string based ("DeviceType.X" vs str(enum member))
        to match the bare member names stored in the YAML file.
        """
        config = load_config(cls.config_file)
        matches = [x for x in config if ("DeviceType." + x["device_type"]) == str(device_type)]
        return cls._config_to_obj(matches)

    @classmethod
    def _config_to_obj(cls, conf_list):
        """Translate a list of config dicts into Relay instances."""
        relay_list = []
        for conf in conf_list:
            name = conf["name"]
            # getattr(..., None) tolerates unknown enum names in the YAML.
            device_type = getattr(DeviceType, conf["device_type"], None)
            pin = conf["pin"]
            relay_mode = getattr(RelayMode, conf["relay_mode"], None)
            relay = cls(pin, relay_mode, device_type, name)
            relay_list.append(relay)
        return relay_list
"""
LCD Panel / Controller
"""
# Initialize the LCD using the pins
# NOTE(review): the ``lcd`` global referenced by the classes below appears to
# be created elsewhere (not visible in this chunk) — confirm before relying
# on it here.
# Serialises access to the single shared LCD device across threads.
lock = threading.RLock()

# Backlight colour cheat-sheet for Adafruit_CharLCDPlate.set_color():
#RED lcd.set_color(1.0, 0.0, 0.0)
#GREEN lcd.set_color(0.0, 1.0, 0.0)
#BLUE lcd.set_color(0.0, 0.0, 1.0)
#YELLOW lcd.set_color(1.0, 1.0, 0.0)
#CYAN lcd.set_color(0.0, 1.0, 1.0)
#MAGENTA lcd.set_color(1.0, 0.0, 1.0)
#WHITE lcd.set_color(1.0, 1.0, 1.0)
class EventQueue:
    """Minimal FIFO store for incoming event payloads.

    NOTE(review): ``_event_list`` is a class attribute, so it is shared by
    every instance and subclass unless an ``__init__`` rebinds it — confirm
    this sharing is intended.
    """

    _event_list = []

    def add_event(self, event=None):
        """Record *event* for later display.

        BUG FIX: callers (the EventListener handlers) invoke
        ``add_event(payload)``, but this method previously accepted no
        argument (raising TypeError) and discarded the event entirely.
        The default keeps any existing no-argument callers working.
        """
        self._event_list.append(event)
class LCDController(EventQueue):
    """Owns the Adafruit character-LCD plate and displays queued events."""

    _lcd = None  # Adafruit_CharLCDPlate instance, created in reset()
    _event_mode = True  # toggle between event display and button mode

    def __init__(self):
        self.reset()

    def reset(self):
        """(Re)initialise the LCD hardware and show the boot banner."""
        self._lcd = LCD.Adafruit_CharLCDPlate()
        self._lcd.clear()
        self._lcd.message("AutoReef init...")

    def check_button_thread(self):
        """Poll the plate buttons forever, updating the display on press.

        NOTE(review): this uses module-level ``lcd`` and ``buttons`` rather
        than ``self._lcd`` — presumably defined elsewhere in the module;
        confirm, otherwise this thread raises NameError.
        """
        while True:
            # Loop through each button and check if it is pressed.
            for button in buttons:
                try:
                    if lcd.is_pressed(button[0]):
                        # Button is pressed, change the message and backlight.
                        lcd.clear()
                        lcd.message(button[1])
                        lcd.set_color(button[2][0], button[2][1], button[2][2])
                except OSError:
                    # I2C reads occasionally fail; report and keep polling.
                    print("OSError")

    def display_event_thread(self):
        """Placeholder: intended to render queued events on the LCD."""
        pass
class EventListener:
    """Nameko-style service that forwards bus events to the LCD queue.

    NOTE(review): ``lcd`` is assumed to be a module-level LCDController
    instance created elsewhere — confirm.
    """

    name = "EventListener"

    @event_handler("DS18B20", "log_temp")
    def temp_event_handler(self, payload):
        """Queue temperature readings published by the DS18B20 service."""
        lcd.add_event(payload)

    @event_handler("Relay", "event_log")
    def relay_event_handler(self, payload):
        """Queue relay state-change log events."""
        lcd.add_event(payload)
"""
Temp Sensors
"""
class TempSensorState(Enum):
    """Discrete temperature bands for a probe, coldest (1) to hottest (7)."""

    LOW_TEMP_CRITICAL = 1
    LOW_TEMP_WARNING = 2
    LOW_TEMP = 3
    NORMAL_TEMP = 4
    HIGH_TEMP = 5
    HIGH_TEMP_WARNING = 6
    HIGH_TEMP_CRITICAL = 7
class TempSensor(ABC):
    """Interface for anything that can report a temperature reading."""

    @abstractmethod
    def get_temp(self):
        """Return the current temperature (degrees Celsius for DS18B20)."""
        pass
class DS18B20(TempSensor):
    """1-Wire DS18B20 temperature probe read via the sysfs w1 interface."""

    device_path = None  # sysfs directory, e.g. /sys/bus/w1/devices/28-...
    name = None  # human-readable label, e.g. "Probe 1"

    def __init__(self, device_path, name=None):
        """
        :param device_path: base sysfs path of the probe (contains w1_slave).
        :param name: optional human-readable label.
        """
        self.device_path = device_path
        self.name = name

    def __json__(self):
        """Serializable representation used by the event/log pipeline."""
        return {"name": self.name, "device_path": self.device_path}

    def get_temp(self):
        """Return the probe temperature in degrees Celsius.

        Re-reads the raw sysfs file until the driver's CRC line reports
        'YES'. Returns None if no 't=' field is present in the output.
        """
        lines = self.__get_temp_raw()
        while lines[0].strip()[-3:] != 'YES':
            time.sleep(0.2)
            # BUG FIX: previously re-called get_temp(), which returns a
            # float and would crash the loop on the next iteration;
            # re-read the raw lines instead.
            lines = self.__get_temp_raw()
        equals_pos = lines[1].find('t=')
        if equals_pos != -1:
            temp_string = lines[1][equals_pos + 2:]
            # Kernel reports millidegrees Celsius.
            return float(temp_string) / 1000.0

    def __get_temp_raw(self):
        # Context manager guarantees the file handle is closed (the
        # original leaked it on any read error).
        with open(self.device_path + '/w1_slave', 'r') as f:
            return f.readlines()

    @classmethod
    def get_all(cls):
        """Setup the ds18b20 sensors and return them as DS18B20 instances."""
        os.system('modprobe w1-gpio')
        os.system('modprobe w1-therm')
        base_dir = '/sys/bus/w1/devices/'
        sensor_paths = glob.glob(base_dir + '28*')
        sensor_list = []
        # enumerate gives stable "Probe N" labels starting at 1.
        for index, path in enumerate(sensor_paths, start=1):
            # BUG FIX: arguments were swapped — the label was passed as
            # device_path and the path as name, breaking get_temp().
            sensor_list.append(cls(path, name="Probe " + str(index)))
        return sensor_list
class Canary(object):
    """Dead-man's-switch ping against the snitch monitoring endpoint."""

    @staticmethod
    def still_alive(status_code=0):
        """Report liveness with *status_code*; failures are printed, never raised."""
        target = "https://nosnch.in/966621a8ac?s=" + str(status_code)
        try:
            requests.get(target)
        except Exception:
            print("Error calling canary: " + traceback.format_exc())
|
{
"content_hash": "c8de8127eb693ef7e22a4f8677507af7",
"timestamp": "",
"source": "github",
"line_count": 342,
"max_line_length": 95,
"avg_line_length": 23.67543859649123,
"alnum_prop": 0.5542793627269359,
"repo_name": "mcclown/AutoReef",
"id": "ebb348374c8a85096a1de708c1f456543aa58ae6",
"size": "8097",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "AutoReef/integrations.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "21958"
}
],
"symlink_target": ""
}
|
import inspect
import sys
import os
import tempfile
from io import StringIO
from unittest import mock
import numpy as np
from numpy.testing import (
assert_, assert_equal, assert_raises, assert_raises_regex)
from numpy.core.overrides import (
_get_implementing_args, array_function_dispatch,
verify_matching_signatures, ARRAY_FUNCTION_ENABLED)
from numpy.compat import pickle
import pytest
# Skip marker for tests that need NEP-18 __array_function__ dispatch, which
# can be disabled via the NUMPY_EXPERIMENTAL_ARRAY_FUNCTION env variable.
requires_array_function = pytest.mark.skipif(
    not ARRAY_FUNCTION_ENABLED,
    reason="__array_function__ dispatch not enabled.")
def _return_not_implemented(self, *args, **kwargs):
    """Stand-in __array_function__ that always defers (returns NotImplemented)."""
    return NotImplemented
# need to define this at the top level to test pickling
@array_function_dispatch(lambda array: (array,))
def dispatched_one_arg(array):
    """Docstring."""
    # NOTE: the docstring text above is asserted verbatim by
    # test_name_and_docstring — do not change it.
    return 'original'
@array_function_dispatch(lambda array1, array2: (array1, array2))
def dispatched_two_arg(array1, array2):
    """Docstring."""
    # Default implementation; overriding duck arrays replace this result.
    return 'original'
class TestGetImplementingArgs:
    """Ordering rules of _get_implementing_args.

    Relevant arguments must come back deduplicated by type and ordered so
    that subclasses precede their superclasses.
    """

    def test_ndarray(self):
        array = np.array(1)

        # Plain ndarrays are deduplicated; non-array arguments are ignored.
        args = _get_implementing_args([array])
        assert_equal(list(args), [array])

        args = _get_implementing_args([array, array])
        assert_equal(list(args), [array])

        args = _get_implementing_args([array, 1])
        assert_equal(list(args), [array])

        args = _get_implementing_args([1, array])
        assert_equal(list(args), [array])

    def test_ndarray_subclasses(self):
        class OverrideSub(np.ndarray):
            __array_function__ = _return_not_implemented

        class NoOverrideSub(np.ndarray):
            pass

        array = np.array(1).view(np.ndarray)
        override_sub = np.array(1).view(OverrideSub)
        no_override_sub = np.array(1).view(NoOverrideSub)

        # Subclasses come before the ndarray base, override or not.
        args = _get_implementing_args([array, override_sub])
        assert_equal(list(args), [override_sub, array])

        args = _get_implementing_args([array, no_override_sub])
        assert_equal(list(args), [no_override_sub, array])

        args = _get_implementing_args(
            [override_sub, no_override_sub])
        assert_equal(list(args), [override_sub, no_override_sub])

    def test_ndarray_and_duck_array(self):
        class Other:
            __array_function__ = _return_not_implemented

        array = np.array(1)
        other = Other()

        # Unrelated duck types keep their original argument order.
        args = _get_implementing_args([other, array])
        assert_equal(list(args), [other, array])

        args = _get_implementing_args([array, other])
        assert_equal(list(args), [array, other])

    def test_ndarray_subclass_and_duck_array(self):
        class OverrideSub(np.ndarray):
            __array_function__ = _return_not_implemented

        class Other:
            __array_function__ = _return_not_implemented

        array = np.array(1)
        subarray = np.array(1).view(OverrideSub)
        other = Other()

        # The ndarray subclass is hoisted before its base in both orders.
        assert_equal(_get_implementing_args([array, subarray, other]),
                     [subarray, array, other])
        assert_equal(_get_implementing_args([array, other, subarray]),
                     [subarray, array, other])

    def test_many_duck_arrays(self):
        class A:
            __array_function__ = _return_not_implemented

        class B(A):
            __array_function__ = _return_not_implemented

        class C(A):
            __array_function__ = _return_not_implemented

        class D:
            __array_function__ = _return_not_implemented

        a = A()
        b = B()
        c = C()
        d = D()

        assert_equal(_get_implementing_args([1]), [])
        assert_equal(_get_implementing_args([a]), [a])
        assert_equal(_get_implementing_args([a, 1]), [a])
        assert_equal(_get_implementing_args([a, a, a]), [a])
        assert_equal(_get_implementing_args([a, d, a]), [a, d])
        # Subclasses (B, C) are hoisted before their parent A.
        assert_equal(_get_implementing_args([a, b]), [b, a])
        assert_equal(_get_implementing_args([b, a]), [b, a])
        assert_equal(_get_implementing_args([a, b, c]), [b, c, a])
        assert_equal(_get_implementing_args([a, c, b]), [c, b, a])

    def test_too_many_duck_arrays(self):
        # More than 32 distinct implementing types is rejected outright.
        namespace = dict(__array_function__=_return_not_implemented)
        types = [type('A' + str(i), (object,), namespace) for i in range(33)]
        relevant_args = [t() for t in types]

        actual = _get_implementing_args(relevant_args[:32])
        assert_equal(actual, relevant_args[:32])

        with assert_raises_regex(TypeError, 'distinct argument types'):
            _get_implementing_args(relevant_args)
class TestNDArrayArrayFunction:
    """Tests for ndarray.__array_function__ itself (the default method)."""

    @requires_array_function
    def test_method(self):

        class Other:
            __array_function__ = _return_not_implemented

        class NoOverrideSub(np.ndarray):
            pass

        class OverrideSub(np.ndarray):
            __array_function__ = _return_not_implemented

        array = np.array([1])
        other = Other()
        no_override_sub = array.view(NoOverrideSub)
        override_sub = array.view(OverrideSub)

        # ndarray-only types: the default implementation runs.
        result = array.__array_function__(func=dispatched_two_arg,
                                          types=(np.ndarray,),
                                          args=(array, 1.), kwargs={})
        assert_equal(result, 'original')

        # An unknown duck type in `types` makes the default defer.
        result = array.__array_function__(func=dispatched_two_arg,
                                          types=(np.ndarray, Other),
                                          args=(array, other), kwargs={})
        assert_(result is NotImplemented)

        # ndarray subclasses (with or without an override) are accepted.
        result = array.__array_function__(func=dispatched_two_arg,
                                          types=(np.ndarray, NoOverrideSub),
                                          args=(array, no_override_sub),
                                          kwargs={})
        assert_equal(result, 'original')

        result = array.__array_function__(func=dispatched_two_arg,
                                          types=(np.ndarray, OverrideSub),
                                          args=(array, override_sub),
                                          kwargs={})
        assert_equal(result, 'original')

        with assert_raises_regex(TypeError, 'no implementation found'):
            np.concatenate((array, other))

        # Subclass results keep the subclass view.
        expected = np.concatenate((array, array))
        result = np.concatenate((array, no_override_sub))
        assert_equal(result, expected.view(NoOverrideSub))
        result = np.concatenate((array, override_sub))
        assert_equal(result, expected.view(OverrideSub))

    def test_no_wrapper(self):
        # This shouldn't happen unless a user intentionally calls
        # __array_function__ with invalid arguments, but check that we raise
        # an appropriate error all the same.
        array = np.array(1)
        func = lambda x: x
        with assert_raises_regex(AttributeError, '_implementation'):
            array.__array_function__(func=func, types=(np.ndarray,),
                                     args=(array,), kwargs={})
@requires_array_function
class TestArrayFunctionDispatch:
    """Behaviour of functions wrapped by array_function_dispatch."""

    def test_pickle(self):
        # The wrapper must pickle as the module-level dispatched function.
        for proto in range(2, pickle.HIGHEST_PROTOCOL + 1):
            roundtripped = pickle.loads(
                pickle.dumps(dispatched_one_arg, protocol=proto))
            assert_(roundtripped is dispatched_one_arg)

    def test_name_and_docstring(self):
        assert_equal(dispatched_one_arg.__name__, 'dispatched_one_arg')
        # Docstrings are stripped under ``python -OO``.
        if sys.flags.optimize < 2:
            assert_equal(dispatched_one_arg.__doc__, 'Docstring.')

    def test_interface(self):
        # __array_function__ receives (func, types, args, kwargs) verbatim.
        class MyArray:
            def __array_function__(self, func, types, args, kwargs):
                return (self, func, types, args, kwargs)

        original = MyArray()
        (obj, func, types, args, kwargs) = dispatched_one_arg(original)
        assert_(obj is original)
        assert_(func is dispatched_one_arg)
        assert_equal(set(types), {MyArray})
        # assert_equal uses the overloaded np.iscomplexobj() internally
        assert_(args == (original,))
        assert_equal(kwargs, {})

    def test_not_implemented(self):
        # If every handler defers, dispatch raises TypeError.
        class MyArray:
            def __array_function__(self, func, types, args, kwargs):
                return NotImplemented

        array = MyArray()
        with assert_raises_regex(TypeError, 'no implementation found'):
            dispatched_one_arg(array)
@requires_array_function
class TestVerifyMatchingSignatures:
    """Dispatchers must mirror the signatures of their implementations."""

    def test_verify_matching_signatures(self):
        # Matching names pass; the dispatcher may replace defaults with None.
        verify_matching_signatures(lambda x: 0, lambda x: 0)
        verify_matching_signatures(lambda x=None: 0, lambda x=None: 0)
        verify_matching_signatures(lambda x=1: 0, lambda x=None: 0)

        # Mismatched names, optional-ness, or non-None dispatcher defaults
        # must be rejected.
        with assert_raises(RuntimeError):
            verify_matching_signatures(lambda a: 0, lambda b: 0)
        with assert_raises(RuntimeError):
            verify_matching_signatures(lambda x: 0, lambda x=None: 0)
        with assert_raises(RuntimeError):
            verify_matching_signatures(lambda x=None: 0, lambda y=None: 0)
        with assert_raises(RuntimeError):
            verify_matching_signatures(lambda x=1: 0, lambda y=1: 0)

    def test_array_function_dispatch(self):
        with assert_raises(RuntimeError):
            @array_function_dispatch(lambda x: (x,))
            def f(y):
                pass

        # should not raise
        @array_function_dispatch(lambda x: (x,), verify=False)
        def f(y):
            pass
def _new_duck_type_and_implements():
"""Create a duck array type and implements functions."""
HANDLED_FUNCTIONS = {}
class MyArray:
def __array_function__(self, func, types, args, kwargs):
if func not in HANDLED_FUNCTIONS:
return NotImplemented
if not all(issubclass(t, MyArray) for t in types):
return NotImplemented
return HANDLED_FUNCTIONS[func](*args, **kwargs)
def implements(numpy_function):
"""Register an __array_function__ implementations."""
def decorator(func):
HANDLED_FUNCTIONS[numpy_function] = func
return func
return decorator
return (MyArray, implements)
@requires_array_function
class TestArrayFunctionImplementation:
    """End-to-end overriding via the implements() registration helper."""

    def test_one_arg(self):
        MyArray, implements = _new_duck_type_and_implements()

        @implements(dispatched_one_arg)
        def _(array):
            return 'myarray'

        assert_equal(dispatched_one_arg(1), 'original')
        assert_equal(dispatched_one_arg(MyArray()), 'myarray')

    def test_optional_args(self):
        MyArray, implements = _new_duck_type_and_implements()

        @array_function_dispatch(lambda array, option=None: (array,))
        def func_with_option(array, option='default'):
            return option

        @implements(func_with_option)
        def my_array_func_with_option(array, new_option='myarray'):
            return new_option

        # we don't need to implement every option on __array_function__
        # implementations
        assert_equal(func_with_option(1), 'default')
        assert_equal(func_with_option(1, option='extra'), 'extra')
        assert_equal(func_with_option(MyArray()), 'myarray')
        with assert_raises(TypeError):
            func_with_option(MyArray(), option='extra')

        # but new options on implementations can't be used
        result = my_array_func_with_option(MyArray(), new_option='yes')
        assert_equal(result, 'yes')
        with assert_raises(TypeError):
            func_with_option(MyArray(), new_option='no')

    def test_not_implemented(self):
        MyArray, implements = _new_duck_type_and_implements()

        @array_function_dispatch(lambda array: (array,), module='my')
        def func(array):
            return array

        array = np.array(1)
        assert_(func(array) is array)
        # `module=` rewrites __module__ and shows up in error messages.
        assert_equal(func.__module__, 'my')

        with assert_raises_regex(
                TypeError, "no implementation found for 'my.func'"):
            func(MyArray())
class TestNDArrayMethods:
    """ndarray methods that must keep working when dispatch always defers."""

    def test_repr(self):
        # gh-12162: should still be defined even if __array_function__ doesn't
        # implement np.array_repr()
        class MyArray(np.ndarray):
            def __array_function__(*args, **kwargs):
                return NotImplemented

        array = np.array(1).view(MyArray)
        assert_equal(repr(array), 'MyArray(1)')
        assert_equal(str(array), '1')
class TestNumPyFunctions:
    """Public numpy functions expose clean metadata and are overridable."""

    def test_set_module(self):
        # Wrapped functions report their public module, not an internal one.
        assert_equal(np.sum.__module__, 'numpy')
        assert_equal(np.char.equal.__module__, 'numpy.char')
        assert_equal(np.fft.fft.__module__, 'numpy.fft')
        assert_equal(np.linalg.solve.__module__, 'numpy.linalg')

    def test_inspect_sum(self):
        # The dispatch wrapper must preserve an inspectable signature.
        signature = inspect.signature(np.sum)
        assert_('axis' in signature.parameters)

    @requires_array_function
    def test_override_sum(self):
        MyArray, implements = _new_duck_type_and_implements()

        @implements(np.sum)
        def _(array):
            return 'yes'

        assert_equal(np.sum(MyArray()), 'yes')

    @requires_array_function
    def test_sum_on_mock_array(self):
        # We need a proxy for mocks because __array_function__ is only looked
        # up in the class dict
        class ArrayProxy:
            def __init__(self, value):
                self.value = value

            def __array_function__(self, *args, **kwargs):
                return self.value.__array_function__(*args, **kwargs)

            def __array__(self, *args, **kwargs):
                return self.value.__array__(*args, **kwargs)

        proxy = ArrayProxy(mock.Mock(spec=ArrayProxy))
        proxy.value.__array_function__.return_value = 1
        result = np.sum(proxy)
        assert_equal(result, 1)
        proxy.value.__array_function__.assert_called_once_with(
            np.sum, (ArrayProxy,), (proxy,), {})
        # Dispatch must not fall back to coercion via __array__.
        proxy.value.__array__.assert_not_called()

    @requires_array_function
    def test_sum_forwarding_implementation(self):
        class MyArray(np.ndarray):

            def sum(self, axis, out):
                return 'summed'

            def __array_function__(self, func, types, args, kwargs):
                return super().__array_function__(func, types, args, kwargs)

        # note: the internal implementation of np.sum() calls the .sum() method
        array = np.array(1).view(MyArray)
        assert_equal(np.sum(array), 'summed')
class TestArrayLike:
    """Tests for the ``like=`` protocol on array-creation functions (NEP 35)."""

    # NOTE(review): nose-style setup(); newer pytest spells this setup_method.
    def setup(self):
        class MyArray():
            def __init__(self, function=None):
                self.function = function

            def __array_function__(self, func, types, args, kwargs):
                assert func is getattr(np, func.__name__)
                try:
                    my_func = getattr(self, func.__name__)
                except AttributeError:
                    return NotImplemented
                return my_func(*args, **kwargs)

        self.MyArray = MyArray

        class MyNoArrayFunctionArray():
            def __init__(self, function=None):
                self.function = function

        self.MyNoArrayFunctionArray = MyNoArrayFunctionArray

    def add_method(self, name, arr_class, enable_value_error=False):
        """Attach a stub creation method *name* to *arr_class*."""
        def _definition(*args, **kwargs):
            # Check that `like=` isn't propagated downstream
            assert 'like' not in kwargs

            if enable_value_error and 'value_error' in kwargs:
                raise ValueError

            return arr_class(getattr(arr_class, name))
        setattr(arr_class, name, _definition)

    # NOTE: deliberately no ``self`` — invoked at class-definition time below
    # to build the _array_tests parameter table.
    def func_args(*args, **kwargs):
        return args, kwargs

    @requires_array_function
    def test_array_like_not_implemented(self):
        self.add_method('array', self.MyArray)

        ref = self.MyArray.array()

        with assert_raises_regex(TypeError, 'no implementation found'):
            array_like = np.asarray(1, like=ref)

    # (name, positional args, keyword args) for every creation function
    # that supports ``like=``.
    _array_tests = [
        ('array', *func_args((1,))),
        ('asarray', *func_args((1,))),
        ('asanyarray', *func_args((1,))),
        ('ascontiguousarray', *func_args((2, 3))),
        ('asfortranarray', *func_args((2, 3))),
        ('require', *func_args((np.arange(6).reshape(2, 3),),
                               requirements=['A', 'F'])),
        ('empty', *func_args((1,))),
        ('full', *func_args((1,), 2)),
        ('ones', *func_args((1,))),
        ('zeros', *func_args((1,))),
        ('arange', *func_args(3)),
        ('frombuffer', *func_args(b'\x00' * 8, dtype=int)),
        ('fromiter', *func_args(range(3), dtype=int)),
        ('fromstring', *func_args('1,2', dtype=int, sep=',')),
        ('loadtxt', *func_args(lambda: StringIO('0 1\n2 3'))),
        ('genfromtxt', *func_args(lambda: StringIO(u'1,2.1'),
                                  dtype=[('int', 'i8'), ('float', 'f8')],
                                  delimiter=',')),
    ]

    @pytest.mark.parametrize('function, args, kwargs', _array_tests)
    @pytest.mark.parametrize('numpy_ref', [True, False])
    @requires_array_function
    def test_array_like(self, function, args, kwargs, numpy_ref):
        self.add_method('array', self.MyArray)
        self.add_method(function, self.MyArray)
        np_func = getattr(np, function)
        my_func = getattr(self.MyArray, function)

        if numpy_ref is True:
            ref = np.array(1)
        else:
            ref = self.MyArray.array()

        # Callable args (the StringIO factories) are re-created per call.
        like_args = tuple(a() if callable(a) else a for a in args)
        array_like = np_func(*like_args, **kwargs, like=ref)

        if numpy_ref is True:
            assert type(array_like) is np.ndarray

            np_args = tuple(a() if callable(a) else a for a in args)
            np_arr = np_func(*np_args, **kwargs)

            # Special-case np.empty to ensure values match
            if function == "empty":
                np_arr.fill(1)
                array_like.fill(1)

            assert_equal(array_like, np_arr)
        else:
            assert type(array_like) is self.MyArray
            assert array_like.function is my_func

    @pytest.mark.parametrize('function, args, kwargs', _array_tests)
    @pytest.mark.parametrize('ref', [1, [1], "MyNoArrayFunctionArray"])
    @requires_array_function
    def test_no_array_function_like(self, function, args, kwargs, ref):
        self.add_method('array', self.MyNoArrayFunctionArray)
        self.add_method(function, self.MyNoArrayFunctionArray)
        np_func = getattr(np, function)

        # Instantiate ref if it's the MyNoArrayFunctionArray class
        if ref == "MyNoArrayFunctionArray":
            ref = self.MyNoArrayFunctionArray.array()

        like_args = tuple(a() if callable(a) else a for a in args)

        with assert_raises_regex(TypeError,
                'The `like` argument must be an array-like that implements'):
            np_func(*like_args, **kwargs, like=ref)

    @pytest.mark.parametrize('numpy_ref', [True, False])
    def test_array_like_fromfile(self, numpy_ref):
        self.add_method('array', self.MyArray)
        self.add_method("fromfile", self.MyArray)

        if numpy_ref is True:
            ref = np.array(1)
        else:
            ref = self.MyArray.array()

        data = np.random.random(5)

        with tempfile.TemporaryDirectory() as tmpdir:
            fname = os.path.join(tmpdir, "testfile")
            data.tofile(fname)

            array_like = np.fromfile(fname, like=ref)
            if numpy_ref is True:
                assert type(array_like) is np.ndarray
                np_res = np.fromfile(fname, like=ref)
                assert_equal(np_res, data)
                assert_equal(array_like, np_res)
            else:
                assert type(array_like) is self.MyArray
                assert array_like.function is self.MyArray.fromfile

    @requires_array_function
    def test_exception_handling(self):
        self.add_method('array', self.MyArray, enable_value_error=True)

        ref = self.MyArray.array()

        with assert_raises(TypeError):
            # Raises the error about `value_error` being invalid first
            np.array(1, value_error=True, like=ref)
|
{
"content_hash": "3331d5dd7ca5f0f6318fce9de1d8345b",
"timestamp": "",
"source": "github",
"line_count": 585,
"max_line_length": 79,
"avg_line_length": 34.51794871794872,
"alnum_prop": 0.5820829000148566,
"repo_name": "jakirkham/numpy",
"id": "36970dbc02ed413353f63bf65a4d089cff81ffed",
"size": "20193",
"binary": false,
"copies": "2",
"ref": "refs/heads/main",
"path": "numpy/core/tests/test_overrides.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "9076052"
},
{
"name": "C++",
"bytes": "172879"
},
{
"name": "Fortran",
"bytes": "10884"
},
{
"name": "JavaScript",
"bytes": "16928"
},
{
"name": "Makefile",
"bytes": "4290"
},
{
"name": "Python",
"bytes": "8342632"
},
{
"name": "Shell",
"bytes": "9605"
},
{
"name": "sed",
"bytes": "5741"
}
],
"symlink_target": ""
}
|
from setuptools import setup
# Long description for the package index; surrounding newlines are stripped
# before it is handed to setuptools.
description = """
Full featured redis cluster cache backend for Django
"""

setup(
    name="django-redis-cluster",
    url="https://github.com/glumu/django-redis-cluster",
    author="Glen",
    author_email="glumu@126.com",
    version="1.0.0",
    packages=[
        "django_redis_cluster",
        "django_redis_cluster.client",
        "django_redis_cluster.serializers",
    ],
    description=description.strip(),
    install_requires=[
        "Django>=1.9.6",
        "redis>=2.10.5",
        "redis-py-cluster>=1.2.0",
        "msgpack-python>=0.4.7",
    ],
    zip_safe=False,
    include_package_data=True,
    # Ship bundled templates alongside the Python packages.
    package_data={
        "": ["*.html"],
    },
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Operating System :: OS Independent",
        "Environment :: Web Environment",
        "Framework :: Django",
        "License :: OSI Approved :: BSD License",
        "Intended Audience :: Developers",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: 3.5",
        "Topic :: Software Development :: Libraries",
        "Topic :: Utilities",
    ],
)
|
{
"content_hash": "25652bf17103c81292382a0191fe7feb",
"timestamp": "",
"source": "github",
"line_count": 46,
"max_line_length": 58,
"avg_line_length": 30.152173913043477,
"alnum_prop": 0.5652487382840663,
"repo_name": "glumu/django-redis-cluster",
"id": "c9d55495bdd0021d5fe867d73fc81492723668ca",
"size": "1402",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "24488"
}
],
"symlink_target": ""
}
|
"""linebot.models.things module."""
from abc import ABCMeta
from future.utils import with_metaclass
from .base import Base
class Things(with_metaclass(ABCMeta, Base)):
    """Abstract Base Class of Things."""

    def __init__(self, device_id=None, **kwargs):
        """__init__ method.

        :param str device_id: Device ID.
        :param kwargs:
        """
        super(Things, self).__init__(**kwargs)
        # Identifier of the LINE Things device this event refers to.
        self.device_id = device_id
class DeviceLink(Things):
    """DeviceLink.

    https://developers.line.biz/en/reference/messaging-api/#device-link-event

    Indicates that a user linked a device with LINE.
    """

    def __init__(self, device_id=None, **kwargs):
        """__init__ method.

        :param str device_id: Device ID of the device that has been linked with LINE.
        :param kwargs:
        """
        super(DeviceLink, self).__init__(device_id=device_id, **kwargs)
        # Discriminator value used by the webhook parser.
        self.type = 'link'
class DeviceUnlink(Things):
    """DeviceUnlink.

    https://developers.line.biz/en/reference/messaging-api/#device-unlink-event

    Indicates that the user unlinked a device from LINE.
    """

    def __init__(self, device_id=None, **kwargs):
        """__init__ method.

        :param str device_id: Device ID of the device that was unlinked from LINE.
        :param kwargs:
        """
        super(DeviceUnlink, self).__init__(device_id=device_id, **kwargs)
        # Discriminator value used by the webhook parser.
        self.type = 'unlink'
class ScenarioResult(Things):
    """ScenarioResult.

    https://developers.line.biz/en/reference/messaging-api/#scenario-result-event

    Indicates that an automatic communication scenario has been executed.
    """

    def __init__(self, device_id=None, result=None, **kwargs):
        """__init__ method.

        :param str device_id: Device ID of the device that executed the scenario.
        :param result: ScenarioResultPayload object (dict input is coerced).
        :param kwargs:
        """
        super(ScenarioResult, self).__init__(device_id=device_id, **kwargs)
        self.type = 'scenarioResult'
        # Accepts either a raw dict or an already-built payload object.
        self.result = self.get_or_new_from_json_dict(
            result, ScenarioResultPayload
        )
class ScenarioResultPayload(Base):
    """ScenarioResultPayload.

    Result details of an executed LINE Things automatic scenario.
    """

    def __init__(self, scenario_id=None, revision=None, start_time=None,
                 result_code=None, end_time=None, action_results=None,
                 ble_notification_payload=None, error_reason=None, **kwargs):
        """__init__ method.

        :param str scenario_id: Scenario ID executed.
        :param long revision: Revision number.
        :param long start_time: Timestamp for when execution of scenario
            action started (milliseconds).
        :param long end_time: Timestamp for when execution of scenario
            was completed (milliseconds).
        :param str result_code: Scenario execution completion status.
        :param action_results: Array of actions specified in a scenario.
        :type action_results: list[T <= :py:class:`linebot.models.things.ActionResult`]
        :param str ble_notification_payload: Data contained in notification.
        :param str error_reason: Error response.
        :param kwargs:
        """
        super(ScenarioResultPayload, self).__init__(**kwargs)
        self.scenario_id = scenario_id
        self.revision = revision
        self.start_time = start_time
        self.end_time = end_time
        self.result_code = result_code
        # BUG FIX: guard against action_results=None (the default) —
        # iterating None raised TypeError whenever the webhook payload
        # omitted the field.
        if action_results is not None:
            self.action_results = [
                self.get_or_new_from_json_dict(it, ActionResult)
                for it in action_results
            ]
        else:
            self.action_results = None
        self.ble_notification_payload = ble_notification_payload
        self.error_reason = error_reason
class ActionResult(Base):
    """ActionResult.

    Execution result of individual operations specified in action
    """

    # NOTE: ``type`` intentionally shadows the builtin — it mirrors the
    # JSON field name of the Messaging API payload.
    def __init__(self, type=None, data=None, **kwargs):
        """__init__ method.

        :param str type: Type of the executed action.
        :param str data: Base64-encoded binary data.
        :param kwargs:
        """
        super(ActionResult, self).__init__(**kwargs)
        self.type = type
        self.data = data
|
{
"content_hash": "70623a430e877dffcf961a0eb2e597df",
"timestamp": "",
"source": "github",
"line_count": 135,
"max_line_length": 87,
"avg_line_length": 30.644444444444446,
"alnum_prop": 0.6241237611795988,
"repo_name": "line/line-bot-sdk-python",
"id": "cd23ad4aac62a386d93e35ffd09f6dd9e2d333fb",
"size": "4718",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "linebot/models/things.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "649062"
}
],
"symlink_target": ""
}
|
"""
test_aioh2
----------------------------------
Tests for `aioh2` module.
"""
import random
import unittest
import uuid
import asyncio
from h2.events import DataReceived
from h2.events import PingAcknowledged
from h2.events import RemoteSettingsChanged
from h2.events import ResponseReceived
from h2.events import SettingsAcknowledged
from h2.exceptions import FlowControlError
from h2.settings import SettingCodes
from aioh2 import SendException
from aioh2.helper import async_task
from . import async_test, BaseTestCase
class TestServer(BaseTestCase):
def test_connect(self):
    """Connection setup/teardown performed by BaseTestCase is the whole test."""
    pass
@async_test
def test_ping(self):
    """A PING frame we send must be acked with the same opaque payload."""
    opaque_data = uuid.uuid4().bytes[:8]
    self.conn.ping(opaque_data)
    events = yield from self._expect_events()
    self.assertIsInstance(events[0], PingAcknowledged)
    self.assertEqual(events[0].ping_data, opaque_data)
@async_test
def test_request_headers(self):
    """Sending request headers alone must round-trip without error."""
    yield from self._send_headers()
@asyncio.coroutine
def _test_read_frame(self, *, more, end_stream):
    """Exercise frame-at-a-time reads via read_stream().

    :param more: also send a second DATA frame after the first.
    :param end_stream: close the stream with the last frame sent.
    """
    stream_id = yield from self._send_headers()

    data = b'x' * random.randint(128, 512)
    self.conn.send_data(stream_id, data,
                        end_stream=not more and end_stream)
    extra = b''
    if more:
        extra = b'y' * random.randint(128, 512)
        self.conn.send_data(stream_id, extra, end_stream=end_stream)
    yield from self._expect_connection_flow_control_disabled()

    # Each read_stream() call yields exactly one DATA frame.
    yield from self._assert_received(
        stream_id, self.server.read_stream(stream_id), data)
    if more:
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id), extra)
    if end_stream:
        # A closed stream yields one final empty read.
        frame = yield from self.server.read_stream(stream_id)
        self.assertEqual(frame, b'')
    else:
        # Stream still open: the next read must block until it times out.
        try:
            yield from asyncio.wait_for(
                self.server.read_stream(stream_id), 0.1)
        except asyncio.TimeoutError:
            pass
        else:
            # BUG FIX: was ``self.assertRaises(asyncio.TimeoutError,
            # lambda: None)`` — an obscure way to force a failure.
            self.fail('read_stream should have timed out')
@async_test
def test_read_frame(self):
    """Two frames, stream left open."""
    yield from self._test_read_frame(more=True, end_stream=False)
@async_test
def test_read_frame_close(self):
    """Two frames, stream closed by the second one."""
    yield from self._test_read_frame(more=True, end_stream=True)
@async_test
def test_read_only_frame(self):
    """Single frame, stream left open."""
    yield from self._test_read_frame(more=False, end_stream=False)
@async_test
def test_read_only_frame_close(self):
    """Single frame that also closes the stream."""
    yield from self._test_read_frame(more=False, end_stream=True)
@asyncio.coroutine
def _test_read_all(self, *, more, end_stream):
    """Exercise read_stream(stream_id, -1), which drains all buffered frames."""
    stream_id = yield from self._send_headers()

    data = b'x' * random.randint(128, 512)
    self.conn.send_data(stream_id, data,
                        end_stream=not more and end_stream)
    if more:
        extra = b'y' * random.randint(128, 512)
        self.conn.send_data(stream_id, extra, end_stream=end_stream)
        data += extra
    yield from self._expect_connection_flow_control_disabled()

    if end_stream:
        # Closed stream: everything comes back in one read, then b''.
        yield from self._assert_received(
            stream_id, self.server.read_stream(stream_id, -1), data)
        frame = yield from self.server.read_stream(stream_id, -1)
        self.assertEqual(frame, b'')
    else:
        # Open stream: read-all must block until the stream closes.
        try:
            yield from asyncio.wait_for(
                self.server.read_stream(stream_id, -1), 0.1)
        except asyncio.TimeoutError:
            pass
        else:
            # BUG FIX: replaced the assertRaises(..., lambda: None)
            # forced-failure trick with an explicit failure.
            self.fail('read_stream(-1) should have timed out')
@async_test
def test_read_all(self):
    """Two frames, stream left open."""
    yield from self._test_read_all(more=True, end_stream=False)
@async_test
def test_read_all_close(self):
    """Two frames, stream closed by the second one."""
    yield from self._test_read_all(more=True, end_stream=True)
@async_test
def test_read_all_only_frame(self):
    """Single frame, stream left open."""
    yield from self._test_read_all(more=False, end_stream=False)
@async_test
def test_read_all_only_frame_close(self):
    """Single frame that also closes the stream."""
    yield from self._test_read_all(more=False, end_stream=True)
@asyncio.coroutine
def _test_read_exactly(self, *, empty, explicit_close):
    """Exercise read_stream(stream_id, n): exact-byte reads across frames."""
    stream_id = yield from self._send_headers()

    self.conn.send_data(stream_id, b'333')
    self.conn.send_data(stream_id, b'55555')
    if empty:
        # An empty DATA frame must be transparent to byte-exact reads.
        self.conn.send_data(stream_id, b'')
    yield from self._expect_connection_flow_control_disabled()
    yield from self._assert_received(
        stream_id, self.server.read_stream(stream_id, 2), b'33')

    self.conn.send_data(stream_id, b'88888888',
                        end_stream=not explicit_close)
    if explicit_close:
        self.conn.end_stream(stream_id)
    yield from self._expect_events(0)
    # Reads may span frame boundaries...
    yield from self._assert_received(
        stream_id, self.server.read_stream(stream_id, 8), b'35555588')
    yield from self._assert_received(
        stream_id, self.server.read_stream(stream_id, 2), b'88')
    # ...and a short read is returned once the stream has ended.
    yield from self._assert_received(
        stream_id, self.server.read_stream(stream_id, 8), b'8888')
@async_test
def test_read_exactly(self):
    """Baseline: no empty frame, stream closed by the last DATA frame."""
    yield from self._test_read_exactly(empty=False, explicit_close=False)
@async_test
def test_read_exactly_empty_frame(self):
    """Empty trailing DATA frame, stream closed by the last DATA frame."""
    yield from self._test_read_exactly(empty=True, explicit_close=False)
@async_test
def test_read_exactly_explicit_close(self):
    """Explicit end_stream() without an empty trailing frame.

    BUG FIX: this variant previously passed empty=True, duplicating
    test_read_exactly_empty_frame_explicit_close and leaving the
    (empty=False, explicit_close=True) combination untested.
    """
    yield from self._test_read_exactly(empty=False, explicit_close=True)
@async_test
def test_read_exactly_empty_frame_explicit_close(self):
    """Empty trailing DATA frame followed by an explicit end_stream()."""
    yield from self._test_read_exactly(empty=True, explicit_close=True)
@async_test
def test_flow_control_settings(self):
    """Shrinking our INITIAL_WINDOW_SIZE must throttle the client's sends."""
    self.server.update_settings({SettingCodes.INITIAL_WINDOW_SIZE: 3})
    event = yield from self._expect_events()
    self.assertIsInstance(event[0], RemoteSettingsChanged)
    event = yield from self.server.events.get()
    self.assertIsInstance(event, SettingsAcknowledged)

    stream_id = yield from self._send_headers()

    # 2 bytes fit in the 3-byte window.
    self.conn.send_data(stream_id, b'xx')
    yield from self._expect_connection_flow_control_disabled()
    # Reading replenishes the stream window.
    yield from self._assert_received(
        stream_id, self.server.read_stream(stream_id, 2), b'xx')

    self.conn.send_data(stream_id, b'xxx')
    yield from self._expect_events(0)
    yield from self._assert_received(
        stream_id, self.server.read_stream(stream_id, 3), b'xxx')

    # A 4-byte frame can never fit the 3-byte window: h2 raises locally.
    self.assertRaises(FlowControlError,
                      self.conn.send_data, stream_id, b'xxxx')
@async_test
def test_flow_control(self):
    """Server-side sends must respect the client's 3-byte stream window."""
    self.conn.update_settings({SettingCodes.INITIAL_WINDOW_SIZE: 3})
    event = yield from self._expect_events()
    self.assertIsInstance(event[0], SettingsAcknowledged)
    event = yield from self.server.events.get()
    self.assertIsInstance(event, RemoteSettingsChanged)

    stream_id = yield from self._send_headers(end_stream=True)
    yield from self.server.start_response(stream_id, [(':status', '200')])
    events = yield from self._expect_events()
    self.assertIsInstance(events[0], ResponseReceived)

    # 2 bytes fit in the 3-byte window.
    yield from self.server.send_data(stream_id, b'12')
    events = yield from self._expect_events()
    self.assertIsInstance(events[0], DataReceived)
    self.assertEqual(events[0].data, b'12')

    # Only 1 byte of window remains, so sending b'34' must block after
    # delivering b'3'.
    try:
        yield from asyncio.wait_for(
            self.server.send_data(stream_id, b'34'), 0.1)
    except asyncio.TimeoutError:
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], DataReceived)
        self.assertEqual(events[0].data, b'3')
    else:
        # BUG FIX: replaced ``self.assertRaises(asyncio.TimeoutError,
        # lambda: None)`` (an obscure forced failure) with self.fail().
        self.fail('send_data should have blocked on the exhausted window')

    # Granting 3 more bytes lets exactly 3 of the next 4 through.
    self.conn.increment_flow_control_window(3, stream_id=stream_id)
    yield from self._expect_events(0)
    try:
        yield from asyncio.wait_for(
            self.server.send_data(stream_id, b'5678'), 0.1)
    except asyncio.TimeoutError:
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], DataReceived)
        self.assertEqual(events[0].data, b'567')
    else:
        self.fail('send_data should have blocked on the exhausted window')
    @async_test
    def test_broken_send(self):
        # A send_data blocked on flow control must fail with SendException
        # (carrying the unsent remainder) when the peer resets the stream.
        self.conn.update_settings({SettingCodes.INITIAL_WINDOW_SIZE: 3})
        event = yield from self._expect_events()
        self.assertIsInstance(event[0], SettingsAcknowledged)
        event = yield from self.server.events.get()
        self.assertIsInstance(event, RemoteSettingsChanged)
        stream_id = yield from self._send_headers(end_stream=True)
        yield from self.server.start_response(stream_id, [(':status', '200')])
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], ResponseReceived)
        yield from self.server.send_data(stream_id, b'12')
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], DataReceived)
        self.assertEqual(events[0].data, b'12')
        # Window has 1 byte left: b'3' goes out, b'45678' stays queued.
        f = async_task(self.server.send_data(stream_id, b'345678'))
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], DataReceived)
        self.assertEqual(events[0].data, b'3')
        # Client kills the stream while the server task is still blocked.
        self.conn.reset_stream(stream_id)
        yield from self._expect_events(0)
        try:
            yield from f
        except SendException as e:
            # The exception reports exactly the bytes that never went out.
            self.assertEqual(e.data, b'45678')
        else:
            # Deliberate failure: SendException was expected.
            self.assertRaises(SendException, lambda: None)
    @unittest.skip("flakey - https://github.com/decentfox/aioh2/issues/17")
    @async_test(timeout=8)
    def test_priority(self):
        # Two streams with weights 32 and 20 write as fast as flow control
        # allows; the ratio of bytes delivered should approximate the ratio
        # of the weights. Timing-sensitive, hence the skip (flaky upstream).
        self.conn.update_settings({
            SettingCodes.MAX_FRAME_SIZE: 16384,
            SettingCodes.INITIAL_WINDOW_SIZE: 16384 * 1024 * 32,
        })
        event = yield from self._expect_events()
        self.assertIsInstance(event[0], SettingsAcknowledged)
        event = yield from self.server.events.get()
        self.assertIsInstance(event, RemoteSettingsChanged)
        stream_1 = yield from self._send_headers()
        yield from self.server.start_response(stream_1, [(':status', '200')])
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], ResponseReceived)
        stream_2 = yield from self._send_headers()
        yield from self.server.start_response(stream_2, [(':status', '200')])
        events = yield from self._expect_events()
        self.assertIsInstance(events[0], ResponseReceived)
        p1 = 32
        p2 = 20
        self.server.reprioritize(stream_1, weight=p1)
        self.server.reprioritize(stream_2, weight=p2)
        self.server.pause_writing()
        # Mutable cell so the nested coroutine sees the later flag flip.
        running = [True]
        @asyncio.coroutine
        def _write(stream_id):
            # Write single bytes until stopped; return how many were sent.
            count = 0
            while running[0]:
                yield from self.server.send_data(stream_id, b'x')
                count += 1
            yield from self.server.end_stream(stream_id)
            return count
        task_1 = async_task(_write(stream_1))
        task_2 = async_task(_write(stream_2))
        # Pulse the writer: brief resume/pause windows let the priority
        # scheduler apportion bandwidth between the two streams.
        for i in range(1000):
            self.server.resume_writing()
            yield from asyncio.sleep(0.004)
            self.server.pause_writing()
            yield from asyncio.sleep(0.001)
        running[0] = False
        self.server.resume_writing()
        count_1 = yield from task_1
        count_2 = yield from task_2
        # Delivered-byte ratio should match the weight ratio to 1 decimal.
        self.assertAlmostEqual(count_1 / count_2, p1 / p2, 1)
if __name__ == '__main__':
    # Allow running this test module directly; propagate unittest's exit code.
    import sys
    sys.exit(unittest.main())
|
{
"content_hash": "12adff4f167d35176f367a23e3e3c233",
"timestamp": "",
"source": "github",
"line_count": 337,
"max_line_length": 78,
"avg_line_length": 35.210682492581604,
"alnum_prop": 0.6176470588235294,
"repo_name": "decentfox/aioh2",
"id": "7c32349a6a91ea196018a8b2da7a686e2b75eb79",
"size": "11889",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "tests/test_aioh2.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Makefile",
"bytes": "2070"
},
{
"name": "Python",
"bytes": "53334"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, print_function, division
import utool as ut
import ubelt as ub
import six
def extract_feature_from_patch(patch):
    """Compute a single descriptor vector for one image patch.

    The patch is normalized to uint8, rectified to a square, then handed to
    pyhesaff's batch patch-description routine as a one-element batch.

    Args:
        patch (ndarray): image patch to describe.

    Returns:
        ndarray: descriptor vector for the patch.
    """
    import pyhesaff
    import numpy as np
    import vtool_ibeis as vt
    normalized = vt.rectify_to_square(vt.rectify_to_uint8(patch))
    batch = np.ascontiguousarray(normalized[None, :])
    return pyhesaff.extract_desc_from_patches(batch)[0]
def extract_features(img_or_fpath, feat_type='hesaff+sift', **kwargs):
    r"""
    Detect keypoints and extract descriptors for an image.

    Calls pyhesaff's main driver function for detecting hessian affine
    keypoints. Extra parameters can be passed to the hessian affine detector
    by using kwargs.

    Args:
        img_or_fpath (ndarray or str): image array or image file path on disk
        feat_type (str): 'hesaff+sift' (default) or 'hesaff+siam128'
        **kwargs: forwarded to ``pyhesaff.detect_feats2``
            (e.g. use_adaptive_scale, nogravity_hack)

    Returns:
        tuple : (kpts, vecs)

    Raises:
        AssertionError: if ``feat_type`` is not recognized.

    CommandLine:
        python -m vtool_ibeis.features --test-extract_features
        python -m vtool_ibeis.features --test-extract_features --show
        python -m vtool_ibeis.features --test-extract_features --feat-type=hesaff+siam128 --show
        python -m vtool_ibeis.features --test-extract_features --feat-type=hesaff+siam128 --show --no-affine-invariance

    Example:
        >>> # ENABLE_DOCTEST
        >>> from vtool_ibeis.features import *  # NOQA
        >>> import vtool_ibeis as vt
        >>> # build test data
        >>> img_fpath = ut.grab_test_imgpath(ut.get_argval('--fname', default='lena.png'))
        >>> imgBGR = vt.imread(img_fpath)
        >>> feat_type = ub.argval('--feat_type', default='hesaff+sift')
        >>> import pyhesaff
        >>> kwargs = ut.parse_dict_from_argv(pyhesaff.get_hesaff_default_params())
        >>> # execute function
        >>> (kpts, vecs) = extract_features(imgBGR, feat_type, **kwargs)
        >>> # verify results
        >>> result = str((kpts, vecs))
        >>> print(result)
        >>> # Show keypoints
        >>> # xdoctest: +REQUIRES(--show)
        >>> import plottool_ibeis as pt
        >>> pt.interact_keypoints.ishow_keypoints(imgBGR, kpts, vecs, ori=True, ell_alpha=.4, color='distinct')
        >>> pt.show_if_requested()
    """
    import pyhesaff
    if feat_type == 'hesaff+sift':
        (kpts, vecs) = pyhesaff.detect_feats2(img_or_fpath, **kwargs)
    elif feat_type == 'hesaff+siam128':
        # Hacky path: detect hesaff keypoints, then re-describe them with the
        # siamese CNN from the ibeis_cnn plugin (descriptor-only network).
        from ibeis_cnn import _plugin
        (kpts, sift) = pyhesaff.detect_feats2(img_or_fpath, **kwargs)
        if isinstance(img_or_fpath, six.string_types):
            # The CNN extractor needs pixel data, not a path.
            import vtool_ibeis as vt
            img_or_fpath = vt.imread(img_or_fpath)
        vecs_list = _plugin.extract_siam128_vecs([img_or_fpath], [kpts])
        vecs = vecs_list[0]
    else:
        raise AssertionError('Unknown feat_type=%r' % (feat_type,))
    return (kpts, vecs)
def get_extract_features_default_params():
    r"""
    Return the default keyword arguments understood by the hessian-affine
    detector, as a plain dict.

    Returns:
        dict: parameter-name to default-value mapping

    CommandLine:
        python -m vtool_ibeis.features --test-get_extract_features_default_params

    Example:
        >>> # ENABLE_DOCTEST
        >>> from vtool_ibeis.features import *  # NOQA
        >>> param_dict = get_extract_features_default_params()
        >>> result = ub.repr2(param_dict)
        >>> print(result)
    """
    import pyhesaff
    return pyhesaff.get_hesaff_default_params()
def detect_opencv_keypoints():
    """Exploratory driver: run several OpenCV detectors/extractors on a test
    image and print a summary of the keypoints and descriptors produced.

    Interactive/scratch code: results are only printed, nothing is returned.
    Requires opencv-contrib (cv2.xfeatures2d) to be installed.
    """
    import cv2
    import vtool_ibeis as vt
    import numpy as np  # NOQA
    #img_fpath = ut.grab_test_imgpath(ub.argval('--fname', default='lena.png'))
    img_fpath = ut.grab_test_imgpath(ub.argval('--fname', default='zebra.png'))
    imgBGR = vt.imread(img_fpath)
    imgGray = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2GRAY)
    def from_cv2_kpts(cv2_kp):
        # Convert a cv2.KeyPoint to a flat (x, y, a, c, d, ori) tuple.
        kp = (cv2_kp.pt[0], cv2_kp.pt[1], cv2_kp.size, 0, cv2_kp.size, cv2_kp.angle)
        return kp
    print('\n'.join(ut.search_module(cv2, 'create', recursive=True)))
    detect_factory = {
        #'BLOB': cv2.SimpleBlobDetector_create,
        #'HARRIS' : HarrisWrapper.create,
        #'SIFT': cv2.xfeatures2d.SIFT_create, # really DoG
        'SURF': cv2.xfeatures2d.SURF_create, # really harris corners
        'MSER': cv2.MSER_create,
        #'StarDetector_create',
    }
    extract_factory = {
        'SIFT': cv2.xfeatures2d.SIFT_create,
        'SURF': cv2.xfeatures2d.SURF_create,
        #'DAISY': cv2.xfeatures2d.DAISY_create,
        'FREAK': cv2.xfeatures2d.FREAK_create,
        #'LATCH': cv2.xfeatures2d.LATCH_create,
        #'LUCID': cv2.xfeatures2d.LUCID_create,
        #'ORB': cv2.ORB_create,
    }
    mask = None
    type_to_kpts = {}
    type_to_desc = {}
    key = 'BLOB'
    key = 'MSER'
    for key in detect_factory.keys():
        factory = detect_factory[key]
        extractor = factory()
        # For MSERS need to adapt shape and then convert into a keypoint repr
        if hasattr(extractor, 'detectRegions'):
            # bboxes are x,y,w,h
            regions, bboxes = extractor.detectRegions(imgGray)
            # ellipse definition from [Fitzgibbon95]
            # http://www.bmva.org/bmvc/1995/bmvc-95-050.pdf p518
            # ell = [c_x, c_y, R_x, R_y, theta]
            # (cx, cy) = conic center
            # Rx and Ry = conic radii
            # theta is the counterclockwise angle
            fitz_ellipses = [cv2.fitEllipse(mser) for mser in regions]
            # http://answers.opencv.org/question/19015/how-to-use-mser-in-python/
            #hulls = [cv2.convexHull(p.reshape(-1, 1, 2)) for p in regions]
            #hull_ells = [cv2.fitEllipse(hull[:, 0]) for hull in hulls]
            kpts_ = []
            for ell in fitz_ellipses:
                ((cx, cy), (rx, ry), degrees) = ell
                theta = np.radians(degrees) # opencv lives in radians
                S = vt.scale_mat3x3(rx, ry)
                T = vt.translation_mat3x3(cx, cy)
                R = vt.rotation_mat3x3(theta)
                #R = np.eye(3)
                invVR = T.dot(R.dot(S))
                kpt = vt.flatten_invV_mats_to_kpts(np.array([invVR]))[0]
                kpts_.append(kpt)
            # NOTE(review): kpts_ is computed here but never used afterward;
            # the generic extractor.detect() result below wins instead.
            kpts_ = np.array(kpts_)
        tt = ut.tic('Computing %r keypoints' % (key,))
        try:
            cv2_kpts = extractor.detect(imgGray, mask)
        except Exception as ex:
            ut.printex(ex, 'Failed to computed %r keypoints' % (key,), iswarning=True)
            pass
        else:
            ut.toc(tt)
            type_to_kpts[key] = cv2_kpts
    print(list(type_to_kpts.keys()))
    print(ut.depth_profile(list(type_to_kpts.values())))
    print('type_to_kpts = ' + ub.repr2(type_to_kpts, truncate=True))
    cv2_kpts = type_to_kpts['MSER']
    kp = cv2_kpts[0] # NOQA
    #cv2.fitEllipse(cv2_kpts[0])
    # Descriptors are extracted on the SURF keypoints only.
    cv2_kpts = type_to_kpts['SURF']
    for key in extract_factory.keys():
        factory = extract_factory[key]
        extractor = factory()
        tt = ut.tic('Computing %r descriptors' % (key,))
        try:
            filtered_cv2_kpts, desc = extractor.compute(imgGray, cv2_kpts)
        except Exception as ex:
            ut.printex(ex, 'Failed to computed %r descriptors' % (key,), iswarning=True)
            pass
        else:
            ut.toc(tt)
            type_to_desc[key] = desc
    print(list(type_to_desc.keys()))
    print(ut.depth_profile(list(type_to_desc.values())))
    print('type_to_desc = ' + ub.repr2(type_to_desc, truncate=True))
def test_mser():
    """Interactive MSER experiment: detect MSER regions, fit ellipses,
    convert them to vtool keypoint format, filter by eccentricity, and show
    the result in a plottool window. Scratch code; returns nothing.
    """
    import cv2
    import vtool_ibeis as vt
    import plottool_ibeis as pt
    import numpy as np
    pt.qt4ensure()
    class Keypoints(ut.NiceRepr):
        """
        Convinence class for dealing with keypoints
        """
        def __init__(self, kparr, info=None):
            self.kparr = kparr
            if info is None:
                info = {}
            self.info = info
        def add_info(self, key, val):
            self.info[key] = val
        def __nice__(self):
            return ' ' + str(len(self.kparr))
        @property
        def scale(self):
            return vt.get_scales(self.kparr)
        @property
        def eccentricity(self):
            return vt.get_kpts_eccentricity(self.kparr)
        def compress(self, flags, inplace=False):
            # NOTE(review): the `inplace` parameter is accepted but ignored;
            # a new Keypoints is always returned.
            subarr = self.kparr.compress(flags, axis=0)
            info = {key: list(ub.compress(val, flags)) for key, val in self.info.items()}
            return Keypoints(subarr, info)
    img_fpath = ut.grab_test_imgpath(ub.argval('--fname', default='zebra.png'))
    imgBGR = vt.imread(img_fpath)
    imgGray = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2GRAY)
    # http://docs.opencv.org/master/d3/d28/classcv_1_1MSER.html#gsc.tab=0
    # http://stackoverflow.com/questions/17647500/exact-meaning-of-the-parameters-given-to-initialize-mser-in-opencv-2-4-x
    factory = cv2.MSER_create
    img_area = np.product(np.array(vt.get_size(imgGray)))
    _max_area = (img_area // 10)
    _delta = 8
    _min_diversity = .5
    extractor = factory(_delta=_delta, _max_area=_max_area, _min_diversity=_min_diversity)
    # bboxes are x,y,w,h
    regions, bboxes = extractor.detectRegions(imgGray)
    # ellipse definition from [Fitzgibbon95]
    # http://www.bmva.org/bmvc/1995/bmvc-95-050.pdf p518
    # ell = [c_x, c_y, R_x, R_y, theta]
    # (cx, cy) = conic center
    # Rx and Ry = conic radii
    # theta is the counterclockwise angle
    fitz_ellipses = [cv2.fitEllipse(mser) for mser in regions]
    # http://answers.opencv.org/question/19015/how-to-use-mser-in-python/
    #hulls = [cv2.convexHull(p.reshape(-1, 1, 2)) for p in regions]
    #hull_ells = [cv2.fitEllipse(hull[:, 0]) for hull in hulls]
    # Build inverse keypoint-shape matrices (translate * rotate * scale).
    invVR_mats = []
    for ell in fitz_ellipses:
        ((cx, cy), (dx, dy), degrees) = ell
        theta = np.radians(degrees) # opencv lives in radians
        # Convert diameter to radians
        rx = dx / 2
        ry = dy / 2
        S = vt.scale_mat3x3(rx, ry)
        T = vt.translation_mat3x3(cx, cy)
        R = vt.rotation_mat3x3(theta)
        invVR = T.dot(R.dot(S))
        invVR_mats.append(invVR)
    invVR_mats = np.array(invVR_mats)
    #_oris = vt.get_invVR_mats_oris(invVR_mats)
    kpts2_ = vt.flatten_invV_mats_to_kpts(invVR_mats)
    self = Keypoints(kpts2_)
    self.add_info('regions', regions)
    # Keep only near-circular regions (eccentricity below .9).
    flags = (self.eccentricity < .9)
    #flags = self.scale < np.mean(self.scale)
    #flags = self.scale < np.median(self.scale)
    self = self.compress(flags)
    import plottool_ibeis as pt
    #pt.interact_keypoints.ishow_keypoints(imgBGR, self.kparr, None, ell_alpha=.4, color='distinct', fnum=2)
    #import plottool_ibeis as pt
    # Blacken the surviving MSER pixels so regions are visible in the viewer.
    vis = imgBGR.copy()
    for region in self.info['regions']:
        vis[region.T[1], region.T[0], :] = 0
    #regions, bbox = mser.detectRegions(gray)
    #hulls = [cv2.convexHull(p.reshape(-1, 1, 2)) for p in self.info['regions']]
    #cv2.polylines(vis, hulls, 1, (0, 255, 0))
    #for region in self.info['regions']:
    # ell = cv2.fitEllipse(region)
    # cv2.ellipse(vis, ell, (255))
    pt.interact_keypoints.ishow_keypoints(vis, self.kparr, None, ell_alpha=.4, color='distinct', fnum=2)
    #pt.imshow(vis, fnum=2)
    pt.update()
    #extractor = extract_factory['DAISY']()
    #desc_type_to_dtype = {
    # cv2.CV_8U: np.uint8,
    # cv2.CV_8s: np.uint,
    #}
    #def alloc_desc(extractor):
    # desc_type = extractor.descriptorType()
    # desc_size = extractor.descriptorSize()
    # dtype = desc_type_to_dtype[desc_type]
    # shape = (len(cv2_kpts), desc_size)
    # desc = np.empty(shape, dtype=dtype)
    # return desc
    #ut.search_module(cv2, 'array', recursive=True)
    #ut.search_module(cv2, 'freak', recursive=True)
    #ut.search_module(cv2, 'out', recursive=True)
    #cv2_kpts = cv2_kpts[0:2]
    #for key, factory in just_desc_factory_.items():
    # extractor = factory()
    # desc = alloc_desc(extractor)
    # desc = extractor.compute(imgGray, cv2_kpts)
    # feats[key] = (desc,)
    # #extractor.compute(imgGray, cv2_kpts, desc)
    # pass
    #kpts = np.array(list(map(from_cv2_kpts, cv2_kpts)))
    #orb = cv2.ORB()
    #kp1, des1 = orb.detectAndCompute(imgGray, None)
    #blober = cv2.SimpleBlobDetector_create()
    #haris_kpts = cv2.cornerHarris(imgGray, 2, 3, 0.04)
    #[name for name in dir(cv2) if 'mat' in name.lower()]
    #[name for name in dir(cv2.xfeatures2d) if 'desc' in name.lower()]
    #[name for name in dir(cv2) if 'detect' in name.lower()]
    #[name for name in dir(cv2) if 'extract' in name.lower()]
    #[name for name in dir(cv2) if 'ellip' in name.lower()]
    #sift = cv2.xfeatures2d.SIFT_create()
    #cv2_kpts = sift.detect(imgGray)
    #desc = sift.compute(imgGray, cv2_kpts)[1]
    #freak = cv2.xfeatures2d.FREAK_create()
    #cv2_kpts = freak.detect(imgGray)
    #desc = freak.compute(imgGray, cv2_kpts)[1]
    pass
if __name__ == '__main__':
    """
    CommandLine:
        xdoctest -m vtool_ibeis.features
    """
    # Run this module's doctests when executed directly.
    import xdoctest
    xdoctest.doctest_module(__file__)
|
{
"content_hash": "9a5986f6965028a79ef97bbc46244989",
"timestamp": "",
"source": "github",
"line_count": 376,
"max_line_length": 122,
"avg_line_length": 35.63297872340426,
"alnum_prop": 0.5950141812210777,
"repo_name": "Erotemic/vtool",
"id": "e521bcd7cf117e1990895fa06696d95ed3380a33",
"size": "13422",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev/2.2.0",
"path": "vtool_ibeis/features.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "608"
},
{
"name": "C++",
"bytes": "14592"
},
{
"name": "CMake",
"bytes": "4509"
},
{
"name": "Python",
"bytes": "1569183"
},
{
"name": "Shell",
"bytes": "18978"
}
],
"symlink_target": ""
}
|
import _plotly_utils.basevalidators
class LineValidator(_plotly_utils.basevalidators.CompoundValidator):
    """Compound validator for the ``line`` property of ``layout.selection``."""

    def __init__(self, plotly_name="line", parent_name="layout.selection", **kwargs):
        # Pull the generated defaults out of kwargs first so explicit caller
        # overrides win, then forward everything to the compound base class.
        data_class_str = kwargs.pop("data_class_str", "Line")
        data_docs = kwargs.pop(
            "data_docs",
            """
            color
                Sets the line color.
            dash
                Sets the dash style of lines. Set to a dash
                type string ("solid", "dot", "dash",
                "longdash", "dashdot", or "longdashdot") or a
                dash length list in px (eg "5px,10px,2px,2px").
            width
                Sets the line width (in px).
""",
        )
        super(LineValidator, self).__init__(
            plotly_name=plotly_name,
            parent_name=parent_name,
            data_class_str=data_class_str,
            data_docs=data_docs,
            **kwargs,
        )
|
{
"content_hash": "960b90b12feeacdfcc606b4ad0e799cc",
"timestamp": "",
"source": "github",
"line_count": 25,
"max_line_length": 85,
"avg_line_length": 35.44,
"alnum_prop": 0.5079006772009029,
"repo_name": "plotly/plotly.py",
"id": "7017d95469e079031b0bb1fdb9f51bef8fe9b98a",
"size": "886",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "packages/python/plotly/plotly/validators/layout/selection/_line.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "545"
},
{
"name": "JavaScript",
"bytes": "2074"
},
{
"name": "PostScript",
"bytes": "565328"
},
{
"name": "Python",
"bytes": "31506317"
},
{
"name": "TypeScript",
"bytes": "71337"
}
],
"symlink_target": ""
}
|
from .base import Pool
from .. import event
from ..engine.base import Engine
class PoolEvents(event.Events):
    """Available events for :class:`_pool.Pool`.
    The methods here define the name of an event as well
    as the names of members that are passed to listener
    functions.
    e.g.::
        from sqlalchemy import event
        def my_on_checkout(dbapi_conn, connection_rec, connection_proxy):
            "handle an on checkout event"
        event.listen(Pool, 'checkout', my_on_checkout)
    In addition to accepting the :class:`_pool.Pool` class and
    :class:`_pool.Pool` instances, :class:`_events.PoolEvents` also accepts
    :class:`_engine.Engine` objects and the :class:`_engine.Engine` class as
    targets, which will be resolved to the ``.pool`` attribute of the
    given engine or the :class:`_pool.Pool` class::
        engine = create_engine("postgresql://scott:tiger@localhost/test")
        # will associate with engine.pool
        event.listen(engine, 'checkout', my_on_checkout)
    """
    _target_class_doc = "SomeEngineOrPool"
    _dispatch_target = Pool
    @classmethod
    def _accept_with(cls, target):
        # Resolve Engine classes/instances to the Pool they wrap so that
        # listeners registered on an Engine land on its pool's dispatch.
        if isinstance(target, type):
            if issubclass(target, Engine):
                return Pool
            elif issubclass(target, Pool):
                return target
        elif isinstance(target, Engine):
            return target.pool
        else:
            return target
    @classmethod
    def _listen(cls, event_key, **kw):
        # Propagate the pool's asyncio flag to the listener unless the
        # caller specified one explicitly.
        target = event_key.dispatch_target
        kw.setdefault("asyncio", target._is_asyncio)
        event_key.base_listen(**kw)
    def connect(self, dbapi_connection, connection_record):
        """Called at the moment a particular DBAPI connection is first
        created for a given :class:`_pool.Pool`.
        This event allows one to capture the point directly after which
        the DBAPI module-level ``.connect()`` method has been used in order
        to produce a new DBAPI connection.
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        """
    def first_connect(self, dbapi_connection, connection_record):
        """Called exactly once for the first time a DBAPI connection is
        checked out from a particular :class:`_pool.Pool`.
        The rationale for :meth:`_events.PoolEvents.first_connect`
        is to determine
        information about a particular series of database connections based
        on the settings used for all connections.  Since a particular
        :class:`_pool.Pool`
        refers to a single "creator" function (which in terms
        of a :class:`_engine.Engine`
        refers to the URL and connection options used),
        it is typically valid to make observations about a single connection
        that can be safely assumed to be valid about all subsequent
        connections, such as the database version, the server and client
        encoding settings, collation settings, and many others.
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        """
    def checkout(self, dbapi_connection, connection_record, connection_proxy):
        """Called when a connection is retrieved from the Pool.
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        :param connection_proxy: the :class:`._ConnectionFairy` object which
          will proxy the public interface of the DBAPI connection for the
          lifespan of the checkout.
        If you raise a :class:`~sqlalchemy.exc.DisconnectionError`, the current
        connection will be disposed and a fresh connection retrieved.
        Processing of all checkout listeners will abort and restart
        using the new connection.
        .. seealso:: :meth:`_events.ConnectionEvents.engine_connect`
           - a similar event
           which occurs upon creation of a new :class:`_engine.Connection`.
        """
    def checkin(self, dbapi_connection, connection_record):
        """Called when a connection returns to the pool.
        Note that the connection may be closed, and may be None if the
        connection has been invalidated.  ``checkin`` will not be called
        for detached connections.  (They do not return to the pool.)
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        """
    def reset(self, dbapi_connection, connection_record):
        """Called before the "reset" action occurs for a pooled connection.
        This event represents
        when the ``rollback()`` method is called on the DBAPI connection
        before it is returned to the pool.  The behavior of "reset" can
        be controlled, including disabled, using the ``reset_on_return``
        pool argument.
        The :meth:`_events.PoolEvents.reset` event is usually followed by the
        :meth:`_events.PoolEvents.checkin` event, except in those
        cases where the connection is discarded immediately after reset.
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        .. seealso::
            :meth:`_events.ConnectionEvents.rollback`
            :meth:`_events.ConnectionEvents.commit`
        """
    def invalidate(self, dbapi_connection, connection_record, exception):
        """Called when a DBAPI connection is to be "invalidated".
        This event is called any time the :meth:`._ConnectionRecord.invalidate`
        method is invoked, either from API usage or via "auto-invalidation",
        without the ``soft`` flag.
        The event occurs before a final attempt to call ``.close()`` on the
        connection occurs.
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        :param exception: the exception object corresponding to the reason
         for this invalidation, if any. May be ``None``.
        .. versionadded:: 0.9.2 Added support for connection invalidation
           listening.
        .. seealso::
            :ref:`pool_connection_invalidation`
        """
    def soft_invalidate(self, dbapi_connection, connection_record, exception):
        """Called when a DBAPI connection is to be "soft invalidated".
        This event is called any time the :meth:`._ConnectionRecord.invalidate`
        method is invoked with the ``soft`` flag.
        Soft invalidation refers to when the connection record that tracks
        this connection will force a reconnect after the current connection
        is checked in.   It does not actively close the dbapi_connection
        at the point at which it is called.
        .. versionadded:: 1.0.3
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        :param exception: the exception object corresponding to the reason
         for this invalidation, if any. May be ``None``.
        """
    def close(self, dbapi_connection, connection_record):
        """Called when a DBAPI connection is closed.
        The event is emitted before the close occurs.
        The close of a connection can fail; typically this is because
        the connection is already closed.  If the close operation fails,
        the connection is discarded.
        The :meth:`.close` event corresponds to a connection that's still
        associated with the pool. To intercept close events for detached
        connections use :meth:`.close_detached`.
        .. versionadded:: 1.1
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        """
    def detach(self, dbapi_connection, connection_record):
        """Called when a DBAPI connection is "detached" from a pool.
        This event is emitted after the detach occurs.  The connection
        is no longer associated with the given connection record.
        .. versionadded:: 1.1
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        :param connection_record: the :class:`._ConnectionRecord` managing the
         DBAPI connection.
        """
    def close_detached(self, dbapi_connection):
        """Called when a detached DBAPI connection is closed.
        The event is emitted before the close occurs.
        The close of a connection can fail; typically this is because
        the connection is already closed.  If the close operation fails,
        the connection is discarded.
        .. versionadded:: 1.1
        :param dbapi_connection: a DBAPI connection.
         The :attr:`._ConnectionRecord.dbapi_connection` attribute.
        """
|
{
"content_hash": "ad91f13cacaf8891f4812740511516d8",
"timestamp": "",
"source": "github",
"line_count": 271,
"max_line_length": 79,
"avg_line_length": 36.15867158671587,
"alnum_prop": 0.6601694050413307,
"repo_name": "monetate/sqlalchemy",
"id": "7c2cae7c5eb0fe1f275cb83433ada71e45ec0bff",
"size": "10046",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "lib/sqlalchemy/pool/events.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "49142"
},
{
"name": "Python",
"bytes": "11790244"
}
],
"symlink_target": ""
}
|
import qrcode
from pyqrcode import QRCode
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse
import http.cookiejar
import requests
import xml.dom.minidom
import json
import time
import ssl
import re
import sys
import os
import subprocess
import random
import multiprocessing
import platform
import logging
import http.client
from collections import defaultdict
from urllib.parse import urlparse
from lxml import html
from socket import timeout as timeout_error
#import sys
#from PIL import Image
#import pdb
# for media upload
import mimetypes
from requests_toolbelt.multipart.encoder import MultipartEncoder
def catchKeyboardInterrupt(fn):
    """Decorator that turns a KeyboardInterrupt raised inside *fn* into a
    quiet, logged exit notice (the wrapper returns None in that case).

    Fixes two defects in the original: the wrapper accepted positional
    arguments only (``*args``), silently breaking any decorated function
    called with keyword arguments, and it did not preserve the wrapped
    function's metadata (``__name__``/``__doc__``).
    """
    import functools

    @functools.wraps(fn)
    def wrapper(*args, **kwargs):
        try:
            return fn(*args, **kwargs)
        except KeyboardInterrupt:
            print('\n[*] 强制退出程序')
            logging.debug('[*] 强制退出程序')
    return wrapper
def _decode_list(data):
rv = []
for item in data:
if isinstance(item, str):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
rv = {}
for key, value in data.items():
if isinstance(key, str):
key = key.encode('utf-8')
if isinstance(value, str):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
class WebWeixin(object):
def __str__(self):
description = \
"=========================\n" + \
"[#] Web Weixin\n" + \
"[#] Debug Mode: " + str(self.DEBUG) + "\n" + \
"[#] Uuid: " + self.uuid + "\n" + \
"[#] Uin: " + str(self.uin) + "\n" + \
"[#] Sid: " + self.sid + "\n" + \
"[#] Skey: " + self.skey + "\n" + \
"[#] DeviceId: " + self.deviceId + "\n" + \
"[#] PassTicket: " + self.pass_ticket + "\n" + \
"========================="
return description
    def __init__(self):
        """Initialize session state with defaults and install a global
        cookie-aware urllib opener (process-wide side effect)."""
        self.DEBUG = False
        self.commandLineQRCode = False
        self.uuid = ''
        self.base_uri = ''
        self.redirect_uri = ''
        self.uin = ''
        self.sid = ''
        self.skey = ''
        self.pass_ticket = ''
        # Fake device id: 'e' followed by 15 random decimal digits.
        self.deviceId = 'e' + repr(random.random())[2:17]
        self.BaseRequest = {}
        self.synckey = ''
        self.SyncKey = []
        self.User = []
        self.MemberList = []
        self.ContactList = [] # friends
        self.GroupList = [] # group chats
        self.GroupMemeberList = [] # group members
        self.PublicUsersList = [] # official/service accounts
        self.SpecialUsersList = [] # special built-in accounts
        self.autoReplyMode = False
        self.syncHost = ''
        self.user_agent = 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.109 Safari/537.36'
        self.interactive = False
        self.autoOpen = False
        self.saveFolder = os.path.join(os.getcwd(), 'saved')
        self.saveSubFolders = {'webwxgeticon': 'icons', 'webwxgetheadimg': 'headimgs', 'webwxgetmsgimg': 'msgimgs',
                               'webwxgetvideo': 'videos', 'webwxgetvoice': 'voices', '_showQRCodeImg': 'qrcodes'}
        self.appid = 'wx782c26e4c19acffb'
        self.lang = 'zh_CN'
        self.lastCheckTs = time.time()
        self.memberCount = 0
        # Known special usernames excluded from normal contact handling.
        # NOTE(review): contains duplicate entries (e.g. 'fmessage') — looks
        # like a copy-paste artifact; harmless for membership tests.
        self.SpecialUsers = ['newsapp', 'fmessage', 'filehelper', 'weibo', 'qqmail', 'fmessage', 'tmessage', 'qmessage', 'qqsync', 'floatbottle', 'lbsapp', 'shakeapp', 'medianote', 'qqfriend', 'readerapp', 'blogapp', 'facebookapp', 'masssendapp', 'meishiapp', 'feedsapp',
                             'voip', 'blogappweixin', 'weixin', 'brandsessionholder', 'weixinreminder', 'wxid_novlwrv3lqwv11', 'gh_22b87fa7cb3c', 'officialaccounts', 'notification_messages', 'wxid_novlwrv3lqwv11', 'gh_22b87fa7cb3c', 'wxitil', 'userexperience_alarm', 'notification_messages']
        self.TimeOut = 20 # minimum sync polling interval (seconds)
        self.media_count = -1
        # Install a cookie-carrying opener globally so every urllib request
        # in this process shares the login session cookies.
        self.cookie = http.cookiejar.CookieJar()
        opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(self.cookie))
        opener.addheaders = [('User-agent', self.user_agent)]
        urllib.request.install_opener(opener)
def loadConfig(self, config):
if config['DEBUG']:
self.DEBUG = config['DEBUG']
if config['autoReplyMode']:
self.autoReplyMode = config['autoReplyMode']
if config['user_agent']:
self.user_agent = config['user_agent']
if config['interactive']:
self.interactive = config['interactive']
if config['autoOpen']:
self.autoOpen = config['autoOpen']
    def getUUID(self):
        """Request a fresh login UUID from the WeChat jslogin endpoint.

        On success stores it in ``self.uuid`` and returns True (server code
        '200'); returns False on empty response or no regex match.
        Network I/O via ``self._post`` (defined elsewhere in this class).
        """
        url = 'https://login.weixin.qq.com/jslogin'
        params = {
            'appid': self.appid,
            'fun': 'new',
            'lang': self.lang,
            '_': int(time.time()),
        }
        #r = requests.get(url=url, params=params)
        #r.encoding = 'utf-8'
        #data = r.text
        data = self._post(url, params, False).decode("utf-8")
        if data == '':
            return False
        # Response is a JS snippet, e.g.:
        # window.QRLogin.code = 200; window.QRLogin.uuid = "...";
        regx = r'window.QRLogin.code = (\d+); window.QRLogin.uuid = "(\S+?)"'
        pm = re.search(regx, data)
        if pm:
            code = pm.group(1)
            self.uuid = pm.group(2)
            return code == '200'
        return False
    def genQRCode(self):
        """Display the login QR code in a platform-appropriate way:
        image viewer on Windows/macOS, terminal QR elsewhere."""
        #return self._showQRCodeImg()
        if sys.platform.startswith('win'):
            self._showQRCodeImg('win')
        elif sys.platform.find('darwin') >= 0:
            self._showQRCodeImg('macos')
        else:
            self._str2qr('https://login.weixin.qq.com/l/' + self.uuid)
    def _showQRCodeImg(self, str):
        """Fetch the login QR code and open it with the OS image viewer, or
        render it in the terminal when commandLineQRCode is enabled.

        NOTE(review): the parameter named ``str`` ('win' or 'macos') shadows
        the builtin; kept as-is to preserve the signature.
        """
        if self.commandLineQRCode:
            qrCode = QRCode('https://login.weixin.qq.com/l/' + self.uuid)
            self._showCommandLineQRCode(qrCode.text(1))
        else:
            url = 'https://login.weixin.qq.com/qrcode/' + self.uuid
            params = {
                't': 'webwx',
                '_': int(time.time())
            }
            data = self._post(url, params, False)
            if data == '':
                return
            # _saveFile is defined elsewhere in this class; presumably writes
            # under saveFolder/saveSubFolders['_showQRCodeImg'].
            QRCODE_PATH = self._saveFile('qrcode.jpg', data, '_showQRCodeImg')
            if str == 'win':
                os.startfile(QRCODE_PATH)
            elif str == 'macos':
                subprocess.call(["open", QRCODE_PATH])
            else:
                return
    def _showCommandLineQRCode(self, qr_data, enableCmdQR=2):
        """Render QR text data as black/white cells on the terminal.

        :param qr_data: string of '0'/'1' cells produced by the QR library.
        :param enableCmdQR: cell width multiplier; a negative value swaps
            black and white (useful on light terminals).
        """
        try:
            # Probe whether the console encoding can represent a full-block
            # character; terminals that cannot (e.g. GBK consoles) raise
            # UnicodeEncodeError and get the 'MM' fallback instead.
            b = u'\u2588'
            sys.stdout.write(b + '\r')
            sys.stdout.flush()
        except UnicodeEncodeError:
            white = 'MM'
        else:
            white = b
        black = '  '
        blockCount = int(enableCmdQR)
        if abs(blockCount) == 0:
            blockCount = 1
        white *= abs(blockCount)
        if blockCount < 0:
            # Inverted rendering for light-on-dark terminals.
            white, black = black, white
        # Wipe the probe character off the current line before drawing.
        sys.stdout.write(' ' * 50 + '\r')
        sys.stdout.flush()
        qr = qr_data.replace('0', white).replace('1', black)
        sys.stdout.write(qr)
        sys.stdout.flush()
def waitForLogin(self, tip=1):
time.sleep(tip)
url = 'https://login.weixin.qq.com/cgi-bin/mmwebwx-bin/login?tip=%s&uuid=%s&_=%s' % (
tip, self.uuid, int(time.time()))
data = self._get(url)
if data == '':
return False
pm = re.search(r"window.code=(\d+);", data)
code = pm.group(1)
if code == '201':
return True
elif code == '200':
pm = re.search(r'window.redirect_uri="(\S+?)";', data)
r_uri = pm.group(1) + '&fun=new'
self.redirect_uri = r_uri
self.base_uri = r_uri[:r_uri.rfind('/')]
return True
elif code == '408':
self._echo('[登陆超时] \n')
else:
self._echo('[登陆异常] \n')
return False
    def login(self):
        """Exchange the redirect ticket for session credentials.

        Parses skey, wxsid, wxuin and pass_ticket out of the XML document
        returned by the redirect URI and builds ``self.BaseRequest``.

        :return: True when all four credentials were obtained.
        """
        data = self._get(self.redirect_uri)
        if data == '':
            return False
        doc = xml.dom.minidom.parseString(data)
        root = doc.documentElement
        for node in root.childNodes:
            if node.nodeName == 'skey':
                self.skey = node.childNodes[0].data
            elif node.nodeName == 'wxsid':
                self.sid = node.childNodes[0].data
            elif node.nodeName == 'wxuin':
                self.uin = node.childNodes[0].data
            elif node.nodeName == 'pass_ticket':
                self.pass_ticket = node.childNodes[0].data
        # All four fields are required; an empty one means the login failed.
        if '' in (self.skey, self.sid, self.uin, self.pass_ticket):
            return False
        # BaseRequest is attached to every subsequent API call.
        self.BaseRequest = {
            'Uin': int(self.uin),
            'Sid': self.sid,
            'Skey': self.skey,
            'DeviceID': self.deviceId,
        }
        return True
    def webwxinit(self):
        """Initialise the session: fetch our own profile and first SyncKey.

        :return: True when the server reports BaseResponse.Ret == 0.
        """
        url = self.base_uri + '/webwxinit?pass_ticket=%s&skey=%s&r=%s' % (
            self.pass_ticket, self.skey, int(time.time()))
        params = {
            'BaseRequest': self.BaseRequest
        }
        dic = self._post(url, params)
        if dic == '':
            return False
        self.SyncKey = dic['SyncKey']
        self.User = dic['User']
        # synckey for synccheck: flatten the Key/Val pairs into "k_v|k_v|..."
        self.synckey = '|'.join(
            [str(keyVal['Key']) + '_' + str(keyVal['Val']) for keyVal in self.SyncKey['List']])
        return dic['BaseResponse']['Ret'] == 0
def webwxstatusnotify(self):
url = self.base_uri + \
'/webwxstatusnotify?lang=zh_CN&pass_ticket=%s' % (self.pass_ticket)
params = {
'BaseRequest': self.BaseRequest,
"Code": 3,
"FromUserName": self.User['UserName'],
"ToUserName": self.User['UserName'],
"ClientMsgId": int(time.time())
}
dic = self._post(url, params)
if dic == '':
return False
return dic['BaseResponse']['Ret'] == 0
def webwxgetcontact(self):
SpecialUsers = self.SpecialUsers
url = self.base_uri + '/webwxgetcontact?pass_ticket=%s&skey=%s&r=%s' % (
self.pass_ticket, self.skey, int(time.time()))
dic = self._post(url, {})
if dic == '':
return False
self.MemberCount = dic['MemberCount']
self.MemberList = dic['MemberList']
ContactList = self.MemberList[:]
GroupList = self.GroupList[:]
PublicUsersList = self.PublicUsersList[:]
SpecialUsersList = self.SpecialUsersList[:]
for i in range(len(ContactList) - 1, -1, -1):
Contact = ContactList[i]
if Contact['VerifyFlag'] & 8 != 0: # 公众号/服务号
ContactList.remove(Contact)
self.PublicUsersList.append(Contact)
elif Contact['UserName'] in SpecialUsers: # 特殊账号
ContactList.remove(Contact)
self.SpecialUsersList.append(Contact)
elif '@@' in Contact['UserName']: # 群聊
ContactList.remove(Contact)
self.GroupList.append(Contact)
elif Contact['UserName'] == self.User['UserName']: # 自己
ContactList.remove(Contact)
self.ContactList = ContactList
return True
def webwxbatchgetcontact(self):
url = self.base_uri + \
'/webwxbatchgetcontact?type=ex&r=%s&pass_ticket=%s' % (
int(time.time()), self.pass_ticket)
params = {
'BaseRequest': self.BaseRequest,
"Count": len(self.GroupList),
"List": [{"UserName": g['UserName'], "EncryChatRoomId":""} for g in self.GroupList]
}
dic = self._post(url, params)
if dic == '':
return False
# blabla ...
ContactList = dic['ContactList']
ContactCount = dic['Count']
self.GroupList = ContactList
for i in range(len(ContactList) - 1, -1, -1):
Contact = ContactList[i]
MemberList = Contact['MemberList']
for member in MemberList:
self.GroupMemeberList.append(member)
return True
def getNameById(self, id):
url = self.base_uri + \
'/webwxbatchgetcontact?type=ex&r=%s&pass_ticket=%s' % (
int(time.time()), self.pass_ticket)
params = {
'BaseRequest': self.BaseRequest,
"Count": 1,
"List": [{"UserName": id, "EncryChatRoomId": ""}]
}
dic = self._post(url, params)
if dic == '':
return None
# blabla ...
return dic['ContactList']
def testsynccheck(self):
SyncHost = ['wx2.qq.com',
'webpush.wx2.qq.com',
'wx8.qq.com',
'webpush.wx8.qq.com',
'qq.com',
'webpush.wx.qq.com',
'web2.wechat.com',
'webpush.web2.wechat.com',
'wechat.com',
'webpush.web.wechat.com',
'webpush.weixin.qq.com',
'webpush.wechat.com',
'webpush1.wechat.com',
'webpush2.wechat.com',
'webpush.wx.qq.com',
'webpush2.wx.qq.com']
for host in SyncHost:
self.syncHost = host
[retcode, selector] = self.synccheck()
if retcode == '0':
return True
return False
def synccheck(self):
params = {
'r': int(time.time()),
'sid': self.sid,
'uin': self.uin,
'skey': self.skey,
'deviceid': self.deviceId,
'synckey': self.synckey,
'_': int(time.time()),
}
url = 'https://' + self.syncHost + '/cgi-bin/mmwebwx-bin/synccheck?' + urllib.parse.urlencode(params)
data = self._get(url, timeout=5)
if data == '':
return [-1,-1]
pm = re.search(
r'window.synccheck={retcode:"(\d+)",selector:"(\d+)"}', data)
retcode = pm.group(1)
selector = pm.group(2)
return [retcode, selector]
def webwxsync(self):
url = self.base_uri + \
'/webwxsync?sid=%s&skey=%s&pass_ticket=%s' % (
self.sid, self.skey, self.pass_ticket)
params = {
'BaseRequest': self.BaseRequest,
'SyncKey': self.SyncKey,
'rr': ~int(time.time())
}
dic = self._post(url, params)
if dic == '':
return None
if self.DEBUG:
print(json.dumps(dic, indent=4))
(json.dumps(dic, indent=4))
if dic['BaseResponse']['Ret'] == 0:
self.SyncKey = dic['SyncKey']
self.synckey = '|'.join(
[str(keyVal['Key']) + '_' + str(keyVal['Val']) for keyVal in self.SyncKey['List']])
return dic
def webwxsendmsg(self, word, to='filehelper'):
url = self.base_uri + \
'/webwxsendmsg?pass_ticket=%s' % (self.pass_ticket)
clientMsgId = str(int(time.time() * 1000)) + \
str(random.random())[:5].replace('.', '')
params = {
'BaseRequest': self.BaseRequest,
'Msg': {
"Type": 1,
"Content": self._transcoding(word),
"FromUserName": self.User['UserName'],
"ToUserName": to,
"LocalID": clientMsgId,
"ClientMsgId": clientMsgId
}
}
headers = {'content-type': 'application/json; charset=UTF-8'}
data = json.dumps(params, ensure_ascii=False).encode('utf8')
r = requests.post(url, data=data, headers=headers)
dic = r.json()
return dic['BaseResponse']['Ret'] == 0
    def webwxuploadmedia(self, image_name):
        """Upload a local file to the WeChat media server.

        :param image_name: path of the file to upload.
        :return: the parsed JSON response dict on success, the string
            "None Fuck Cookie" when the webwx_data_ticket cookie is
            missing, or None when the server reports an error.
        """
        url = 'https://file2.wx.qq.com/cgi-bin/mmwebwx-bin/webwxuploadmedia?f=json'
        # Upload counter: becomes the WU_FILE_<n> form-field id below.
        self.media_count = self.media_count + 1
        # File name
        file_name = image_name
        # MIME type
        # mime_type = application/pdf, image/jpeg, image/png, etc.
        mime_type = mimetypes.guess_type(image_name, strict=False)[0]
        # Media type as understood by WeChat.  The server appears to accept
        # only two kinds: 'pic' (shown inline) and 'doc' (shown as a file).
        media_type = 'pic' if mime_type.split('/')[0] == 'image' else 'doc'
        # Last-modified date (hard-coded; the server seemingly ignores it).
        lastModifieDate = 'Thu Mar 17 2016 00:55:10 GMT+0800 (CST)'
        # File size in bytes
        file_size = os.path.getsize(file_name)
        # PassTicket
        pass_ticket = self.pass_ticket
        # clientMediaId: millisecond timestamp plus a short random suffix.
        client_media_id = str(int(time.time() * 1000)) + \
            str(random.random())[:5].replace('.', '')
        # webwx_data_ticket is read back from the login cookie jar.
        webwx_data_ticket = ''
        for item in self.cookie:
            if item.name == 'webwx_data_ticket':
                webwx_data_ticket = item.value
                break
        if (webwx_data_ticket == ''):
            return "None Fuck Cookie"
        uploadmediarequest = json.dumps({
            "BaseRequest": self.BaseRequest,
            "ClientMediaId": client_media_id,
            "TotalLen": file_size,
            "StartPos": 0,
            "DataLen": file_size,
            "MediaType": 4
        }, ensure_ascii=False).encode('utf8')
        # NOTE(review): the file handle opened below is never explicitly
        # closed; it stays open until garbage collection.  Confirm before
        # restructuring, since MultipartEncoder streams from it lazily.
        multipart_encoder = MultipartEncoder(
            fields={
                'id': 'WU_FILE_' + str(self.media_count),
                'name': file_name,
                'type': mime_type,
                'lastModifieDate': lastModifieDate,
                'size': str(file_size),
                'mediatype': media_type,
                'uploadmediarequest': uploadmediarequest,
                'webwx_data_ticket': webwx_data_ticket,
                'pass_ticket': pass_ticket,
                'filename': (file_name, open(file_name, 'rb'), mime_type.split('/')[1])
            },
            boundary='-----------------------------1575017231431605357584454111'
        )
        # Browser-like headers; the fixed boundary above must match the
        # multipart Content-Type generated by the encoder.
        headers = {
            'Host': 'file2.wx.qq.com',
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:42.0) Gecko/20100101 Firefox/42.0',
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8',
            'Accept-Language': 'en-US,en;q=0.5',
            'Accept-Encoding': 'gzip, deflate',
            'Referer': 'https://wx2.qq.com/',
            'Content-Type': multipart_encoder.content_type,
            'Origin': 'https://wx2.qq.com',
            'Connection': 'keep-alive',
            'Pragma': 'no-cache',
            'Cache-Control': 'no-cache'
        }
        r = requests.post(url, data=multipart_encoder, headers=headers)
        response_json = r.json()
        if response_json['BaseResponse']['Ret'] == 0:
            return response_json
        return None
def webwxsendmsgimg(self, user_id, media_id):
url = 'https://wx2.qq.com/cgi-bin/mmwebwx-bin/webwxsendmsgimg?fun=async&f=json&pass_ticket=%s' % self.pass_ticket
clientMsgId = str(int(time.time() * 1000)) + \
str(random.random())[:5].replace('.', '')
data_json = {
"BaseRequest": self.BaseRequest,
"Msg": {
"Type": 3,
"MediaId": media_id,
"FromUserName": self.User['UserName'],
"ToUserName": user_id,
"LocalID": clientMsgId,
"ClientMsgId": clientMsgId
}
}
headers = {'content-type': 'application/json; charset=UTF-8'}
data = json.dumps(data_json, ensure_ascii=False).encode('utf8')
r = requests.post(url, data=data, headers=headers)
dic = r.json()
return dic['BaseResponse']['Ret'] == 0
def webwxsendmsgemotion(self, user_id, media_id):
url = 'https://wx2.qq.com/cgi-bin/mmwebwx-bin/webwxsendemoticon?fun=sys&f=json&pass_ticket=%s' % self.pass_ticket
clientMsgId = str(int(time.time() * 1000)) + \
str(random.random())[:5].replace('.', '')
data_json = {
"BaseRequest": self.BaseRequest,
"Msg": {
"Type": 47,
"EmojiFlag": 2,
"MediaId": media_id,
"FromUserName": self.User['UserName'],
"ToUserName": user_id,
"LocalID": clientMsgId,
"ClientMsgId": clientMsgId
}
}
headers = {'content-type': 'application/json; charset=UTF-8'}
data = json.dumps(data_json, ensure_ascii=False).encode('utf8')
r = requests.post(url, data=data, headers=headers)
dic = r.json()
if self.DEBUG:
print(json.dumps(dic, indent=4))
logging.debug(json.dumps(dic, indent=4))
return dic['BaseResponse']['Ret'] == 0
def _saveFile(self, filename, data, api=None):
fn = filename
if self.saveSubFolders[api]:
dirName = os.path.join(self.saveFolder, self.saveSubFolders[api])
if not os.path.exists(dirName):
os.makedirs(dirName)
fn = os.path.join(dirName, filename)
logging.debug('Saved file: %s' % fn)
with open(fn, 'wb') as f:
f.write(data)
f.close()
return fn
def webwxgeticon(self, id):
url = self.base_uri + \
'/webwxgeticon?username=%s&skey=%s' % (id, self.skey)
data = self._get(url)
if data == '':
return ''
fn = 'img_' + id + '.jpg'
return self._saveFile(fn, data, 'webwxgeticon')
def webwxgetheadimg(self, id):
url = self.base_uri + \
'/webwxgetheadimg?username=%s&skey=%s' % (id, self.skey)
data = self._get(url)
if data == '':
return ''
fn = 'img_' + id + '.jpg'
return self._saveFile(fn, data, 'webwxgetheadimg')
def webwxgetmsgimg(self, msgid):
url = self.base_uri + \
'/webwxgetmsgimg?MsgID=%s&skey=%s' % (msgid, self.skey)
data = self._get(url)
if data == '':
return ''
fn = 'img_' + msgid + '.jpg'
return self._saveFile(fn, data, 'webwxgetmsgimg')
# Not work now for weixin haven't support this API
def webwxgetvideo(self, msgid):
url = self.base_uri + \
'/webwxgetvideo?msgid=%s&skey=%s' % (msgid, self.skey)
data = self._get(url, api='webwxgetvideo')
if data == '':
return ''
fn = 'video_' + msgid + '.mp4'
return self._saveFile(fn, data, 'webwxgetvideo')
def webwxgetvoice(self, msgid):
url = self.base_uri + \
'/webwxgetvoice?msgid=%s&skey=%s' % (msgid, self.skey)
data = self._get(url, api='webwxgetvoice')
if data == '':
return ''
fn = 'voice_' + msgid + '.mp3'
return self._saveFile(fn, data, 'webwxgetvoice')
def getGroupName(self, id):
name = '未知群'
for member in self.GroupList:
if member['UserName'] == id:
name = member['NickName']
if name == '未知群':
# 现有群里面查不到
GroupList = self.getNameById(id)
for group in GroupList:
self.GroupList.append(group)
if group['UserName'] == id:
name = group['NickName']
MemberList = group['MemberList']
for member in MemberList:
self.GroupMemeberList.append(member)
return name
    def getUserRemarkName(self, id):
        """Resolve a UserName *id* to a human-readable display name.

        Lookup precedence (later matches override earlier ones): special
        accounts, direct contacts, then group members.  Groups ('@@' prefix)
        are resolved via getGroupName; our own id returns our nickname.
        NOTE(review): the fallback string 'YYM2.0 Suppoert Group' contains a
        typo but is also compared verbatim below — fix both or neither.
        """
        name = 'YYM2.0 Suppoert Group' if id[:2] == '@@' else '陌生人'
        if id == self.User['UserName']:
            return self.User['NickName']  # ourselves
        if id[:2] == '@@':
            # Group chat
            name = self.getGroupName(id)
        else:
            # Special system accounts
            for member in self.SpecialUsersList:
                if member['UserName'] == id:
                    name = member['RemarkName'] if member[
                        'RemarkName'] else member['NickName']
            # Official/service accounts (disabled)
            # for member in self.PublicUsersList:
            #     if member['UserName'] == id:
            #         name = member['RemarkName'] if member[
            #             'RemarkName'] else member['NickName']
            # Direct contacts
            for member in self.ContactList:
                if member['UserName'] == id:
                    name = member['RemarkName'] if member[
                        'RemarkName'] else member['NickName']
            # Group members (DisplayName wins over NickName)
            for member in self.GroupMemeberList:
                if member['UserName'] == id:
                    name = member['DisplayName'] if member[
                        'DisplayName'] else member['NickName']
        if name == 'YYM2.0 Suppoert Group' or name == '陌生人':
            logging.debug(id)
        return name
def getUSerID(self, name):
for member in self.MemberList:
if name == member['RemarkName'] or name == member['NickName']:
return member['UserName']
return None
    def _showMsg(self, message):
        """Format one message dict and print/log it as 'src -> dst: content'.

        :param message: ``{'raw_msg': <weixin msg dict>}``, optionally with a
            pre-rendered ``'message'`` string that overrides the raw content.
        """
        srcName = None
        dstName = None
        groupName = None
        content = None
        msg = message
        logging.debug(msg)
        if msg['raw_msg']:
            srcName = self.getUserRemarkName(msg['raw_msg']['FromUserName'])
            dstName = self.getUserRemarkName(msg['raw_msg']['ToUserName'])
            content = msg['raw_msg']['Content'].replace(
                '<', '<').replace('>', '>')
            message_id = msg['raw_msg']['MsgId']
            if content.find('http://weixin.qq.com/cgi-bin/redirectforward?args=') != -1:
                # Geographic location message
                data = self._get(content)
                if data == '':
                    return
                data.decode('gbk').encode('utf-8')
                # NOTE(review): the line above discards its result, and _get
                # returns str here, so .decode would raise — this branch
                # looks broken; confirm before relying on it.
                pos = self._searchContent('title', data, 'xml')
                temp = self._get(content)
                if temp == '':
                    return
                tree = html.fromstring(temp)
                url = tree.xpath('//html/body/div/img')[0].attrib['src']
                # 'center' query parameter carries the lat/long pair.
                for item in urlparse(url).query.split('&'):
                    if item.split('=')[0] == 'center':
                        loc = item.split('=')[-1:]
                content = '%s 发送了一个 位置消息 - 我在 [%s](%s) @ %s]' % (
                    srcName, pos, url, loc)
            if msg['raw_msg']['ToUserName'] == 'filehelper':
                # File transfer helper pseudo-contact
                dstName = '文件传输助手'
            if msg['raw_msg']['FromUserName'][:2] == '@@':
                # Message received from a group chat: content is prefixed
                # with the sender's UserName followed by ':<br/>'.
                if ":<br/>" in content:
                    [people, content] = content.split(':<br/>', 1)
                    groupName = srcName
                    srcName = self.getUserRemarkName(people)
                    dstName = 'GROUP'
                else:
                    # No sender prefix: a system notice inside the group.
                    groupName = srcName
                    srcName = 'SYSTEM'
            elif msg['raw_msg']['ToUserName'][:2] == '@@':
                # A message we sent to a group.
                groupName = dstName
                dstName = 'GROUP'
        # An explicit pre-rendered string overrides the raw content.
        if 'message' in list(msg.keys()):
            content = msg['message']
        if groupName != None:
            print('%s |%s| %s -> %s: %s' % (message_id, groupName.strip(), srcName.strip(), dstName.strip(), content.replace('<br/>', '\n')))
            logging.info('%s |%s| %s -> %s: %s' % (message_id, groupName.strip(),
                                                   srcName.strip(), dstName.strip(), content.replace('<br/>', '\n')))
        else:
            print('%s %s -> %s: %s' % (message_id, srcName.strip(), dstName.strip(), content.replace('<br/>', '\n')))
            logging.info('%s %s -> %s: %s' % (message_id, srcName.strip(),
                                              dstName.strip(), content.replace('<br/>', '\n')))
def handleMsg(self, r):
for msg in r['AddMsgList']:
print('[*] 你有新的消息,请注意查收')
logging.debug('[*] 你有新的消息,请注意查收')
if self.DEBUG:
fn = 'msg' + str(int(random.random() * 1000)) + '.json'
with open(fn, 'w') as f:
f.write(json.dumps(msg))
print('[*] 该消息已储存到文件: ' + fn)
logging.debug('[*] 该消息已储存到文件: %s' % (fn))
msgType = msg['MsgType']
name = self.getUserRemarkName(msg['FromUserName'])
content = msg['Content'].replace('<', '<').replace('>', '>')
msgid = msg['MsgId']
if msgType == 1:
raw_msg = {'raw_msg': msg}
self._showMsg(raw_msg)
#自己加的代码-------------------------------------------#
#if self.autoReplyRevokeMode:
# store
#自己加的代码-------------------------------------------#
if self.autoReplyMode:
ans = self._xiaodoubi(content) + '\n[Do not reply]'
if self.webwxsendmsg(ans, msg['FromUserName']):
print('自动回复: ' + ans)
logging.info('自动回复: ' + ans)
else:
print('自动回复失败')
logging.info('自动回复失败')
elif msgType == 3:
image = self.webwxgetmsgimg(msgid)
raw_msg = {'raw_msg': msg,
'message': '%s 发送了一张图片: %s' % (name, image)}
self._showMsg(raw_msg)
self._safe_open(image)
# elif msgType == 34:
# voice = self.webwxgetvoice(msgid)
# raw_msg = {'raw_msg': msg,
# 'message': '%s 发了一段语音: %s' % (name, voice)}
# self._showMsg(raw_msg)
# self._safe_open(voice)
elif msgType == 42:
info = msg['RecommendInfo']
print('%s 发送了一张名片:' % name)
print('=========================')
print('= 昵称: %s' % info['NickName'])
print('= 微信号: %s' % info['Alias'])
print('= 地区: %s %s' % (info['Province'], info['City']))
print('= 性别: %s' % ['未知', '男', '女'][info['Sex']])
print('=========================')
raw_msg = {'raw_msg': msg, 'message': '%s 发送了一张名片: %s' % (
name.strip(), json.dumps(info))}
self._showMsg(raw_msg)
elif msgType == 47:
url = self._searchContent('cdnurl', content)
raw_msg = {'raw_msg': msg,
'message': '%s 发了一个动画表情,点击下面链接查看: %s' % (name, url)}
self._showMsg(raw_msg)
self._safe_open(url)
# elif msgType == 49:
# appMsgType = defaultdict(lambda: "")
# appMsgType.update({5: '链接', 3: '音乐', 7: '微博'})
print('%s 分享了一个%s:' % (name, appMsgType[msg['AppMsgType']]))
print('=========================')
print('= 标题: %s' % msg['FileName'])
print('= 描述: %s' % self._searchContent('des', content, 'xml'))
print('= 链接: %s' % msg['Url'])
print('= 来自: %s' % self._searchContent('appname', content, 'xml'))
print('=========================')
card = {
'title': msg['FileName'],
'description': self._searchContent('des', content, 'xml'),
'url': msg['Url'],
'appname': self._searchContent('appname', content, 'xml')
}
raw_msg = {'raw_msg': msg, 'message': '%s 分享了一个%s: %s' % (
name, appMsgType[msg['AppMsgType']], json.dumps(card))}
self._showMsg(raw_msg)
elif msgType == 51:
raw_msg = {'raw_msg': msg, 'message': '[*] 成功获取联系人信息'}
self._showMsg(raw_msg)
elif msgType == 62:
video = self.webwxgetvideo(msgid)
raw_msg = {'raw_msg': msg,
'message': '%s 发了一段小视频: %s' % (name, video)}
self._showMsg(raw_msg)
self._safe_open(video)
elif msgType == 10002:
raw_msg = {'raw_msg': msg, 'message': '%s 撤回了一条消息' % name}
self._showMsg(raw_msg)
else:
logging.debug('[*] 该消息类型为: %d,可能是表情,图片, 链接或红包: %s' %
(msg['MsgType'], json.dumps(msg)))
raw_msg = {
'raw_msg': msg, 'message': '[*] 该消息类型为: %d,可能是表情,图片, 链接或红包' % msg['MsgType']}
self._showMsg(raw_msg)
#def image_to_text(pixels, width, height):
#color = "MNHQ$OC?7>!:-;. "
#string = ""
#for h in xrange(height):
# for w in xrange(width):
# rgb = pixels[w, h]
# string += color[int(sum(rgb) / 3.0 / 256.0 * 16)]
# string += "\n"
#return string
# Load image from file and resize it.
# def load_and_resize_image(imgname, width, height):
#img = Image.open(imgname)
#if img.mode != 'RGB':
# img = img.convert('RGB')
# w, h = img.size
#rw = width * 1.0 / w
#rh = height * 1.0 / h
#r = rw if rw < rh else rh
#rw = int(r * w)
#rh = int(r * h)
#img = img.resize((rw, rh), Image.ANTIALIAS)
#return img
# Convert image file to plain ascii text.
#def image_file_to_text(img_file_path, dst_width, dst_height):
# img = load_and_resize_image(img_file_path, dst_width, dst_height)
#pixels = img.load()
#width, height = img.size
#string = image_to_text(pixels, width, height)
#return string
#if __name__ == '__main__':
# imgname = sys.argv[1] # Image file path
# w = int(sys.argv[2]) # width
# h = int(sys.argv[3]) # height
#print (image_file_to_text(imgname, w, h))
def listenMsgMode(self):
print('[*] 进入消息监听模式 ... 成功')
logging.debug('[*] 进入消息监听模式 ... 成功')
self._run('[*] 进行同步线路测试 ... ', self.testsynccheck)
playWeChat = 0
redEnvelope = 0
while True:
self.lastCheckTs = time.time()
[retcode, selector] = self.synccheck()
if self.DEBUG:
print('retcode: %s, selector: %s' % (retcode, selector))
logging.debug('retcode: %s, selector: %s' % (retcode, selector))
if retcode == '1100':
print('[*] logout')
logging.debug('[*] logout')
break
if retcode == '1101':
print('[*] logout')
logging.debug('[*] logout')
break
elif retcode == '0':
if selector == '2':
r = self.webwxsync()
if r is not None:
self.handleMsg(r)
elif selector == '6':
# TODO
redEnvelope += 1
print('[*] 收到疑似红包消息 %d 次' % redEnvelope)
logging.debug('[*] 收到疑似红包消息 %d 次' % redEnvelope)
elif selector == '7':
playWeChat += 1
print('[*] logout %d 次' % playWeChat)
logging.debug('[*] logout %d 次' % playWeChat)
r = self.webwxsync()
elif selector == '0':
time.sleep(1)
if (time.time() - self.lastCheckTs) <= 20:
time.sleep(time.time() - self.lastCheckTs)
def sendMsg(self, name, word, isfile=False):
id = self.getUSerID(name)
if id:
if isfile:
with open(word, 'r') as f:
for line in f.readlines():
line = line.replace('\n', '')
self._echo('-> ' + name + ': ' + line)
if self.webwxsendmsg(line, id):
print(' [成功]')
else:
print(' [失败]')
time.sleep(1)
else:
if self.webwxsendmsg(word, id):
print('[*] 消息发送成功')
logging.debug('[*] 消息发送成功')
else:
print('[*] 消息发送失败')
logging.debug('[*] 消息发送失败')
else:
print('[*] 此用户不存在')
logging.debug('[*] 此用户不存在')
def sendMsgToAll(self, word):
for contact in self.ContactList:
name = contact['RemarkName'] if contact[
'RemarkName'] else contact['NickName']
id = contact['UserName']
self._echo('-> ' + name + ': ' + word)
if self.webwxsendmsg(word, id):
print(' [成功]')
else:
print(' [失败]')
time.sleep(1)
def sendImg(self, name, file_name):
response = self.webwxuploadmedia(file_name)
media_id = ""
if response is not None:
media_id = response['MediaId']
user_id = self.getUSerID(name)
response = self.webwxsendmsgimg(user_id, media_id)
def sendEmotion(self, name, file_name):
response = self.webwxuploadmedia(file_name)
media_id = ""
if response is not None:
media_id = response['MediaId']
user_id = self.getUSerID(name)
response = self.webwxsendmsgemotion(user_id, media_id)
@catchKeyboardInterrupt
def start(self):
self._echo('[*] 微信网页版 ... 开动')
print()
logging.debug('[*] 微信网页版 ... 开动')
while True:
self._run('[*] 正在获取 uuid ... ', self.getUUID)
self._echo('[*] 正在获取二维码 ... 成功')
print()
logging.debug('[*] 微信网页版 ... 开动')
self.genQRCode()
print('[*] 请使用微信扫描二维码以登录 ... ')
if not self.waitForLogin():
continue
print('[*] 请在手机上点击确认以登录 ... ')
if not self.waitForLogin(0):
continue
break
self._run('[*] 正在登录 ... ', self.login)
self._run('[*] 微信初始化 ... ', self.webwxinit)
self._run('[*] 开启状态通知 ... ', self.webwxstatusnotify)
self._run('[*] 获取联系人 ... ', self.webwxgetcontact)
self._echo('[*] 应有 %s 个联系人,读取到联系人 %d 个' %
(self.MemberCount, len(self.MemberList)))
print()
self._echo('[*] 共有 %d 个群 | %d 个直接联系人 ' % (len(self.GroupList),
len(self.ContactList), len(self.SpecialUsersList), len(self.PublicUsersList)))
print()
self._run('[*] 获取群 ... ', self.webwxbatchgetcontact)
logging.debug('[*] 微信网页版 ... 开动')
if self.DEBUG:
print(self)
logging.debug(self)
if self.interactive and input('[*] 是否开启自动回复模式(y/n): ') == 'y':
self.autoReplyMode = True
print('[*] 自动回复模式 ... 开启')
logging.debug('[*] 自动回复模式 ... 开启')
else:
print('[*] 自动回复模式 ... 关闭')
logging.debug('[*] 自动回复模式 ... 关闭')
if sys.platform.startswith('win'):
import _thread
_thread.start_new_thread(self.listenMsgMode())
else:
listenProcess = multiprocessing.Process(target=self.listenMsgMode)
listenProcess.start()
while True:
text = input('')
if text == 'quit':
listenProcess.terminate()
print('[*] 退出微信')
logging.debug('[*] 退出微信')
exit()
elif text[:2] == '->':
[name, word] = text[2:].split(':')
if name == 'all':
self.sendMsgToAll(word)
else:
self.sendMsg(name, word)
elif text[:3] == 'm->':
[name, file] = text[3:].split(':')
self.sendMsg(name, file, True)
elif text[:3] == 'f->':
print('发送文件')
logging.debug('发送文件')
elif text[:3] == 'i->':
print('发送图片')
[name, file_name] = text[3:].split(':')
self.sendImg(name, file_name)
logging.debug('发送图片')
elif text[:3] == 'e->':
print('发送表情')
[name, file_name] = text[3:].split(':')
self.sendEmotion(name, file_name)
logging.debug('发送表情')
def _safe_open(self, path):
if self.autoOpen:
if platform.system() == "Linux":
os.system("xdg-open %s &" % path)
else:
os.system('open %s &' % path)
def _run(self, str, func, *args):
self._echo(str)
if func(*args):
print('成功')
logging.debug('%s... 成功' % (str))
else:
print('失败\n[*] 退出程序')
logging.debug('%s... 失败' % (str))
logging.debug('[*] 退出程序')
exit()
def _echo(self, str):
sys.stdout.write(str)
sys.stdout.flush()
def _printQR(self, mat):
for i in mat:
BLACK = '\033[40m \033[0m'
WHITE = '\033[47m \033[0m'
print(''.join([BLACK if j else WHITE for j in i]))
def _str2qr(self, str):
print(str)
qr = qrcode.QRCode()
qr.border = 1
qr.add_data(str)
qr.make()
# img = qr.make_image()
# img.save("qrcode.png")
#mat = qr.get_matrix()
#self._printQR(mat) # qr.print_tty() or qr.print_ascii()
qr.print_ascii(invert=True)
def _transcoding(self, data):
if not data:
return data
result = None
if type(data) == str:
result = data
elif type(data) == str:
result = data.decode('utf-8')
return result
    def _get(self, url: str, api: str = None, timeout: float = None) -> str:
        """HTTP GET *url* with the session cookie jar.

        :param url: request URL.
        :param api: 'webwxgetvoice'/'webwxgetvideo' add a Range header and
            return raw bytes; anything else returns UTF-8-decoded text.
        :param timeout: optional socket timeout in seconds.
        :return: response body (str, or bytes for voice/video), or '' on
            any error.
        """
        request = urllib.request.Request(url=url)
        request.add_header('Referer', 'https://wx.qq.com/')
        if api == 'webwxgetvoice':
            request.add_header('Range', 'bytes=0-')
        if api == 'webwxgetvideo':
            request.add_header('Range', 'bytes=0-')
        try:
            response = urllib.request.urlopen(request, timeout=timeout) if timeout else urllib.request.urlopen(request)
            if api == 'webwxgetvoice' or api == 'webwxgetvideo':
                # Media endpoints return binary payloads; do not decode.
                data = response.read()
            else:
                data = response.read().decode('utf-8')
            logging.debug(url)
            return data
        except urllib.error.HTTPError as e:
            logging.error('HTTPError = ' + str(e.code))
        except urllib.error.URLError as e:
            logging.error('URLError = ' + str(e.reason))
        except http.client.HTTPException as e:
            logging.error('HTTPException')
        except timeout_error as e:
            # NOTE(review): timeout_error is presumably a socket-timeout
            # alias defined at the top of the file — confirm; swallowed so
            # synccheck long-polls can simply retry.
            pass
        except ssl.CertificateError as e:
            pass
        except Exception:
            import traceback
            logging.error('generic exception: ' + traceback.format_exc())
        return ''
def _post(self, url: object, params: object, jsonfmt: object = True) -> object:
if jsonfmt:
data = (json.dumps(params)).encode()
request = urllib.request.Request(url=url, data=data)
request.add_header(
'ContentType', 'application/json; charset=UTF-8')
else:
request = urllib.request.Request(url=url, data=urllib.parse.urlencode(params).encode(encoding='utf-8'))
try:
response = urllib.request.urlopen(request)
data = response.read()
if jsonfmt:
return json.loads(data.decode('utf-8') )#object_hook=_decode_dict)
return data
except urllib.error.HTTPError as e:
logging.error('HTTPError = ' + str(e.code))
except urllib.error.URLError as e:
logging.error('URLError = ' + str(e.reason))
except http.client.HTTPException as e:
logging.error('HTTPException')
except Exception:
import traceback
logging.error('generic exception: ' + traceback.format_exc())
return ''
def _xiaodoubi(self, word):
url = 'http://www.xiaodoubi.com/bot/chat.php'
try:
r = requests.post(url, data={'chat': word})
return r.content
except:
return "让我一个人静静 T_T..."
def _simsimi(self, word):
key = ''
url = 'http://sandbox.api.simsimi.com/request.p?key=%s&lc=ch&ft=0.0&text=%s' % (
key, word)
r = requests.get(url)
ans = r.json()
if ans['result'] == '100':
return ans['response']
else:
return '你在说什么,风太大听不清列'
def _searchContent(self, key, content, fmat='attr'):
if fmat == 'attr':
pm = re.search(key + '\s?=\s?"([^"<]+)"', content)
if pm:
return pm.group(1)
elif fmat == 'xml':
pm = re.search('<{0}>([^<]+)</{0}>'.format(key), content)
if not pm:
pm = re.search(
'<{0}><\!\[CDATA\[(.*?)\]\]></{0}>'.format(key), content)
if pm:
return pm.group(1)
return '未知'
class UnicodeStreamFilter:
    """Wrap a stream so text survives a limited console encoding (e.g. cp936).

    Characters the target encoding cannot represent are substituted via the
    'replace' error handler instead of raising UnicodeEncodeError.
    """

    def __init__(self, target):
        self.target = target                    # underlying stream (usually sys.stdout)
        self.encoding = 'utf-8'                 # encoding this wrapper advertises
        self.errors = 'replace'                 # policy for unencodable characters
        self.encode_to = self.target.encoding   # the console's real encoding

    def write(self, s):
        # Round-trip through the console encoding, replacing anything it
        # cannot represent, then hand the sanitized text to the real stream.
        if isinstance(s, str):
            s = s.encode().decode('utf-8')
        sanitized = s.encode(self.encode_to, self.errors).decode(self.encode_to)
        self.target.write(sanitized)

    def flush(self):
        self.target.flush()
# On the legacy Windows GBK console (code page 936) wrap stdout so that
# characters outside cp936 are replaced instead of crashing every print().
if sys.stdout.encoding == 'cp936':
    sys.stdout = UnicodeStreamFilter(sys.stdout)
if __name__ == '__main__':
    logger = logging.getLogger(__name__)
    if not sys.platform.startswith('win'):
        # coloredlogs targets POSIX terminals; skip it on Windows.
        import coloredlogs
        coloredlogs.install(level='DEBUG')
    webwx = WebWeixin()
    webwx.start()
|
{
"content_hash": "d001729dc73f79c0456c0ca6afd98d7e",
"timestamp": "",
"source": "github",
"line_count": 1253,
"max_line_length": 291,
"avg_line_length": 36.95450917797287,
"alnum_prop": 0.4869341741534209,
"repo_name": "Hoverhuang-er/CBotTester",
"id": "a85eab98a6ced9a83747e928280491f70e99969d",
"size": "47854",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "py3edition/weixin.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Go",
"bytes": "1910"
},
{
"name": "Java",
"bytes": "1045"
},
{
"name": "Python",
"bytes": "124015"
}
],
"symlink_target": ""
}
|
import unittest
from jamenson.runtime.picklep import picklep
#global functions
from dis import dis
from pprint import pprint
from tokenize import tokenize
class TestPicklep(unittest.TestCase):
def check_pickleable(self, op):
self.failUnless(picklep(op), '%s is not pickleable' % (op,))
def check_nonpickleable(self, op):
self.failIf(picklep(op), '%s is pickleable' % (op,))
def test_atomic(self):
test = self.check_pickleable
test(None)
test(3)
test(10.3)
test(5L)
test('adfaf')
test(u'afafd')
test(1j)
def test_collections(self):
test = self.check_pickleable
test(())
test([])
test({})
test(set())
test(frozenset())
test((1,2,3))
test(range(10))
test({'a':10L})
test(set('adfafadfadffad'))
test(frozenset(range(20)))
test({'zzz':[range(5)]*10})
def test_builtins(self):
test = self.check_pickleable
test(object)
test(type)
test(IOError)
test(ValueError("stuff"))
test(map)
test(xrange(20))
def test_global_functions(self):
test = self.check_pickleable
test(dis)
test(pprint)
test(tokenize)
def test_non_pickleable(self):
test = self.check_nonpickleable
test(lambda a,b: None)
def test_cyclic_collections(self):
test = self.check_pickleable
l = []
l.append(l)
test(l)
x = []
d = {'a':x}
x.append(d)
test(d)
test(x)
# Standard entry-point guard (the original used the obscure
# `__name__ == '__main__' and unittest.main()` expression form).
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "0a835f28a19a682e2c8a9ea4b36c580c",
"timestamp": "",
"source": "github",
"line_count": 74,
"max_line_length": 68,
"avg_line_length": 22.37837837837838,
"alnum_prop": 0.5452898550724637,
"repo_name": "matthagy/Jamenson",
"id": "b1133cd0e1832713047b5cea924d7add26079ad5",
"size": "1657",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jamenson/tests/runtime/picklep.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "444789"
}
],
"symlink_target": ""
}
|
"""Test configurations for nose
This module contains nose plugin hooks that configures Beam tests which
includes ValidatesRunner test and E2E integration test.
TODO(BEAM-3713): Remove this module once nose is removed.
"""
from nose.plugins import Plugin
class BeamTestPlugin(Plugin):
  """A nose plugin for Beam testing that registers command line options

  This plugin is registered through setuptools in entry_points.
  """

  def options(self, parser, env):
    """Register Beam-specific command line flags with nose.

    Registering them here keeps nose from rejecting the flags as
    unrecognized; TestPipeline later consumes '--test-pipeline-options'
    to build customized pipelines for ValidatesRunner tests.
    """
    option_specs = [
        ('--test-pipeline-options',
         dict(action='store',
              type=str,
              help='providing pipeline options to run tests on runner')),
        ('--not-use-test-runner-api',
         dict(action='store_true',
              default=False,
              help='whether not to use test-runner-api')),
    ]
    for flag, kwargs in option_specs:
      parser.add_option(flag, **kwargs)
|
{
"content_hash": "232810ab8add4f0549c519926644b886",
"timestamp": "",
"source": "github",
"line_count": 32,
"max_line_length": 79,
"avg_line_length": 35.9375,
"alnum_prop": 0.662608695652174,
"repo_name": "axbaretto/beam",
"id": "d916a7cd9b7764f0da0627687acd2f4b98fdbf50",
"size": "1935",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sdks/python/test_config.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "ANTLR",
"bytes": "1598"
},
{
"name": "Batchfile",
"bytes": "3220"
},
{
"name": "C",
"bytes": "1339873"
},
{
"name": "C++",
"bytes": "1132901"
},
{
"name": "CSS",
"bytes": "124283"
},
{
"name": "Dockerfile",
"bytes": "23950"
},
{
"name": "FreeMarker",
"bytes": "7428"
},
{
"name": "Go",
"bytes": "2795906"
},
{
"name": "Groovy",
"bytes": "187109"
},
{
"name": "HTML",
"bytes": "238575"
},
{
"name": "Java",
"bytes": "39085315"
},
{
"name": "JavaScript",
"bytes": "1221326"
},
{
"name": "Jupyter Notebook",
"bytes": "7396"
},
{
"name": "Makefile",
"bytes": "354938"
},
{
"name": "Python",
"bytes": "51449019"
},
{
"name": "Roff",
"bytes": "70716"
},
{
"name": "Ruby",
"bytes": "4159"
},
{
"name": "Shell",
"bytes": "351541"
},
{
"name": "TeX",
"bytes": "70920"
},
{
"name": "Thrift",
"bytes": "1118"
}
],
"symlink_target": ""
}
|
"""
flaskext.uploads
================
This module provides upload support for Flask. The basic pattern is to set up
an `UploadSet` object and upload your files to it.
:copyright: 2010 Matthew "LeafStorm" Frazier
:license: MIT/X11, see LICENSE for details
"""
import sys
# Python 2/3 compatibility shim: ``basestring`` only exists on Python 2,
# so expose a single ``string_types`` tuple for isinstance() checks below.
PY3 = sys.version_info[0] == 3
if PY3:
    string_types = str,
else:
    string_types = basestring,
import os.path
import posixpath
from flask import current_app, send_from_directory, abort, url_for
from itertools import chain
from werkzeug import secure_filename, FileStorage
from flask import Blueprint
# Extension presets.
# These tuples can be combined with ``+`` and passed to ``UploadSet`` as the
# ``extensions`` argument (e.g. ``TEXT + DOCUMENTS + IMAGES``).

#: This just contains plain text files (.txt).
TEXT = ('txt',)

#: This contains various office document formats (.rtf, .odf, .ods, .gnumeric,
#: .abw, .doc, .docx, .xls, and .xlsx). Note that the macro-enabled versions
#: of Microsoft Office 2007 files are not included.
DOCUMENTS = tuple('rtf odf ods gnumeric abw doc docx xls xlsx'.split())

#: This contains basic image types that are viewable from most browsers (.jpg,
#: .jpe, .jpeg, .png, .gif, .svg, and .bmp).
IMAGES = tuple('jpg jpe jpeg png gif svg bmp'.split())

#: This contains audio file types (.wav, .mp3, .aac, .ogg, .oga, and .flac).
AUDIO = tuple('wav mp3 aac ogg oga flac'.split())

#: This is for structured data files (.csv, .ini, .json, .plist, .xml, .yaml,
#: and .yml).
DATA = tuple('csv ini json plist xml yaml yml'.split())

#: This contains various types of scripts (.js, .php, .pl, .py .rb, and .sh).
#: If your Web server has PHP installed and set to auto-run, you might want to
#: add ``php`` to the DENY setting.
SCRIPTS = tuple('js php pl py rb sh'.split())

#: This contains archive and compression formats (.gz, .bz2, .zip, .tar,
#: .tgz, .txz, and .7z).
ARCHIVES = tuple('gz bz2 zip tar tgz txz 7z'.split())

#: This contains shared libraries and executable files (.so, .exe and .dll).
#: Most of the time, you will not want to allow this - it's better suited for
#: use with `AllExcept`.
EXECUTABLES = tuple('so exe dll'.split())

#: The default allowed extensions - `TEXT`, `DOCUMENTS`, `DATA`, and `IMAGES`.
DEFAULTS = TEXT + DOCUMENTS + IMAGES + DATA
class UploadNotAllowed(Exception):
    """Raised when a file fails an upload set's extension checks.

    Catch this in your view code and show the user an appropriate
    error message.
    """
def tuple_from(*iters):
    """Return a single tuple chaining the contents of all given iterables.

    Bug fix: the original called ``itertools.chain`` although the module
    only does ``from itertools import chain``, so every call raised
    ``NameError``.
    """
    return tuple(chain(*iters))
def extension(filename):
    """Return *filename*'s extension without the leading dot ('' if none)."""
    ext = os.path.splitext(filename)[1]
    # os.path.splitext keeps the '.' separator; strip it when present
    return ext[1:] if ext.startswith('.') else ext
def lowercase_ext(filename):
    """
    This is a helper used by UploadSet.save to provide lowercase extensions
    for all processed files, to compare with configured extensions in the
    same case.

    .. versionchanged:: 0.1.4
       Filenames without extensions are no longer lowercased, only the
       extension is returned in lowercase, if an extension exists.

    :param filename: The filename to ensure has a lowercase extension.
    """
    if '.' not in filename:
        # Mirror os.path.splitext: a dotless name has no extension at all,
        # so there is nothing to lowercase.
        return filename
    stem, ext = os.path.splitext(filename)
    return stem + ext.lower()
def addslash(url):
    """Append a trailing slash to *url* unless it already ends with one."""
    return url if url.endswith('/') else url + '/'
def patch_request_class(app, size=64 * 1024 * 1024):
    """
    By default, Flask will accept uploads to an arbitrary size. While Werkzeug
    switches uploads from memory to a temporary file when they hit 500 KiB,
    it's still possible for someone to overload your disk space with a
    gigantic file.

    This patches the app's request class's
    `~werkzeug.BaseRequest.max_content_length` attribute so that any upload
    larger than the given size is rejected with an HTTP error.

    .. note::

       In Flask 0.6, you can do this by setting the `MAX_CONTENT_LENGTH`
       setting, without patching the request class. To emulate this behavior,
       you can pass `None` as the size (you must pass it explicitly). That is
       the best way to call this function, as it won't break the Flask 0.6
       functionality if it exists.

    .. versionchanged:: 0.1.1

    :param app: The app to patch the request class of.
    :param size: The maximum size to accept, in bytes. The default is 64 MiB.
                 If it is `None`, the app's `MAX_CONTENT_LENGTH` configuration
                 setting will be used to patch.
    """
    if size is None:
        if isinstance(app.request_class.__dict__['max_content_length'],
                      property):
            # Flask 0.6+: max_content_length is already a property backed by
            # the MAX_CONTENT_LENGTH config key, so no patching is needed.
            return
        size = app.config.get('MAX_CONTENT_LENGTH')
    reqclass = app.request_class
    # Subclass the current request class on the fly, overriding only the
    # content-length limit, and swap it in on the app.
    patched = type(reqclass.__name__, (reqclass,),
                   {'max_content_length': size})
    app.request_class = patched
def config_for_set(uset, app, defaults=None):
    """
    This is a helper function for `configure_uploads` that extracts the
    configuration for a single set.

    :param uset: The upload set.
    :param app: The app to load the configuration from.
    :param defaults: A dict with keys `url` and `dest` from the
                     `UPLOADS_DEFAULT_DEST` and `DEFAULT_UPLOADS_URL`
                     settings.
    """
    config = app.config
    # Per-set settings live under UPLOADED_<NAME>_* config keys.
    prefix = 'UPLOADED_%s_' % uset.name.upper()
    using_defaults = False
    if defaults is None:
        defaults = dict(dest=None, url=None)

    allow_extns = tuple(config.get(prefix + 'ALLOW', ()))
    deny_extns = tuple(config.get(prefix + 'DENY', ()))
    destination = config.get(prefix + 'DEST')
    base_url = config.get(prefix + 'URL')

    # Destination resolution order: explicit config key, then the set's
    # default_dest callable, then the app-wide default dest + set name.
    if destination is None:
        # the upload set's destination wasn't given
        if uset.default_dest:
            # use the "default_dest" callable
            destination = uset.default_dest(app)
        if destination is None:  # still
            # use the default dest from the config
            if defaults['dest'] is not None:
                using_defaults = True
                destination = os.path.join(defaults['dest'], uset.name)
            else:
                raise RuntimeError("no destination for set %s" % uset.name)

    # Only derive a URL from the app-wide default when the destination also
    # came from the app-wide default (they are expected to correspond).
    if base_url is None and using_defaults and defaults['url']:
        base_url = addslash(defaults['url']) + uset.name + '/'

    return UploadConfiguration(destination, base_url, allow_extns, deny_extns)
def configure_uploads(app, upload_sets):
    """
    Call this after the app has been configured. It will go through all the
    upload sets, get their configuration, and store the configuration on the
    app. It will also register the uploads module if it hasn't been set. This
    can be called multiple times with different upload sets.

    .. versionchanged:: 0.1.3
       The uploads module/blueprint will only be registered if it is needed
       to serve the upload sets.

    :param app: The `~flask.Flask` instance to get the configuration from.
    :param upload_sets: The `UploadSet` instances to configure.
    """
    # Accept a single set or an iterable of sets.
    if isinstance(upload_sets, UploadSet):
        upload_sets = (upload_sets,)

    if not hasattr(app, 'upload_set_config'):
        app.upload_set_config = {}
    set_config = app.upload_set_config
    defaults = dict(dest=app.config.get('UPLOADS_DEFAULT_DEST'),
                    url=app.config.get('UPLOADS_DEFAULT_URL'))

    for uset in upload_sets:
        config = config_for_set(uset, app, defaults)
        set_config[uset.name] = config

    # Register the serving blueprint only once, and only if at least one set
    # has no external base_url (meaning Flask must serve its files itself).
    should_serve = any(s.base_url is None for s in set_config.values())
    if '_uploads' not in app.blueprints and should_serve:
        app.register_blueprint(uploads_mod)
class All(object):
    """Catch-all container whose membership test succeeds for every item.

    Use it (or the predefined instance `ALL`) to allow any extension.
    """

    def __contains__(self, item):
        # every item is considered a member
        return True


#: This "contains" all items. You can use it to allow all extensions to be
#: uploaded.
ALL = All()
class AllExcept(object):
    """Complement container: contains everything except the given items.

    For example, to ban .exe and .iso files, pass::

        AllExcept(('exe', 'iso'))

    to the `UploadSet` constructor as `extensions`. Any container works,
    e.g. ``AllExcept(SCRIPTS + EXECUTABLES)``.
    """

    def __init__(self, items):
        self.items = items

    def __contains__(self, item):
        # invert the membership test of the wrapped container
        return item not in self.items
class UploadConfiguration(object):
    """Value object holding the configuration for a single `UploadSet`.

    The constructor's arguments are also the attributes.

    :param destination: The directory to save files to.
    :param base_url: The URL (ending with a /) that files can be downloaded
                     from. If this is `None`, Flask-Uploads will serve the
                     files itself.
    :param allow: A list of extensions to allow, even if they're not in the
                  `UploadSet` extensions list.
    :param deny: A list of extensions to deny, even if they are in the
                 `UploadSet` extensions list.
    """

    def __init__(self, destination, base_url=None, allow=(), deny=()):
        self.destination = destination
        self.base_url = base_url
        self.allow = allow
        self.deny = deny

    @property
    def tuple(self):
        # canonical value used for equality comparisons
        return self.destination, self.base_url, self.allow, self.deny

    def __eq__(self, other):
        return self.tuple == other.tuple
class UploadSet(object):
    """
    This represents a single set of uploaded files. Each upload set is
    independent of the others. This can be reused across multiple application
    instances, as all configuration is stored on the application object itself
    and found with `flask.current_app`.

    :param name: The name of this upload set. It defaults to ``files``, but
                 you can pick any alphanumeric name you want. (For simplicity,
                 it's best to use a plural noun.)
    :param extensions: The extensions to allow uploading in this set. The
                       easiest way to do this is to add together the extension
                       presets (for example, ``TEXT + DOCUMENTS + IMAGES``).
                       It can be overridden by the configuration with the
                       `UPLOADED_X_ALLOW` and `UPLOADED_X_DENY` configuration
                       parameters. The default is `DEFAULTS`.
    :param default_dest: If given, this should be a callable. If you call it
                         with the app, it should return the default upload
                         destination path for that app.
    """

    def __init__(self, name='files', extensions=DEFAULTS, default_dest=None):
        if not name.isalnum():
            raise ValueError("Name must be alphanumeric (no underscores)")
        self.name = name
        self.extensions = extensions
        # Per-app configuration is normally looked up lazily via current_app;
        # _config acts as an explicit override (useful outside app context).
        self._config = None
        self.default_dest = default_dest

    @property
    def config(self):
        """
        This gets the current configuration. By default, it looks up the
        current application and gets the configuration from there. But if you
        don't want to go to the full effort of setting an application, or it's
        otherwise outside of a request context, set the `_config` attribute to
        an `UploadConfiguration` instance, then set it back to `None` when
        you're done.
        """
        if self._config is not None:
            return self._config
        try:
            return current_app.upload_set_config[self.name]
        except AttributeError:
            # no upload_set_config on the app: configure_uploads() was never
            # called, or we are outside an application context
            raise RuntimeError("cannot access configuration outside request")

    def url(self, filename):
        """
        This function gets the URL a file uploaded to this set would be
        accessed at. It doesn't check whether said file exists.

        :param filename: The filename to return the URL for.
        """
        base = self.config.base_url
        if base is None:
            # No external URL configured: fall back to the built-in _uploads
            # blueprint, which serves the files itself.
            return url_for('_uploads.uploaded_file', setname=self.name,
                           filename=filename, _external=True)
        else:
            return base + filename

    def path(self, filename, folder=None):
        """
        This returns the absolute path of a file uploaded to this set. It
        doesn't actually check whether said file exists.

        :param filename: The filename to return the path for.
        :param folder: The subfolder within the upload set previously used
                       to save to.
        """
        if folder is not None:
            target_folder = os.path.join(self.config.destination, folder)
        else:
            target_folder = self.config.destination
        return os.path.join(target_folder, filename)

    def file_allowed(self, storage, basename):
        """
        This tells whether a file is allowed. It should return `True` if the
        given `werkzeug.FileStorage` object can be saved with the given
        basename, and `False` if it can't. The default implementation just
        checks the extension, so you can override this if you want.

        :param storage: The `werkzeug.FileStorage` to check.
        :param basename: The basename it will be saved under.
        """
        return self.extension_allowed(extension(basename))

    def extension_allowed(self, ext):
        """
        This determines whether a specific extension is allowed. It is called
        by `file_allowed`, so if you override that but still want to check
        extensions, call back into this.

        :param ext: The extension to check, without the dot.
        """
        # config.allow wins unconditionally; config.deny vetoes entries from
        # the set's own extension list.
        return ((ext in self.config.allow) or
                (ext in self.extensions and ext not in self.config.deny))

    def get_basename(self, filename):
        # Sanitize the client-supplied name, then normalize extension case.
        return lowercase_ext(secure_filename(filename))

    def save(self, storage, folder=None, name=None):
        """
        This saves a `werkzeug.FileStorage` into this upload set. If the
        upload is not allowed, an `UploadNotAllowed` error will be raised.
        Otherwise, the file will be saved and its name (including the folder)
        will be returned.

        :param storage: The uploaded file to save.
        :param folder: The subfolder within the upload set to save to.
        :param name: The name to save the file as. If it ends with a dot, the
                     file's extension will be appended to the end. (If you
                     are using `name`, you can include the folder in the
                     `name` instead of explicitly using `folder`, i.e.
                     ``uset.save(file, name="someguy/photo_123.")``
        """
        if not isinstance(storage, FileStorage):
            raise TypeError("storage must be a werkzeug.FileStorage")

        # A name like "subdir/file.ext" implies the folder.
        if folder is None and name is not None and "/" in name:
            folder, name = os.path.split(name)

        basename = self.get_basename(storage.filename)
        if name:
            if name.endswith('.'):
                # trailing dot: keep the caller's stem, reuse the upload's
                # original extension
                basename = name + extension(basename)
            else:
                basename = name

        if not self.file_allowed(storage, basename):
            raise UploadNotAllowed()

        if folder:
            target_folder = os.path.join(self.config.destination, folder)
        else:
            target_folder = self.config.destination
        if not os.path.exists(target_folder):
            os.makedirs(target_folder)
        if os.path.exists(os.path.join(target_folder, basename)):
            # avoid clobbering an existing file with the same name
            basename = self.resolve_conflict(target_folder, basename)

        target = os.path.join(target_folder, basename)
        storage.save(target)
        if folder:
            # posixpath keeps the stored (returned) name portable across OSes
            return posixpath.join(folder, basename)
        else:
            return basename

    def resolve_conflict(self, target_folder, basename):
        """
        If a file with the selected name already exists in the target folder,
        this method is called to resolve the conflict. It should return a new
        basename for the file.

        The default implementation splits the name and extension and adds a
        suffix to the name consisting of an underscore and a number, and tries
        that until it finds one that doesn't exist.

        :param target_folder: The absolute path to the target.
        :param basename: The file's original basename.
        """
        name, ext = os.path.splitext(basename)
        count = 0
        while True:
            # try name_1.ext, name_2.ext, ... until an unused name is found
            count = count + 1
            newname = '%s_%d%s' % (name, count, ext)
            if not os.path.exists(os.path.join(target_folder, newname)):
                return newname
# Blueprint that serves stored uploads for sets without an external base_url.
uploads_mod = Blueprint('_uploads', __name__, url_prefix='/_uploads')


@uploads_mod.route('/<setname>/<path:filename>')
def uploaded_file(setname, filename):
    """Serve *filename* from the destination directory of set *setname*."""
    config = current_app.upload_set_config.get(setname)
    if config is None:
        # unknown upload set name
        abort(404)
    return send_from_directory(config.destination, filename)
class TestingFileStorage(FileStorage):
    """
    This is a helper for testing upload behavior in your application. You
    can manually create it, and its save method is overloaded to set `saved`
    to the name of the file it was saved to. All of these parameters are
    optional, so only bother setting the ones relevant to your application.

    :param stream: A stream. The default is an empty stream.
    :param filename: The filename uploaded from the client. The default is the
                     stream's name.
    :param name: The name of the form field it was loaded from. The default is
                 `None`.
    :param content_type: The content type it was uploaded as. The default is
                         ``application/octet-stream``.
    :param content_length: How long it is. The default is -1.
    :param headers: Multipart headers as a `werkzeug.Headers`. The default is
                    `None`.
    """

    def __init__(self, stream=None, filename=None, name=None,
                 content_type='application/octet-stream', content_length=-1,
                 headers=None):
        # Bug fix: forward the caller's *headers* instead of hard-coding
        # None, so header-dependent behavior can actually be tested.
        FileStorage.__init__(self, stream, filename, name=name,
                             content_type=content_type,
                             content_length=content_length,
                             headers=headers)
        # Set by save(): the destination the file was "saved" to.
        self.saved = None

    def save(self, dst, buffer_size=16384):
        """
        This marks the file as saved by setting the `saved` attribute to the
        name of the file it was saved to.

        :param dst: The file to save to.
        :param buffer_size: Ignored.
        """
        if isinstance(dst, string_types):
            self.saved = dst
        else:
            # file-like destination: record its name attribute
            self.saved = dst.name
|
{
"content_hash": "483d6591bc7a5bf0ca775d382fc9bc4d",
"timestamp": "",
"source": "github",
"line_count": 502,
"max_line_length": 79,
"avg_line_length": 37.24900398406375,
"alnum_prop": 0.634311995293866,
"repo_name": "171121130/SWI",
"id": "a10b72ce3f6ba7cef0b48d49b0d07836e701f299",
"size": "18723",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "venv/Lib/site-packages/flask_uploads.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "1316"
},
{
"name": "C",
"bytes": "311064"
},
{
"name": "C++",
"bytes": "212386"
},
{
"name": "CSS",
"bytes": "7326"
},
{
"name": "GAP",
"bytes": "11337"
},
{
"name": "HTML",
"bytes": "155293"
},
{
"name": "JavaScript",
"bytes": "6187"
},
{
"name": "Mako",
"bytes": "9463"
},
{
"name": "PowerShell",
"bytes": "8175"
},
{
"name": "Python",
"bytes": "15004056"
},
{
"name": "Tcl",
"bytes": "1284698"
}
],
"symlink_target": ""
}
|
from PySide import QtCore, QtGui
class Ui_Settings(object):
    # NOTE(review): this follows the pyside-uic setupUi/retranslateUi pattern,
    # so it is presumably generated from a Qt Designer .ui file — confirm and
    # regenerate rather than hand-editing widget code.

    def setupUi(self, Settings):
        # Build the Settings dialog: a tab widget with one "General" tab
        # (image size + history depth) and an OK/Cancel button box.
        Settings.setObjectName("Settings")
        Settings.resize(382, 286)
        self.verticalLayout = QtGui.QVBoxLayout(Settings)
        self.verticalLayout.setObjectName("verticalLayout")
        self.tabWidget = QtGui.QTabWidget(Settings)
        self.tabWidget.setObjectName("tabWidget")
        self.tabGeneral = QtGui.QWidget()
        self.tabGeneral.setObjectName("tabGeneral")
        self.verticalLayout_2 = QtGui.QVBoxLayout(self.tabGeneral)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.groupBox = QtGui.QGroupBox(self.tabGeneral)
        self.groupBox.setObjectName("groupBox")
        self.verticalLayout_3 = QtGui.QVBoxLayout(self.groupBox)
        self.verticalLayout_3.setObjectName("verticalLayout_3")
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setObjectName("gridLayout")
        self.label_2 = QtGui.QLabel(self.groupBox)
        self.label_2.setObjectName("label_2")
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        # image width spin box (1..65536, default 640)
        self.sbWidth = QtGui.QSpinBox(self.groupBox)
        self.sbWidth.setMinimum(1)
        self.sbWidth.setMaximum(65536)
        self.sbWidth.setProperty("value", 640)
        self.sbWidth.setObjectName("sbWidth")
        self.gridLayout.addWidget(self.sbWidth, 0, 1, 1, 1)
        self.cbKindSize = QtGui.QComboBox(self.groupBox)
        self.cbKindSize.setObjectName("cbKindSize")
        self.cbKindSize.addItem("")
        self.gridLayout.addWidget(self.cbKindSize, 1, 2, 1, 1)
        self.label = QtGui.QLabel(self.groupBox)
        self.label.setObjectName("label")
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        # image height spin box (1..65536, default 480)
        self.sbHeight = QtGui.QSpinBox(self.groupBox)
        self.sbHeight.setMinimum(1)
        self.sbHeight.setMaximum(65536)
        self.sbHeight.setProperty("value", 480)
        self.sbHeight.setObjectName("sbHeight")
        self.gridLayout.addWidget(self.sbHeight, 1, 1, 1, 1)
        self.label_3 = QtGui.QLabel(self.groupBox)
        self.label_3.setObjectName("label_3")
        self.gridLayout.addWidget(self.label_3, 2, 0, 1, 1)
        # history depth spin box (0..1024, default 40)
        self.sbHistoryDepth = QtGui.QSpinBox(self.groupBox)
        self.sbHistoryDepth.setMaximum(1024)
        self.sbHistoryDepth.setProperty("value", 40)
        self.sbHistoryDepth.setObjectName("sbHistoryDepth")
        self.gridLayout.addWidget(self.sbHistoryDepth, 2, 1, 1, 1)
        self.verticalLayout_3.addLayout(self.gridLayout)
        spacerItem = QtGui.QSpacerItem(20, 40, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.verticalLayout_3.addItem(spacerItem)
        self.verticalLayout_2.addWidget(self.groupBox)
        self.tabWidget.addTab(self.tabGeneral, "")
        self.verticalLayout.addWidget(self.tabWidget)
        # OK/Cancel buttons, wired to accept/reject below
        self.buttonBox = QtGui.QDialogButtonBox(Settings)
        self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
        self.buttonBox.setObjectName("buttonBox")
        self.verticalLayout.addWidget(self.buttonBox)

        self.retranslateUi(Settings)
        self.tabWidget.setCurrentIndex(0)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), Settings.accept)
        QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), Settings.reject)
        QtCore.QMetaObject.connectSlotsByName(Settings)

    def retranslateUi(self, Settings):
        # Apply (re)translated user-visible strings to the widgets.
        Settings.setWindowTitle(QtGui.QApplication.translate("Settings", "Settings", None, QtGui.QApplication.UnicodeUTF8))
        self.groupBox.setTitle(QtGui.QApplication.translate("Settings", "Image", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("Settings", "Height:", None, QtGui.QApplication.UnicodeUTF8))
        self.cbKindSize.setItemText(0, QtGui.QApplication.translate("Settings", "px", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("Settings", "Width:", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("Settings", "History depth:", None, QtGui.QApplication.UnicodeUTF8))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tabGeneral), QtGui.QApplication.translate("Settings", "General", None, QtGui.QApplication.UnicodeUTF8))
|
{
"content_hash": "dd819ba1f7fe48ee4a6aec8940fd74fb",
"timestamp": "",
"source": "github",
"line_count": 77,
"max_line_length": 165,
"avg_line_length": 57.45454545454545,
"alnum_prop": 0.7072784810126582,
"repo_name": "gil9red/fake-painter",
"id": "8fc1479dd6c2fbe3278b7be8cbd9b46a36bccdab",
"size": "4665",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "settings_ui.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "296"
},
{
"name": "Python",
"bytes": "109256"
}
],
"symlink_target": ""
}
|
import webob
from jacket.api.compute import openstack as openstack_api
from jacket.compute import test
from jacket.tests.compute.unit.api.openstack import fakes
class MapperTest(test.NoDBTestCase):
    """Checks that the OpenStack URL mappers route index requests."""

    def _assert_index_routed(self, mapper, path):
        # Shared body of both tests: route *path* through *mapper* and
        # expect the stub controller's index() response.
        class Controller(object):
            def index(self, req):
                return 'foo'

        app = fakes.TestRouter(Controller(), mapper)
        req = webob.Request.blank(path)
        resp = req.get_response(app)
        self.assertEqual(b'foo', resp.body)
        self.assertEqual(resp.status_int, 200)

    def test_resource_project_prefix(self):
        # the project mapper expects a leading project-id path segment
        self._assert_index_routed(openstack_api.ProjectMapper(),
                                  '/1234/tests')

    def test_resource_no_project_prefix(self):
        # the plain mapper routes without any project prefix
        self._assert_index_routed(openstack_api.PlainMapper(), '/tests')
|
{
"content_hash": "1e7c62acaa9d45774e022b6fc936bd90",
"timestamp": "",
"source": "github",
"line_count": 31,
"max_line_length": 61,
"avg_line_length": 33.87096774193548,
"alnum_prop": 0.6085714285714285,
"repo_name": "HybridF5/jacket",
"id": "3f82a6b69637f67997e6dc5b7e38b97bcaded0ac",
"size": "1686",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "jacket/tests/compute/unit/api/openstack/test_mapper.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "26995056"
},
{
"name": "Shell",
"bytes": "28464"
},
{
"name": "Smarty",
"bytes": "291947"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import, division, print_function
import contextlib
import warnings
import numpy as np
import pandas as pd
import pytest
from xarray import (Dataset, Variable, SerializationWarning, coding,
conventions, open_dataset)
from xarray.backends.common import WritableCFDataStore
from xarray.backends.memory import InMemoryDataStore
from xarray.conventions import decode_cf
from xarray.testing import assert_identical
from . import (
TestCase, assert_array_equal, raises_regex, requires_netCDF4,
requires_cftime_or_netCDF4, unittest, requires_dask)
from .test_backends import CFEncodedDataTest
class TestBoolTypeArray(TestCase):
    def test_booltype_array(self):
        """i1 data wrapped in BoolTypeArray should compare equal to bools."""
        # use np.bool_ — the np.bool alias (== builtin bool) was deprecated
        # in NumPy 1.20 and removed in 1.24
        x = np.array([1, 0, 1, 1, 0], dtype='i1')
        bx = conventions.BoolTypeArray(x)
        assert bx.dtype == np.bool_
        assert_array_equal(bx, np.array([True, False, True, True, False],
                                        dtype=np.bool_))
class TestNativeEndiannessArray(TestCase):
    def test(self):
        # Wrap big-endian ('>i8') data; the adapter should expose it with the
        # native-endian int64 dtype while preserving the values.
        x = np.arange(5, dtype='>i8')
        expected = np.arange(5, dtype='int64')
        a = conventions.NativeEndiannessArray(x)
        assert a.dtype == expected.dtype
        # slicing must also yield the native dtype
        assert a.dtype == expected[:].dtype
        assert_array_equal(a, expected)
def test_decode_cf_with_conflicting_fill_missing_value():
    # Conflicting _FillValue and missing_value: decoding still proceeds,
    # masking both values, and a "multiple fill" warning is emitted.
    expected = Variable(['t'], [np.nan, np.nan, 2], {'units': 'foobar'})
    var = Variable(['t'], np.arange(3),
                   {'units': 'foobar',
                    'missing_value': 0,
                    '_FillValue': 1})
    with warnings.catch_warnings(record=True) as w:
        actual = conventions.decode_cf_variable('t', var)
        assert_identical(actual, expected)
        assert 'has multiple fill' in str(w[0].message)

    # Identical NaN fill/missing values are treated as equivalent, so the
    # data decodes unchanged (both plain float and np.float32 NaN cases).
    expected = Variable(['t'], np.arange(10), {'units': 'foobar'})

    var = Variable(['t'], np.arange(10),
                   {'units': 'foobar',
                    'missing_value': np.nan,
                    '_FillValue': np.nan})
    actual = conventions.decode_cf_variable('t', var)
    assert_identical(actual, expected)

    var = Variable(['t'], np.arange(10),
                   {'units': 'foobar',
                    'missing_value': np.float32(np.nan),
                    '_FillValue': np.float32(np.nan)})
    actual = conventions.decode_cf_variable('t', var)
    assert_identical(actual, expected)
@requires_cftime_or_netCDF4
class TestEncodeCFVariable(TestCase):
    def test_incompatible_attributes(self):
        # Attributes that clash with CF encoding (explicit 'units' on
        # datetime/timedelta data, attribute vs. encoding collisions) must
        # be rejected with ValueError.
        invalid_vars = [
            Variable(['t'], pd.date_range('2000-01-01', periods=3),
                     {'units': 'foobar'}),
            Variable(['t'], pd.to_timedelta(['1 day']), {'units': 'foobar'}),
            Variable(['t'], [0, 1, 2], {'add_offset': 0}, {'add_offset': 2}),
            Variable(['t'], [0, 1, 2], {'_FillValue': 0}, {'_FillValue': 2}),
        ]
        for var in invalid_vars:
            with pytest.raises(ValueError):
                conventions.encode_cf_variable(var)

    def test_missing_fillvalue(self):
        # Encoding float data (with NaN) as an integer dtype without a
        # _FillValue should warn about lossy conversion.
        v = Variable(['x'], np.array([np.nan, 1, 2, 3]))
        v.encoding = {'dtype': 'int16'}
        with pytest.warns(Warning, match='floating point data as an integer'):
            conventions.encode_cf_variable(v)

    def test_multidimensional_coordinates(self):
        # regression test for GH1763
        # Set up test case with coordinates that have overlapping (but not
        # identical) dimensions.
        zeros1 = np.zeros((1, 5, 3))
        zeros2 = np.zeros((1, 6, 3))
        zeros3 = np.zeros((1, 5, 4))
        orig = Dataset({
            'lon1': (['x1', 'y1'], zeros1.squeeze(0), {}),
            'lon2': (['x2', 'y1'], zeros2.squeeze(0), {}),
            'lon3': (['x1', 'y2'], zeros3.squeeze(0), {}),
            'lat1': (['x1', 'y1'], zeros1.squeeze(0), {}),
            'lat2': (['x2', 'y1'], zeros2.squeeze(0), {}),
            'lat3': (['x1', 'y2'], zeros3.squeeze(0), {}),
            'foo1': (['time', 'x1', 'y1'], zeros1,
                     {'coordinates': 'lon1 lat1'}),
            'foo2': (['time', 'x2', 'y1'], zeros2,
                     {'coordinates': 'lon2 lat2'}),
            'foo3': (['time', 'x1', 'y2'], zeros3,
                     {'coordinates': 'lon3 lat3'}),
            'time': ('time', [0.], {'units': 'hours since 2017-01-01'}),
        })
        orig = conventions.decode_cf(orig)
        # Encode the coordinates, as they would be in a netCDF output file.
        enc, attrs = conventions.encode_dataset_coordinates(orig)
        # Make sure we have the right coordinates for each variable.
        foo1_coords = enc['foo1'].attrs.get('coordinates', '')
        foo2_coords = enc['foo2'].attrs.get('coordinates', '')
        foo3_coords = enc['foo3'].attrs.get('coordinates', '')
        assert set(foo1_coords.split()) == set(['lat1', 'lon1'])
        assert set(foo2_coords.split()) == set(['lat2', 'lon2'])
        assert set(foo3_coords.split()) == set(['lat3', 'lon3'])
        # Should not have any global coordinates.
        assert 'coordinates' not in attrs

    @requires_dask
    def test_string_object_warning(self):
        # Encoding a chunked object-dtype string variable should warn but
        # round-trip unchanged.
        original = Variable(
            ('x',), np.array([u'foo', u'bar'], dtype=object)).chunk()
        with pytest.warns(SerializationWarning,
                          match='dask array with dtype=object'):
            encoded = conventions.encode_cf_variable(original)
        assert_identical(original, encoded)
@requires_cftime_or_netCDF4
class TestDecodeCF(TestCase):
def test_dataset(self):
original = Dataset({
't': ('t', [0, 1, 2], {'units': 'days since 2000-01-01'}),
'foo': ('t', [0, 0, 0], {'coordinates': 'y', 'units': 'bar'}),
'y': ('t', [5, 10, -999], {'_FillValue': -999})
})
expected = Dataset({'foo': ('t', [0, 0, 0], {'units': 'bar'})},
{'t': pd.date_range('2000-01-01', periods=3),
'y': ('t', [5.0, 10.0, np.nan])})
actual = conventions.decode_cf(original)
assert_identical(expected, actual)
def test_invalid_coordinates(self):
# regression test for GH308
original = Dataset({'foo': ('t', [1, 2], {'coordinates': 'invalid'})})
actual = conventions.decode_cf(original)
assert_identical(original, actual)
def test_decode_coordinates(self):
# regression test for GH610
original = Dataset({'foo': ('t', [1, 2], {'coordinates': 'x'}),
'x': ('t', [4, 5])})
actual = conventions.decode_cf(original)
assert actual.foo.encoding['coordinates'] == 'x'
def test_0d_int32_encoding(self):
original = Variable((), np.int32(0), encoding={'dtype': 'int64'})
expected = Variable((), np.int64(0))
actual = conventions.maybe_encode_nonstring_dtype(original)
assert_identical(expected, actual)
def test_decode_cf_with_multiple_missing_values(self):
original = Variable(['t'], [0, 1, 2],
{'missing_value': np.array([0, 1])})
expected = Variable(['t'], [np.nan, np.nan, 2], {})
with warnings.catch_warnings(record=True) as w:
actual = conventions.decode_cf_variable('t', original)
assert_identical(expected, actual)
assert 'has multiple fill' in str(w[0].message)
def test_decode_cf_with_drop_variables(self):
original = Dataset({
't': ('t', [0, 1, 2], {'units': 'days since 2000-01-01'}),
'x': ("x", [9, 8, 7], {'units': 'km'}),
'foo': (('t', 'x'), [[0, 0, 0], [1, 1, 1], [2, 2, 2]],
{'units': 'bar'}),
'y': ('t', [5, 10, -999], {'_FillValue': -999})
})
expected = Dataset({
't': pd.date_range('2000-01-01', periods=3),
'foo': (('t', 'x'), [[0, 0, 0], [1, 1, 1], [2, 2, 2]],
{'units': 'bar'}),
'y': ('t', [5, 10, np.nan])
})
actual = conventions.decode_cf(original, drop_variables=("x",))
actual2 = conventions.decode_cf(original, drop_variables="x")
assert_identical(expected, actual)
assert_identical(expected, actual2)
def test_invalid_time_units_raises_eagerly(self):
ds = Dataset({'time': ('time', [0, 1], {'units': 'foobar since 123'})})
with raises_regex(ValueError, 'unable to decode time'):
decode_cf(ds)
@requires_cftime_or_netCDF4
def test_dataset_repr_with_netcdf4_datetimes(self):
# regression test for #347
attrs = {'units': 'days since 0001-01-01', 'calendar': 'noleap'}
with warnings.catch_warnings():
warnings.filterwarnings('ignore', 'unable to decode time')
ds = decode_cf(Dataset({'time': ('time', [0, 1], attrs)}))
assert '(time) object' in repr(ds)
attrs = {'units': 'days since 1900-01-01'}
ds = decode_cf(Dataset({'time': ('time', [0, 1], attrs)}))
assert '(time) datetime64[ns]' in repr(ds)
@requires_cftime_or_netCDF4
def test_decode_cf_datetime_transition_to_invalid(self):
    # manually create dataset with not-decoded date
    # Dates beyond the datetime64[ns] range must still decode (as Python
    # datetime objects) instead of raising.
    from datetime import datetime
    ds = Dataset(coords={'time': [0, 266 * 365]})
    units = 'days since 2000-01-01 00:00:00'
    ds.time.attrs = dict(units=units)
    ds_decoded = conventions.decode_cf(ds)
    expected = [datetime(2000, 1, 1, 0, 0),
                datetime(2265, 10, 28, 0, 0)]
    assert_array_equal(ds_decoded.time.values, expected)
@requires_dask
def test_decode_cf_with_dask(self):
    """Decoding a chunked Dataset must keep every non-index variable as a
    dask array and round-trip identically through compute()."""
    import dask.array as da
    original = Dataset({
        't': ('t', [0, 1, 2], {'units': 'days since 2000-01-01'}),
        'foo': ('t', [0, 0, 0], {'coordinates': 'y', 'units': 'bar'}),
        'bar': ('string2', [b'a', b'b']),
        'baz': (('x'), [b'abc'], {'_Encoding': 'utf-8'}),
        'y': ('t', [5, 10, -999], {'_FillValue': -999})
    }).chunk()
    decoded = conventions.decode_cf(original)
    # (removed leftover debug ``print(decoded)`` — tests should not write
    # to stdout)
    assert all(isinstance(var.data, da.Array)
               for name, var in decoded.variables.items()
               if name not in decoded.indexes)
    assert_identical(decoded, conventions.decode_cf(original).compute())
class CFEncodedInMemoryStore(WritableCFDataStore, InMemoryDataStore):
    # In-memory CF data store used below to exercise the CF encoding path
    # without touching disk.
    def encode_variable(self, var):
        """Encode one variable, applying string/unicode encoding only."""
        coder = coding.strings.EncodedStringCoder(allows_unicode=True)
        var = coder.encode(var)
        return var
@requires_netCDF4
class TestCFEncodedDataStore(CFEncodedDataTest, TestCase):
    """Run the shared CF-encoded-store test suite against the in-memory
    store defined above."""

    @contextlib.contextmanager
    def create_store(self):
        yield CFEncodedInMemoryStore()

    @contextlib.contextmanager
    def roundtrip(self, data, save_kwargs={}, open_kwargs={},
                  allow_cleanup_failure=False):
        # NOTE(review): mutable default kwargs mirror the base-class
        # signature; they are only read (**-expanded), never mutated.
        store = CFEncodedInMemoryStore()
        data.dump_to_store(store, **save_kwargs)
        yield open_dataset(store, **open_kwargs)

    def test_roundtrip_coordinates(self):
        raise unittest.SkipTest('cannot roundtrip coordinates yet for '
                                'CFEncodedInMemoryStore')

    # The following base-class tests do not apply to the in-memory store,
    # so they are overridden as no-ops.
    def test_invalid_dataarray_names_raise(self):
        pass

    def test_encoding_kwarg(self):
        pass
|
{
"content_hash": "915ac0d190acf7664fb12a52a1c98067",
"timestamp": "",
"source": "github",
"line_count": 275,
"max_line_length": 79,
"avg_line_length": 41.42181818181818,
"alnum_prop": 0.556491967342639,
"repo_name": "jcmgray/xarray",
"id": "62ff8d7ee1a1753d855b0d29d00b60636653e9af",
"size": "11415",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "xarray/tests/test_conventions.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PowerShell",
"bytes": "3149"
},
{
"name": "Python",
"bytes": "1893404"
}
],
"symlink_target": ""
}
|
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from textwrap import dedent
from typing import TYPE_CHECKING, Optional, Sequence
import simplesqlite
from simplesqlite import SimpleSQLite
from tabledata import TableData
from typepy import String
from .._common import ResultLogger
try:
import ujson as json
except ImportError:
import json # type: ignore
if TYPE_CHECKING:
from ._base import SourceInfo # noqa
class TableCreator:
    """Copy a :class:`TableData` into the destination SQLite database.

    Data is first staged in an in-memory database. If the destination
    already contains a table with the same name but a different schema,
    the incoming table is written under a uniquely suffixed name instead
    of being appended.
    """

    def __init__(
        self,
        logger,
        dst_con: SimpleSQLite,
        add_pri_key_name: Optional[str],
        result_logger: ResultLogger,
        verbosity_level: int,
        max_workers: int,
    ) -> None:
        self.__logger = logger
        self.__dst_con = dst_con
        self.__add_pri_key_name = add_pri_key_name
        self.__result_logger = result_logger
        self.__verbosity_level = verbosity_level
        self.__max_workers = max_workers

    def create(
        self, table_data: TableData, index_list: Sequence[str], source_info: "SourceInfo"
    ) -> None:
        """Stage *table_data* in memory, then copy (renamed) or append it
        into the destination connection and create the requested indexes."""
        con_mem = simplesqlite.connect_memdb(max_workers=self.__max_workers)
        con_mem.create_table_from_tabledata(
            table_data,
            primary_key=self.__add_pri_key_name,
            add_primary_key_column=String(self.__add_pri_key_name).is_type(),
        )
        src_table_name = con_mem.fetch_table_names()[0]
        dst_table_name = src_table_name
        if self.__require_rename_table(con_mem, src_table_name):
            # schema clash: write to a fresh table name instead of appending
            dst_table_name = self.__make_unique_table_name(src_table_name)
            self.__logger.debug(f"rename table from '{src_table_name}' to '{dst_table_name}'")
            is_create_table = True
            simplesqlite.copy_table(
                src_con=con_mem,
                dst_con=self.__dst_con,
                src_table_name=src_table_name,
                dst_table_name=dst_table_name,
            )
        else:
            is_create_table = not self.__dst_con.has_table(dst_table_name)
            simplesqlite.append_table(
                src_con=con_mem, dst_con=self.__dst_con, table_name=dst_table_name
            )
        self.__dst_con.create_index_list(dst_table_name, index_list)
        self.__result_logger.logging_success(
            source_info.get_name(self.__verbosity_level), dst_table_name, is_create_table
        )

    def __require_rename_table(self, src_con: SimpleSQLite, src_table_name: str) -> bool:
        """Return True when the destination already holds *src_table_name*
        with a different schema, i.e. the incoming table must be renamed."""
        if not self.__dst_con.has_table(src_table_name):
            return False
        lhs = self.__dst_con.schema_extractor.fetch_table_schema(src_table_name).as_dict()
        rhs = src_con.schema_extractor.fetch_table_schema(src_table_name).as_dict()
        if lhs != rhs:
            self.__logger.debug(
                dedent(
                    """\
                    require rename '{table}' because of src table and dst table has
                    a different schema with the same table name:
                    dst-schema={dst_schema}
                    src-schema={src_schema}
                    """
                ).format(
                    table=src_table_name,
                    # BUG FIX: ``lhs`` is the destination schema and ``rhs``
                    # the source schema; they were previously passed swapped,
                    # so the log showed each schema under the wrong label.
                    src_schema=json.dumps(rhs, indent=4, ensure_ascii=False),
                    dst_schema=json.dumps(lhs, indent=4, ensure_ascii=False),
                )
            )
            return True
        return False

    def __make_unique_table_name(self, table_name_base: str) -> str:
        """Return *table_name_base*, suffixed with the first integer that
        makes it unique among existing destination tables."""
        exist_table_names = self.__dst_con.fetch_table_names()
        if table_name_base not in exist_table_names:
            return table_name_base
        suffix_id = 1
        while True:
            table_name_candidate = f"{table_name_base:s}_{suffix_id:d}"
            if table_name_candidate not in exist_table_names:
                return table_name_candidate
            suffix_id += 1
|
{
"content_hash": "5eba6c419fe513c1c3f7daa52fc7324d",
"timestamp": "",
"source": "github",
"line_count": 120,
"max_line_length": 94,
"avg_line_length": 32.325,
"alnum_prop": 0.5743748388759989,
"repo_name": "thombashi/sqlitebiter",
"id": "35b89da528b820e471051e27a4fd5e4cc1a529f9",
"size": "3879",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sqlitebiter/converter/_table_creator.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "110"
},
{
"name": "Dockerfile",
"bytes": "559"
},
{
"name": "Jupyter Notebook",
"bytes": "48962"
},
{
"name": "Makefile",
"bytes": "2262"
},
{
"name": "PowerShell",
"bytes": "415"
},
{
"name": "Python",
"bytes": "151333"
},
{
"name": "Shell",
"bytes": "4402"
}
],
"symlink_target": ""
}
|
import tornado.web
from traffic_cloud_utils.pm import default_config_dict
from baseHandler import BaseHandler
class DefaultConfigHandler(BaseHandler):
    """
    @api {get} /defaultConfig/ Default Configuration
    @apiName Default Configuration
    @apiVersion 0.1.0
    @apiGroup Configuration
    @apiDescription Calling this route will return the default values for the configuration files used by the server. This is useful if you want to display the values of the configuration parameters to the user, without hardcoding the values and potentially being incorrect if default values on the server change.

    @apiSuccess {Integer} max_features_per_frame This is the maximum number of features to track per frame.
    @apiSuccess {Integer} num_displacement_frames This parameter determines how long features will be tracked.
    @apiSuccess {Number} min_feature_displacement This is the displacement needed to track a feature.
    @apiSuccess {Integer} max_iterations_to_persist This is the maximum number of iterations that an unmoving feature should persist.
    @apiSuccess {Integer} min_feature_frames This is the minimum number of frames that a feature must persist in order to be considered a feature.
    @apiSuccess {Number} max_connection_distance This is the maximum distance that two features can be apart and still be considered part of the same object.
    @apiSuccess {Number} max_segmentation_distance This is the maximum distance that two features that are moving relative to each other can be apart and still be considered part of the same object.

    @apiError error_message The error message to display.
    """
    def get(self):
        # Serve the server-side defaults so clients never hard-code them;
        # finish() sends the dict as the response body (JSON via Tornado).
        self.finish(default_config_dict())
|
{
"content_hash": "dfb97726474b5effebcb0576542e504b",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 313,
"avg_line_length": 61.142857142857146,
"alnum_prop": 0.7745327102803738,
"repo_name": "santosfamilyfoundation/SantosCloud",
"id": "6e4ff4cc6723b584b6b01fb716557fc0c217cf94",
"size": "1735",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "app/handlers/defaultConfig.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "11070"
},
{
"name": "HTML",
"bytes": "29023"
},
{
"name": "JavaScript",
"bytes": "100421"
},
{
"name": "Python",
"bytes": "282219"
},
{
"name": "Shell",
"bytes": "79"
}
],
"symlink_target": ""
}
|
import sys
from typing import Any, List, Sequence, Tuple, Union, TYPE_CHECKING
from numpy import dtype
from ._shape import _ShapeLike
# ``Protocol``/``TypedDict`` come from ``typing`` on 3.8+, from
# ``typing_extensions`` on older interpreters, and may be missing entirely;
# HAVE_PROTOCOL records whether they are usable at runtime.
if sys.version_info >= (3, 8):
    from typing import Protocol, TypedDict
    HAVE_PROTOCOL = True
else:
    try:
        from typing_extensions import Protocol, TypedDict
    except ImportError:
        HAVE_PROTOCOL = False
    else:
        HAVE_PROTOCOL = True

_DTypeLikeNested = Any  # TODO: wait for support for recursive types

if TYPE_CHECKING or HAVE_PROTOCOL:
    # Mandatory keys
    class _DTypeDictBase(TypedDict):
        names: Sequence[str]
        formats: Sequence[_DTypeLikeNested]

    # Mandatory + optional keys
    class _DTypeDict(_DTypeDictBase, total=False):
        offsets: Sequence[int]
        titles: Sequence[Any]  # Only `str` elements are usable as indexing aliases, but all objects are legal
        itemsize: int
        aligned: bool

    # A protocol for anything with the dtype attribute
    class _SupportsDType(Protocol):
        dtype: _DTypeLikeNested
else:
    # Structural-typing support is unavailable at runtime: degrade the
    # aliases to ``Any`` (annotations remain usable, just unchecked).
    _DTypeDict = Any
    _SupportsDType = Any

# Would create a dtype[np.void]
_VoidDTypeLike = Union[
    # (flexible_dtype, itemsize)
    Tuple[_DTypeLikeNested, int],
    # (fixed_dtype, shape)
    Tuple[_DTypeLikeNested, _ShapeLike],
    # [(field_name, field_dtype, field_shape), ...]
    #
    # The type here is quite broad because NumPy accepts quite a wide
    # range of inputs inside the list; see the tests for some
    # examples.
    List[Any],
    # {'names': ..., 'formats': ..., 'offsets': ..., 'titles': ...,
    # 'itemsize': ...}
    _DTypeDict,
    # (base_dtype, new_dtype)
    Tuple[_DTypeLikeNested, _DTypeLikeNested],
]

# Anything that can be coerced into numpy.dtype.
# Reference: https://docs.scipy.org/doc/numpy/reference/arrays.dtypes.html
DTypeLike = Union[
    dtype,
    # default data type (float64)
    None,
    # array-scalar types and generic types
    type,  # TODO: enumerate these when we add type hints for numpy scalars
    # anything with a dtype attribute
    _SupportsDType,
    # character codes, type strings or comma-separated fields, e.g., 'float64'
    str,
    _VoidDTypeLike,
]

# NOTE: while it is possible to provide the dtype as a dict of
# dtype-like objects (e.g. `{'field1': ..., 'field2': ..., ...}`),
# this syntax is officially discouraged and
# therefore not included in the Union defining `DTypeLike`.
#
# See https://github.com/numpy/numpy/issues/16891 for more details.
|
{
"content_hash": "d60ea633e1670a3e69b7e1b446b9fb97",
"timestamp": "",
"source": "github",
"line_count": 81,
"max_line_length": 110,
"avg_line_length": 30.567901234567902,
"alnum_prop": 0.6680129240710824,
"repo_name": "grlee77/numpy",
"id": "1953bd5fcfcc668243f655631b5b00810f8740e0",
"size": "2476",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "numpy/typing/_dtype_like.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "4717095"
},
{
"name": "C++",
"bytes": "429200"
},
{
"name": "Fortran",
"bytes": "11108"
},
{
"name": "JavaScript",
"bytes": "16928"
},
{
"name": "Makefile",
"bytes": "4290"
},
{
"name": "Python",
"bytes": "8787953"
},
{
"name": "Shell",
"bytes": "9184"
},
{
"name": "Smarty",
"bytes": "3835"
},
{
"name": "sed",
"bytes": "5741"
}
],
"symlink_target": ""
}
|
import copy
import functools
import uuid
import six
from glance.common import exception
import glance.openstack.common.log as logging
from glance.openstack.common import timeutils
LOG = logging.getLogger(__name__)
# Module-level in-memory datastore shared by every function in this driver.
DATA = {
    'images': {},
    # BUG FIX: 'members' must be a list — reset() re-creates it as [] and
    # image_member_create() appends to it; initializing it as {} raised
    # AttributeError on the first member creation before any reset().
    'members': [],
    'metadef_namespace_resource_types': [],
    'metadef_namespaces': [],
    'metadef_objects': [],
    'metadef_properties': [],
    'metadef_resource_types': [],
    'tags': {},
    'locations': [],
    'tasks': {},
    'task_info': {}
}

INDEX = 0
def log_call(func):
    """Decorator that logs each call to *func* — its positional and keyword
    arguments and its return value — at INFO level."""
    @functools.wraps(func)
    def wrapped(*args, **kwargs):
        LOG.info(_('Calling %(funcname)s: args=%(args)s, kwargs=%(kwargs)s') %
                 {"funcname": func.__name__,
                  "args": args,
                  "kwargs": kwargs})
        output = func(*args, **kwargs)
        LOG.info(_('Returning %(funcname)s: %(output)s') %
                 {"funcname": func.__name__,
                  "output": output})
        return output
    return wrapped
def reset():
    """Re-initialize the in-memory datastore to its pristine, empty state."""
    global DATA
    fresh = {key: {} for key in ('images', 'tags', 'tasks', 'task_info')}
    fresh.update(
        (key, [])
        for key in (
            'members',
            'metadef_namespace_resource_types',
            'metadef_namespaces',
            'metadef_objects',
            'metadef_properties',
            'metadef_resource_types',
            'locations',
        )
    )
    DATA = fresh
def clear_db_env(*args, **kwargs):
    """Set up global environment configuration variables.

    This in-memory driver has no connection-oriented configuration, so the
    call is a no-op.
    """
    return None
def _get_session():
    # The "session" of this in-memory driver is simply the module-level
    # DATA dict; there is no real database connection to hand out.
    return DATA
def _image_location_format(image_id, value, meta_data, status, deleted=False):
    """Build a location record for *image_id* with a fresh UUID and
    created/updated timestamps set to now; *value* becomes the 'url' key
    and *meta_data* the 'metadata' key."""
    dt = timeutils.utcnow()
    return {
        'id': str(uuid.uuid4()),
        'image_id': image_id,
        'created_at': dt,
        'updated_at': dt,
        'deleted_at': dt if deleted else None,
        'deleted': deleted,
        'url': value,
        'metadata': meta_data,
        'status': status,
    }
def _image_property_format(image_id, name, value):
return {
'image_id': image_id,
'name': name,
'value': value,
'deleted': False,
'deleted_at': None,
}
def _image_member_format(image_id, tenant_id, can_share, status='pending'):
    """Build a membership record linking *tenant_id* to *image_id*, with a
    fresh UUID and both timestamps set to now."""
    dt = timeutils.utcnow()
    return {
        'id': str(uuid.uuid4()),
        'image_id': image_id,
        'member': tenant_id,
        'can_share': can_share,
        'status': status,
        'created_at': dt,
        'updated_at': dt,
    }
def _pop_task_info_values(values):
task_info_values = {}
for k, v in values.items():
if k in ['input', 'result', 'message']:
values.pop(k)
task_info_values[k] = v
return task_info_values
def _format_task_from_db(task_ref, task_info_ref):
    """Merge a task record with its task-info record into one dict,
    without mutating either input."""
    merged = copy.deepcopy(task_ref)
    if task_info_ref:
        info_copy = copy.deepcopy(task_info_ref)
        merged.update(_pop_task_info_values(info_copy))
    return merged
def _task_format(task_id, **values):
    """Build a task record with defaults (pending import task, both
    timestamps set to now), overridden by any **values given."""
    dt = timeutils.utcnow()
    task = {
        'id': task_id,
        'type': 'import',
        'status': 'pending',
        'owner': None,
        'expires_at': None,
        'created_at': dt,
        'updated_at': dt,
        'deleted_at': None,
        'deleted': False,
    }
    task.update(values)
    return task
def _task_info_format(task_id, **values):
task_info = {
'task_id': task_id,
'input': None,
'result': None,
'message': None,
}
task_info.update(values)
return task_info
def _image_format(image_id, **values):
    """Build an image record from defaults plus **values.

    Location dicts found in *values* are expanded into full location
    records, attached to the image AND appended to the global
    DATA['locations'] list (shared dict objects). Properties are stored as
    a list of dicts to mirror the sqlalchemy driver.
    """
    dt = timeutils.utcnow()
    image = {
        'id': image_id,
        'name': None,
        'owner': None,
        'locations': [],
        'status': 'queued',
        'protected': False,
        'is_public': False,
        'container_format': None,
        'disk_format': None,
        'min_ram': 0,
        'min_disk': 0,
        'size': None,
        'virtual_size': None,
        'checksum': None,
        'tags': [],
        'created_at': dt,
        'updated_at': dt,
        'deleted_at': None,
        'deleted': False,
    }

    locations = values.pop('locations', None)
    if locations is not None:
        image['locations'] = []
        for location in locations:
            location_ref = _image_location_format(image_id,
                                                  location['url'],
                                                  location['metadata'],
                                                  location['status'])
            image['locations'].append(location_ref)
            DATA['locations'].append(location_ref)

    # NOTE(bcwaldon): store properties as a list to match sqlalchemy driver
    properties = values.pop('properties', {})
    properties = [{'name': k,
                   'value': v,
                   'image_id': image_id,
                   'deleted': False} for k, v in properties.items()]
    image['properties'] = properties

    image.update(values)
    return image
def _filter_images(images, filters, context,
                   status='accepted', is_public=None,
                   admin_as_user=False):
    """Return the subset of *images* visible in *context* that match every
    entry of *filters*.

    Supports the 'visibility' pseudo-filter (public/private/shared),
    ``*_min``/``*_max`` numeric range filters, a 'tags' filter, and falls
    back to matching against image properties for unknown keys.
    NOTE: mutates *filters* (pops 'properties' and 'visibility').
    """
    filtered_images = []
    if 'properties' in filters:
        # fold nested property filters into the flat filter dict
        prop_filter = filters.pop('properties')
        filters.update(prop_filter)

    if status == 'all':
        status = None

    visibility = filters.pop('visibility', None)

    for image in images:
        member = image_member_find(context, image_id=image['id'],
                                   member=context.owner, status=status)
        is_member = len(member) > 0
        has_ownership = context.owner and image['owner'] == context.owner
        can_see = (image['is_public'] or has_ownership or is_member or
                   (context.is_admin and not admin_as_user))
        if not can_see:
            continue

        if visibility:
            if visibility == 'public':
                if not image['is_public']:
                    continue
            elif visibility == 'private':
                if image['is_public']:
                    continue
                if not (has_ownership or (context.is_admin
                        and not admin_as_user)):
                    continue
            elif visibility == 'shared':
                if not is_member:
                    continue

        if is_public is not None:
            if not image['is_public'] == is_public:
                continue

        to_add = True
        for k, value in six.iteritems(filters):
            key = k
            if k.endswith('_min') or k.endswith('_max'):
                key = key[0:-4]
                try:
                    value = int(value)
                except ValueError:
                    msg = _("Unable to filter on a range "
                            "with a non-numeric value.")
                    raise exception.InvalidFilterRangeValue(msg)
            if k.endswith('_min'):
                to_add = image.get(key) >= value
            elif k.endswith('_max'):
                to_add = image.get(key) <= value
            elif k != 'is_public' and image.get(k) is not None:
                to_add = image.get(key) == value
            elif k == 'tags':
                filter_tags = value
                image_tags = image_tag_get_all(context, image['id'])
                for tag in filter_tags:
                    if tag not in image_tags:
                        to_add = False
                        break
            else:
                # unknown key: match against the image's property records
                # (only live, non-deleted properties count)
                to_add = False
                for p in image['properties']:
                    properties = {p['name']: p['value'],
                                  'deleted': p['deleted']}
                    to_add |= (properties.get(key) == value and
                               properties.get('deleted') is False)

            if not to_add:
                break

        if to_add:
            filtered_images.append(image)

    return filtered_images
def _do_pagination(context, images, marker, limit, show_deleted,
                   status='accepted'):
    """Slice *images* to the page starting after *marker* (exclusive), at
    most *limit* entries long.

    :raises exception.NotFound: if *marker* is given but is not an
        accessible image or does not appear in *images*
    """
    start = 0
    end = -1
    if marker is None:
        start = 0
    else:
        # Check that the image is accessible
        _image_get(context, marker, force_show_deleted=show_deleted,
                   status=status)

        for i, image in enumerate(images):
            if image['id'] == marker:
                start = i + 1
                break
        else:
            # marker not present in the (already filtered) result set
            raise exception.NotFound()

    end = start + limit if limit is not None else None
    return images[start:end]
def _sort_images(images, sort_key, sort_dir):
reverse = False
if images and not (sort_key in images[0]):
raise exception.InvalidSortKey()
keyfn = lambda x: (x[sort_key] if x[sort_key] is not None else '',
x['created_at'], x['id'])
reverse = sort_dir == 'desc'
images.sort(key=keyfn, reverse=reverse)
return images
def _image_get(context, image_id, force_show_deleted=False, status=None):
    """Fetch the raw image record, enforcing deletion and visibility rules.

    :raises exception.NotFound: unknown id, or deleted and not showable
    :raises exception.Forbidden: exists but not visible to the caller

    NOTE(review): *status* is accepted for signature parity with callers
    (e.g. _do_pagination) but is not used by this implementation.
    """
    try:
        image = DATA['images'][image_id]
    except KeyError:
        LOG.info(_('Could not find image %s') % image_id)
        raise exception.NotFound()

    if image['deleted'] and not (force_show_deleted or context.show_deleted):
        LOG.info(_('Unable to get deleted image'))
        raise exception.NotFound()

    if not is_image_visible(context, image):
        LOG.info(_('Unable to get unowned image'))
        raise exception.Forbidden("Image not visible to you")

    return image
@log_call
def image_get(context, image_id, session=None, force_show_deleted=False):
    """Return a deep copy of the image record with its locations trimmed
    down to the API-visible fields."""
    image = _image_get(context, image_id, force_show_deleted)
    return _normalize_locations(copy.deepcopy(image),
                                force_show_deleted=force_show_deleted)
@log_call
def image_get_all(context, filters=None, marker=None, limit=None,
                  sort_key='created_at', sort_dir='desc',
                  member_status='accepted', is_public=None,
                  admin_as_user=False, return_tag=False):
    """Return deep copies of all visible images matching *filters*, sorted
    on *sort_key* and paginated after *marker* (at most *limit* entries).

    :param return_tag: also attach each image's tag list as 'tags'
    """
    filters = filters or {}
    # Materialize into a list: on Python 3 dict.values() is a view with no
    # sort(), and _sort_images() sorts the sequence in place.
    images = list(DATA['images'].values())
    images = _filter_images(images, filters, context, member_status,
                            is_public, admin_as_user)
    images = _sort_images(images, sort_key, sort_dir)
    images = _do_pagination(context, images, marker, limit,
                            filters.get('deleted'))

    force_show_deleted = True if filters.get('deleted') else False
    res = []
    for image in images:
        img = _normalize_locations(copy.deepcopy(image),
                                   force_show_deleted=force_show_deleted)
        if return_tag:
            img['tags'] = image_tag_get_all(context, img['id'])
        res.append(img)
    return res
@log_call
def image_property_create(context, values):
    """Attach a new property record (values: image_id, name, value) to an
    existing image and return the record."""
    image = _image_get(context, values['image_id'])
    prop = _image_property_format(values['image_id'],
                                  values['name'],
                                  values['value'])
    image['properties'].append(prop)
    return prop
@log_call
def image_property_delete(context, prop_ref, image_ref):
    """Soft-delete the property named *prop_ref* on image *image_ref*.

    :raises exception.NotFound: if no property with that name exists
    """
    prop = None
    # NOTE(review): no break — if the name appears more than once, the
    # last matching record wins.
    for p in DATA['images'][image_ref]['properties']:
        if p['name'] == prop_ref:
            prop = p

    if not prop:
        raise exception.NotFound()

    prop['deleted_at'] = timeutils.utcnow()
    prop['deleted'] = True

    return prop
@log_call
def image_member_find(context, image_id=None, member=None, status=None):
    """Return deep copies of membership records matching the given
    criteria; non-admin callers only see records where they are either the
    member or the image owner."""
    filters = []
    images = DATA['images']
    members = DATA['members']

    def is_visible(member):
        return (member['member'] == context.owner or
                images[member['image_id']]['owner'] == context.owner)

    if not context.is_admin:
        filters.append(is_visible)

    if image_id is not None:
        filters.append(lambda m: m['image_id'] == image_id)
    if member is not None:
        filters.append(lambda m: m['member'] == member)
    if status is not None:
        filters.append(lambda m: m['status'] == status)

    # chain the predicates; the final list comprehension consumes them
    for f in filters:
        members = filter(f, members)
    return [copy.deepcopy(m) for m in members]
@log_call
def image_member_count(context, image_id):
    """Return the number of image members for this image

    :param image_id: identifier of image entity
    :raises exception.Invalid: if *image_id* is falsy
    """
    if not image_id:
        msg = _("Image id is required.")
        raise exception.Invalid(msg)

    members = DATA['members']
    # sum() over a generator works on both Python 2 and 3; the previous
    # len(filter(...)) fails on Python 3 where filter() returns a lazy
    # iterator with no len().
    return sum(1 for x in members if x['image_id'] == image_id)
@log_call
def image_member_create(context, values):
    """Create a membership record from *values* (image_id, member, and
    optional can_share/status) and return a deep copy of it."""
    member = _image_member_format(values['image_id'],
                                  values['member'],
                                  values.get('can_share', False),
                                  values.get('status', 'pending'))
    global DATA
    DATA['members'].append(member)
    return copy.deepcopy(member)
@log_call
def image_member_update(context, member_id, values):
    """Update a membership record in place and return a deep copy.

    :raises exception.NotFound: if *member_id* does not exist
    """
    global DATA
    for member in DATA['members']:
        if (member['id'] == member_id):
            member.update(values)
            member['updated_at'] = timeutils.utcnow()
            return copy.deepcopy(member)
    else:
        # for/else: the loop ran to completion without returning
        raise exception.NotFound()
@log_call
def image_member_delete(context, member_id):
    """Hard-delete a membership record.

    :raises exception.NotFound: if *member_id* does not exist
    """
    global DATA
    for i, member in enumerate(DATA['members']):
        if (member['id'] == member_id):
            # deleting during enumerate is safe here because we break
            del DATA['members'][i]
            break
    else:
        raise exception.NotFound()
@log_call
def image_location_add(context, image_id, location):
    """Append a new location record (from the dict *location* with url,
    metadata, status keys) to both the image and the global location
    list; a 'deleted'/'pending_delete' status creates it pre-deleted."""
    deleted = location['status'] in ('deleted', 'pending_delete')
    location_ref = _image_location_format(image_id,
                                          value=location['url'],
                                          meta_data=location['metadata'],
                                          status=location['status'],
                                          deleted=deleted)
    DATA['locations'].append(location_ref)
    image = DATA['images'][image_id]
    image.setdefault('locations', []).append(location_ref)
@log_call
def image_location_update(context, image_id, location):
    """Update an existing location record of an image in place.

    :raises exception.Invalid: if *location* carries no id
    :raises exception.NotFound: if no matching record exists
    """
    loc_id = location.get('id')
    if loc_id is None:
        # BUG FIX: use %s — loc_id is None here, and '%d' raised a
        # TypeError instead of reporting the invalid ID.
        msg = _("The location data has an invalid ID: %s") % loc_id
        raise exception.Invalid(msg)

    deleted = location['status'] in ('deleted', 'pending_delete')
    updated_time = timeutils.utcnow()
    delete_time = updated_time if deleted else None

    updated = False
    for loc in DATA['locations']:
        if (loc['id'] == loc_id and loc['image_id'] == image_id):
            # BUG FIX: records are created with 'url'/'metadata' keys (see
            # _image_location_format); the old code wrote to "value" and
            # "meta_data", so the stored url/metadata were never updated.
            loc.update({"url": location['url'],
                        "metadata": location['metadata'],
                        "status": location['status'],
                        "deleted": deleted,
                        "updated_at": updated_time,
                        "deleted_at": delete_time})
            updated = True
            break

    if not updated:
        msg = (_("No location found with ID %(loc)s from image %(img)s") %
               dict(loc=loc_id, img=image_id))
        LOG.warn(msg)
        raise exception.NotFound(msg)
@log_call
def image_location_delete(context, image_id, location_id, status,
                          delete_time=None):
    """Soft-delete one location of an image.

    :param status: must be 'deleted' or 'pending_delete'
    :raises exception.Invalid: for any other status
    :raises exception.NotFound: if the location record does not exist
    """
    if status not in ('deleted', 'pending_delete'):
        msg = _("The status of deleted image location can only be set to "
                "'pending_delete' or 'deleted'.")
        raise exception.Invalid(msg)

    deleted = False
    for loc in DATA['locations']:
        if (loc['id'] == location_id and loc['image_id'] == image_id):
            deleted = True
            delete_time = delete_time or timeutils.utcnow()
            loc.update({"deleted": deleted,
                        "status": status,
                        "updated_at": delete_time,
                        "deleted_at": delete_time})
            break

    if not deleted:
        msg = (_("No location found with ID %(loc)s from image %(img)s") %
               dict(loc=location_id, img=image_id))
        LOG.warn(msg)
        raise exception.NotFound(msg)
def _image_locations_set(context, image_id, locations):
    """Replace the image's location set: soft-delete and purge locations
    absent from *locations*, then add (no id) or update (has id) the
    remaining entries."""
    # NOTE(zhiyan): 1. Remove records from DB for deleted locations
    used_loc_ids = [loc['id'] for loc in locations if loc.get('id')]
    image = DATA['images'][image_id]
    for loc in image['locations']:
        if loc['id'] not in used_loc_ids and not loc['deleted']:
            image_location_delete(context, image_id, loc['id'], 'deleted')

    # BUG FIX: rebuild the list instead of `del` inside enumerate(), which
    # shifts indices and silently skips the element after each deletion.
    DATA['locations'] = [
        loc for loc in DATA['locations']
        if not (loc['image_id'] == image_id and
                loc['id'] not in used_loc_ids and
                not loc['deleted'])
    ]

    # NOTE(zhiyan): 2. Adding or update locations
    for loc in locations:
        if loc.get('id') is None:
            image_location_add(context, image_id, loc)
        else:
            image_location_update(context, image_id, loc)
def _image_locations_delete_all(context, image_id, delete_time=None):
    """Soft-delete every live location of *image_id*, then purge any
    still-live entries for that image from the global location list."""
    image = DATA['images'][image_id]
    for loc in image['locations']:
        if not loc['deleted']:
            image_location_delete(context, image_id, loc['id'], 'deleted',
                                  delete_time=delete_time)

    # BUG FIX: rebuild the list instead of `del` inside enumerate(), which
    # shifts indices and silently skips the element after each deletion.
    DATA['locations'] = [
        loc for loc in DATA['locations']
        if not (loc['image_id'] == image_id and not loc['deleted'])
    ]
def _normalize_locations(image, force_show_deleted=False):
"""
Generate suitable dictionary list for locations field of image.
We don't need to set other data fields of location record which return
from image query.
"""
if force_show_deleted:
locations = image['locations']
else:
locations = filter(lambda x: not x['deleted'], image['locations'])
image['locations'] = [{'id': loc['id'],
'url': loc['url'],
'metadata': loc['metadata'],
'status': loc['status']}
for loc in locations]
return image
@log_call
def image_create(context, image_values):
    """Create a new image record; tags are split out into DATA['tags'].

    :raises exception.Duplicate: id already exists
    :raises exception.Invalid: missing 'status' or disallowed keys
    """
    global DATA
    image_id = image_values.get('id', str(uuid.uuid4()))

    if image_id in DATA['images']:
        raise exception.Duplicate()

    if 'status' not in image_values:
        raise exception.Invalid('status is a required attribute')

    allowed_keys = set(['id', 'name', 'status', 'min_ram', 'min_disk', 'size',
                        'virtual_size', 'checksum', 'locations', 'owner',
                        'protected', 'is_public', 'container_format',
                        'disk_format', 'created_at', 'updated_at', 'deleted',
                        'deleted_at', 'properties', 'tags'])

    incorrect_keys = set(image_values.keys()) - allowed_keys
    if incorrect_keys:
        raise exception.Invalid(
            'The keys %s are not valid' % str(incorrect_keys))

    image = _image_format(image_id, **image_values)
    DATA['images'][image_id] = image
    # tags live in their own top-level store, keyed by image id
    DATA['tags'][image_id] = image.pop('tags', [])

    return _normalize_locations(copy.deepcopy(image))
@log_call
def image_update(context, image_id, image_values, purge_props=False,
                 from_state=None):
    """Update an image record; replaces existing properties named in
    *image_values*, optionally soft-deleting the rest (*purge_props*).

    NOTE(review): *from_state* is accepted but not enforced by this
    in-memory driver.
    :raises exception.NotFound: unknown image id
    """
    global DATA
    try:
        image = DATA['images'][image_id]
    except KeyError:
        raise exception.NotFound()

    location_data = image_values.pop('locations', None)
    if location_data is not None:
        _image_locations_set(context, image_id, location_data)

    # replace values for properties that already exist
    new_properties = image_values.pop('properties', {})
    for prop in image['properties']:
        if prop['name'] in new_properties:
            prop['value'] = new_properties.pop(prop['name'])
        elif purge_props:
            # this matches weirdness in the sqlalchemy api
            prop['deleted'] = True

    # add in any completely new properties
    image['properties'].extend([{'name': k, 'value': v,
                                 'image_id': image_id, 'deleted': False}
                                for k, v in new_properties.items()])

    image['updated_at'] = timeutils.utcnow()
    image.update(image_values)
    DATA['images'][image_id] = image

    return _normalize_locations(copy.deepcopy(image))
@log_call
def image_destroy(context, image_id):
    """Soft-delete an image and cascade to its locations, properties,
    members and tags; returns a normalized copy of the deleted record.

    :raises exception.NotFound: unknown image id
    """
    global DATA
    try:
        delete_time = timeutils.utcnow()
        DATA['images'][image_id]['deleted'] = True
        DATA['images'][image_id]['deleted_at'] = delete_time

        # NOTE(flaper87): Move the image to one of the deleted statuses
        # if it hasn't been done yet.
        if (DATA['images'][image_id]['status'] not in
                ['deleted', 'pending_delete']):
            DATA['images'][image_id]['status'] = 'deleted'

        _image_locations_delete_all(context, image_id,
                                    delete_time=delete_time)

        for prop in DATA['images'][image_id]['properties']:
            image_property_delete(context, prop['name'], image_id)

        # image_member_find returns copies, so deleting while looping over
        # the result does not disturb the iteration
        members = image_member_find(context, image_id=image_id)
        for member in members:
            image_member_delete(context, member['id'])

        tags = image_tag_get_all(context, image_id)
        for tag in tags:
            image_tag_delete(context, image_id, tag)

        return _normalize_locations(copy.deepcopy(DATA['images'][image_id]))
    except KeyError:
        raise exception.NotFound()
@log_call
def image_tag_get_all(context, image_id):
    """Return the image's tag list ([] if the image has no tags)."""
    return DATA['tags'].get(image_id, [])
@log_call
def image_tag_get(context, image_id, value):
    """Return *value* if it is a tag of the image, else raise NotFound."""
    tags = image_tag_get_all(context, image_id)
    if value in tags:
        return value
    else:
        raise exception.NotFound()
@log_call
def image_tag_set_all(context, image_id, values):
    """Replace the image's tag list wholesale with *values*."""
    global DATA
    DATA['tags'][image_id] = values
@log_call
def image_tag_create(context, image_id, value):
    """Append *value* to the image's tag list and return it.

    NOTE(review): assumes DATA['tags'][image_id] exists (created by
    image_create); an unknown id raises KeyError.
    """
    global DATA
    DATA['tags'][image_id].append(value)
    return value
@log_call
def image_tag_delete(context, image_id, value):
    """Remove one tag from the image; raise NotFound if absent."""
    global DATA
    try:
        DATA['tags'][image_id].remove(value)
    except ValueError:
        raise exception.NotFound()
def is_image_mutable(context, image):
    """Return True if the image is mutable in this context."""
    if context.is_admin:
        # admins may mutate anything
        return True

    owner = image['owner']
    if owner is None or context.owner is None:
        # ownerless images (or anonymous contexts) are immutable
        return False

    # otherwise only the owner may mutate the image
    return owner == context.owner
def is_image_visible(context, image, status=None):
    """Return True if the image is visible in this context.

    Visible when: the caller is admin, the image is ownerless or public,
    the caller owns it, or it is shared with the caller (a membership
    record with the given *status*; 'all' means any status).
    """
    # Is admin == image visible
    if context.is_admin:
        return True

    # No owner == image visible
    if image['owner'] is None:
        return True

    # Image is_public == image visible
    if image['is_public']:
        return True

    # Perform tests based on whether we have an owner
    if context.owner is not None:
        if context.owner == image['owner']:
            return True

        # Figure out if this image is shared with that tenant
        if status == 'all':
            status = None
        members = image_member_find(context,
                                    image_id=image['id'],
                                    member=context.owner,
                                    status=status)
        if members:
            return True

    # Private image
    return False
def user_get_storage_usage(context, owner_id, image_id=None, session=None):
images = image_get_all(context, filters={'owner': owner_id})
total = 0
for image in images:
if image['status'] in ['killed', 'deleted']:
continue
if image['id'] != image_id:
locations = [loc for loc in image['locations']
if loc.get('status') != 'deleted']
total += (image['size'] * len(locations))
return total
@log_call
def task_create(context, values):
    """Create a task object.

    Required keys: type, status, input. The task-info keys (input, result,
    message) are split off and stored via _task_info_create(); the merged
    view of both records is returned.
    :raises exception.Duplicate: task id already exists
    :raises exception.Invalid: missing required or disallowed keys
    """
    global DATA
    task_values = copy.deepcopy(values)
    task_id = task_values.get('id', str(uuid.uuid4()))
    required_attributes = ['type', 'status', 'input']
    allowed_attributes = ['id', 'type', 'status', 'input', 'result', 'owner',
                          'message', 'expires_at', 'created_at',
                          'updated_at', 'deleted_at', 'deleted']

    if task_id in DATA['tasks']:
        raise exception.Duplicate()

    for key in required_attributes:
        if key not in task_values:
            raise exception.Invalid('%s is a required attribute' % key)

    incorrect_keys = set(task_values.keys()) - set(allowed_attributes)
    if incorrect_keys:
        raise exception.Invalid(
            'The keys %s are not valid' % str(incorrect_keys))

    task_info_values = _pop_task_info_values(task_values)
    task = _task_format(task_id, **task_values)
    DATA['tasks'][task_id] = task
    task_info = _task_info_create(task['id'], task_info_values)

    return _format_task_from_db(task, task_info)
@log_call
def task_update(context, task_id, values):
    """Update a task object.

    Task-info keys in *values* are routed to _task_info_update(); the rest
    update the task record itself. Returns the merged view.
    :raises exception.TaskNotFound: unknown task id
    """
    global DATA
    task_values = copy.deepcopy(values)
    task_info_values = _pop_task_info_values(task_values)
    try:
        task = DATA['tasks'][task_id]
    except KeyError:
        msg = "No task found with ID %s" % task_id
        LOG.debug(msg)
        raise exception.TaskNotFound(task_id=task_id)

    task.update(task_values)
    task['updated_at'] = timeutils.utcnow()
    DATA['tasks'][task_id] = task
    task_info = _task_info_update(task['id'], task_info_values)

    return _format_task_from_db(task, task_info)
@log_call
def task_get(context, task_id, force_show_deleted=False):
    """Return the merged task + task-info view for *task_id*."""
    task, task_info = _task_get(context, task_id, force_show_deleted)
    return _format_task_from_db(task, task_info)
def _task_get(context, task_id, force_show_deleted=False):
    """Fetch the raw task and task-info records, enforcing deletion and
    visibility rules.

    :raises exception.TaskNotFound: unknown id, or deleted and not showable
    :raises exception.Forbidden: exists but not visible to the caller
    """
    try:
        task = DATA['tasks'][task_id]
    except KeyError:
        msg = _('Could not find task %s') % task_id
        LOG.info(msg)
        raise exception.TaskNotFound(task_id=task_id)

    if task['deleted'] and not (force_show_deleted or context.show_deleted):
        msg = _('Unable to get deleted task %s') % task_id
        LOG.info(msg)
        raise exception.TaskNotFound(task_id=task_id)

    if not _is_task_visible(context, task):
        msg = "Forbidding request, task %s is not visible" % task_id
        LOG.debug(msg)
        raise exception.Forbidden(msg)

    task_info = _task_info_get(task_id)

    return task, task_info
@log_call
def task_delete(context, task_id):
global DATA
try:
DATA['tasks'][task_id]['deleted'] = True
DATA['tasks'][task_id]['deleted_at'] = timeutils.utcnow()
DATA['tasks'][task_id]['updated_at'] = timeutils.utcnow()
return copy.deepcopy(DATA['tasks'][task_id])
except KeyError:
msg = "No task found with ID %s" % task_id
LOG.debug(msg)
raise exception.TaskNotFound(task_id=task_id)
@log_call
def task_get_all(context, filters=None, marker=None, limit=None,
                 sort_key='created_at', sort_dir='desc'):
    """Return all tasks matching zero or more filters, formatted for the API.

    :param filters: dict of filter keys and values
    :param marker: task id after which to start the page
    :param limit: maximum number of tasks to return
    :param sort_key: task attribute by which results should be sorted
    :param sort_dir: direction in which results should be sorted (asc, desc)
    :returns: list of formatted task dicts
    """
    filters = filters or {}
    # Pipeline: visibility/attribute filtering, sorting, then pagination.
    candidates = _filter_tasks(DATA['tasks'].values(), filters, context)
    candidates = _sort_tasks(candidates, sort_key, sort_dir)
    candidates = _paginate_tasks(context, candidates, marker, limit,
                                 filters.get('deleted'))
    return [_format_task_from_db(task, task_info_ref=None)
            for task in candidates]
def _is_task_visible(context, task):
"""Return True if the task is visible in this context."""
# Is admin == task visible
if context.is_admin:
return True
# No owner == task visible
if task['owner'] is None:
return True
# Perform tests based on whether we have an owner
if context.owner is not None:
if context.owner == task['owner']:
return True
return False
def _filter_tasks(tasks, filters, context, admin_as_user=False):
    """Return the tasks visible to ``context`` that satisfy every filter.

    A task matches when each filter key equals the task's value and the
    task is not soft-deleted; with no filters, deleted state is not checked.
    """
    visible = []
    for task in tasks:
        owns_it = context.owner and task['owner'] == context.owner
        if not (owns_it or (context.is_admin and not admin_as_user)):
            continue
        matches = True
        for key, value in six.iteritems(filters):
            matches = task[key] == value and task['deleted'] is False
            if not matches:
                break
        if matches:
            visible.append(task)
    return visible
def _sort_tasks(tasks, sort_key, sort_dir):
reverse = False
if tasks and not (sort_key in tasks[0]):
raise exception.InvalidSortKey()
keyfn = lambda x: (x[sort_key] if x[sort_key] is not None else '',
x['created_at'], x['id'])
reverse = sort_dir == 'desc'
tasks.sort(key=keyfn, reverse=reverse)
return tasks
def _paginate_tasks(context, tasks, marker, limit, show_deleted):
start = 0
end = -1
if marker is None:
start = 0
else:
# Check that the task is accessible
_task_get(context, marker, force_show_deleted=show_deleted)
for i, task in enumerate(tasks):
if task['id'] == marker:
start = i + 1
break
else:
if task:
raise exception.TaskNotFound(task_id=task['id'])
else:
msg = _("Task does not exist")
raise exception.NotFound(message=msg)
end = start + limit if limit is not None else None
return tasks[start:end]
def _task_info_create(task_id, values):
    """Create and store the Task Info record for the given task id."""
    global DATA
    info = _task_info_format(task_id, **values)
    DATA['task_info'][task_id] = info
    return info


def _task_info_update(task_id, values):
    """Merge ``values`` into the stored Task Info for ``task_id``.

    :raises: exception.TaskNotFound if no info record exists
    """
    global DATA
    info = DATA['task_info'].get(task_id)
    if info is None:
        LOG.debug("No task info found with task id %s" % task_id)
        raise exception.TaskNotFound(task_id=task_id)
    info.update(values)
    DATA['task_info'][task_id] = info
    return info


def _task_info_get(task_id):
    """Return the Task Info record for ``task_id``.

    :raises: exception.TaskNotFound if no info record exists
    """
    global DATA
    info = DATA['task_info'].get(task_id)
    if info is None:
        msg = _('Could not find task info %s') % task_id
        LOG.info(msg)
        raise exception.TaskNotFound(task_id=task_id)
    return info
@log_call
def metadef_namespace_create(context, values):
    """Create a metadata-definition namespace.

    Rejects duplicate names, missing required keys and unknown keys.
    """
    global DATA
    namespace_values = copy.deepcopy(values)
    namespace_name = namespace_values.get('namespace')
    required_attributes = ['namespace', 'owner']
    allowed_attributes = ['namespace', 'owner', 'display_name', 'description',
                          'visibility', 'protected']
    # Namespace names are unique across the store.
    for existing in DATA['metadef_namespaces']:
        if existing['namespace'] == namespace_name:
            LOG.debug(("Can not create the metadata definition namespace. "
                       "Namespace=%s already exists.") % namespace_name)
            raise exception.MetadefDuplicateNamespace(
                namespace_name=namespace_name)
    for attr in required_attributes:
        if attr not in namespace_values:
            raise exception.Invalid('%s is a required attribute' % attr)
    extra_keys = set(namespace_values.keys()) - set(allowed_attributes)
    if extra_keys:
        raise exception.Invalid(
            'The keys %s are not valid' % str(extra_keys))
    namespace = _format_namespace(namespace_values)
    DATA['metadef_namespaces'].append(namespace)
    return namespace
@log_call
def metadef_namespace_update(context, namespace_id, values):
    """Update a metadata-definition namespace looked up by id.

    A rename is refused when it would collide with an existing namespace.
    """
    global DATA
    namespace_values = copy.deepcopy(values)
    namespace = metadef_namespace_get_by_id(context, namespace_id)
    if namespace['namespace'] != values['namespace']:
        # Renaming: the target name must not be taken already.
        for db_namespace in DATA['metadef_namespaces']:
            if db_namespace['namespace'] == values['namespace']:
                LOG.debug(("Invalid update. It would result in a duplicate"
                           " metadata definition namespace with the same"
                           " name of %s")
                          % values['namespace'])
                emsg = (_("Invalid update. It would result in a duplicate"
                          " metadata definition namespace with the same"
                          " name of %s")
                        % values['namespace'])
                raise exception.MetadefDuplicateNamespace(emsg)
    # Replace the stored record with the merged one.
    DATA['metadef_namespaces'].remove(namespace)
    namespace.update(namespace_values)
    namespace['updated_at'] = timeutils.utcnow()
    DATA['metadef_namespaces'].append(namespace)
    return namespace
@log_call
def metadef_namespace_get_by_id(context, namespace_id):
    """Return the metadata-definition namespace with the given id.

    :raises: exception.MetadefNamespaceNotFound if no namespace has that id
    :raises: exception.MetadefForbidden if the namespace is not visible to
        the caller

    Bug fix: ``namespace`` is a plain dict, so the previous
    ``namespace.namespace`` attribute access raised AttributeError on the
    forbidden path instead of producing the intended error message; it must
    be ``namespace['namespace']``.
    """
    try:
        namespace = next(namespace for namespace in DATA['metadef_namespaces']
                         if namespace['id'] == namespace_id)
    except StopIteration:
        msg = (_("Metadata definition namespace not found for id=%s")
               % namespace_id)
        LOG.warn(msg)
        raise exception.MetadefNamespaceNotFound(msg)
    if not _is_namespace_visible(context, namespace):
        msg = ("Forbidding request, metadata definition namespace=%s"
               " is not visible.") % namespace['namespace']
        LOG.debug(msg)
        emsg = _("Forbidding request, metadata definition namespace=%s"
                 " is not visible.") % namespace['namespace']
        raise exception.MetadefForbidden(emsg)
    return namespace
@log_call
def metadef_namespace_get(context, namespace_name):
    """Return the metadata-definition namespace with the given name.

    :raises: exception.MetadefNamespaceNotFound when the name is unknown
    """
    for candidate in DATA['metadef_namespaces']:
        if candidate['namespace'] == namespace_name:
            namespace = candidate
            break
    else:
        LOG.debug("No namespace found with name %s" % namespace_name)
        raise exception.MetadefNamespaceNotFound(
            namespace_name=namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    return namespace
@log_call
def metadef_namespace_get_all(context,
                              marker=None,
                              limit=None,
                              sort_key='created_at',
                              sort_dir='desc',
                              filters=None):
    """Return all namespaces visible to the caller, optionally filtered.

    Supported filters: ``visibility`` (exact match) and ``resource_types``
    (namespace must have at least one association whose name is listed).
    Note: marker/limit/sort arguments are accepted but not applied here.
    """
    filters = filters or {}
    resource_types = filters.get('resource_types', [])
    visibility = filters.get('visibility', None)
    result = []
    for namespace in DATA['metadef_namespaces']:
        if not _is_namespace_visible(context, namespace):
            continue
        if visibility and namespace['visibility'] != visibility:
            continue
        if resource_types:
            # Require at least one matching association for this namespace.
            if not any(
                    assoc['namespace_id'] == namespace['id'] and
                    assoc['name'] in resource_types
                    for assoc in DATA['metadef_namespace_resource_types']):
                continue
        result.append(namespace)
    return result
@log_call
def metadef_namespace_delete(context, namespace_name):
    """Remove a namespace from the store and return the removed record."""
    global DATA
    deleted = metadef_namespace_get(context, namespace_name)
    DATA['metadef_namespaces'].remove(deleted)
    return deleted
@log_call
def metadef_namespace_delete_content(context, namespace_name):
    """Delete every object and property belonging to a namespace.

    :returns: the namespace whose content was deleted

    Bug fix: the original second loop was meant to prune
    ``metadef_properties`` but iterated ``metadef_objects``, appended the
    stale ``object`` loop variable, and wrote the result back to
    ``metadef_objects`` — so properties were never removed and the object
    list could be corrupted.
    """
    global DATA
    namespace = metadef_namespace_get(context, namespace_name)
    namespace_id = namespace['id']
    # Keep only content that belongs to other namespaces.
    DATA['metadef_objects'] = [
        obj for obj in DATA['metadef_objects']
        if obj['namespace_id'] != namespace_id]
    DATA['metadef_properties'] = [
        prop for prop in DATA['metadef_properties']
        if prop['namespace_id'] != namespace_id]
    return namespace
@log_call
def metadef_object_get(context, namespace_name, object_name):
    """Look up a metadef object by name within a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    for obj in DATA['metadef_objects']:
        if (obj['namespace_id'] == namespace['id'] and
                obj['name'] == object_name):
            return obj
    LOG.debug("The metadata definition object with name=%(name)s"
              " was not found in namespace=%(namespace_name)s."
              % {'name': object_name, 'namespace_name': namespace_name})
    raise exception.MetadefObjectNotFound(namespace_name=namespace_name,
                                          object_name=object_name)


@log_call
def metadef_object_get_by_id(context, namespace_name, object_id):
    """Look up a metadef object by id within a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    for obj in DATA['metadef_objects']:
        if (obj['namespace_id'] == namespace['id'] and
                obj['id'] == object_id):
            return obj
    msg = (_("Metadata definition object not found for id=%s")
           % object_id)
    LOG.warn(msg)
    raise exception.MetadefObjectNotFound(msg)


@log_call
def metadef_object_get_all(context, namespace_name):
    """Return every metadef object belonging to a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    return [obj for obj in DATA['metadef_objects']
            if obj['namespace_id'] == namespace['id']]
@log_call
def metadef_object_create(context, namespace_name, values):
    """Create a metadef object; names are unique within a namespace."""
    global DATA
    object_values = copy.deepcopy(values)
    object_name = object_values['name']
    required_attributes = ['name']
    allowed_attributes = ['name', 'description', 'json_schema', 'required']
    namespace = metadef_namespace_get(context, namespace_name)
    for existing in DATA['metadef_objects']:
        if (existing['name'] == object_name and
                existing['namespace_id'] == namespace['id']):
            LOG.debug("A metadata definition object with name=%(name)s"
                      " in namespace=%(namespace_name)s already exists."
                      % {'name': object_name,
                         'namespace_name': namespace_name})
            raise exception.MetadefDuplicateObject(
                object_name=object_name, namespace_name=namespace_name)
    for attr in required_attributes:
        if attr not in object_values:
            raise exception.Invalid('%s is a required attribute' % attr)
    extra_keys = set(object_values.keys()) - set(allowed_attributes)
    if extra_keys:
        raise exception.Invalid(
            'The keys %s are not valid' % str(extra_keys))
    object_values['namespace_id'] = namespace['id']
    _check_namespace_visibility(context, namespace, namespace_name)
    obj = _format_object(object_values)
    DATA['metadef_objects'].append(obj)
    return obj
@log_call
def metadef_object_update(context, namespace_name, object_id, values):
    """Update a metadef object; renames must not collide with siblings."""
    global DATA
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    obj = metadef_object_get_by_id(context, namespace_name, object_id)
    if obj['name'] != values['name']:
        for other in DATA['metadef_objects']:
            if (other['name'] == values['name'] and
                    other['namespace_id'] == namespace['id']):
                LOG.debug("Invalid update. It would result in a duplicate"
                          " metadata definition object with same name=%(name)s "
                          " in namespace=%(namespace_name)s."
                          % {'name': obj['name'],
                             'namespace_name': namespace_name})
                emsg = (_("Invalid update. It would result in a duplicate"
                          " metadata definition object with the same"
                          " name=%(name)s "
                          " in namespace=%(namespace_name)s.")
                        % {'name': obj['name'],
                           'namespace_name': namespace_name})
                raise exception.MetadefDuplicateObject(emsg)
    # Replace the stored record with the merged one.
    DATA['metadef_objects'].remove(obj)
    obj.update(values)
    obj['updated_at'] = timeutils.utcnow()
    DATA['metadef_objects'].append(obj)
    return obj
@log_call
def metadef_object_delete(context, namespace_name, object_name):
    """Remove a metadef object and return the removed record."""
    global DATA
    obj = metadef_object_get(context, namespace_name, object_name)
    DATA['metadef_objects'].remove(obj)
    return obj


@log_call
def metadef_object_count(context, namespace_name):
    """Return the number of metadef objects in a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    return sum(1 for obj in DATA['metadef_objects']
               if obj['namespace_id'] == namespace['id'])


@log_call
def metadef_property_count(context, namespace_name):
    """Return the number of metadef properties in a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    return sum(1 for prop in DATA['metadef_properties']
               if prop['namespace_id'] == namespace['id'])
@log_call
def metadef_property_create(context, namespace_name, values):
    """Create a metadef property; names are unique within a namespace."""
    global DATA
    property_values = copy.deepcopy(values)
    property_name = property_values['name']
    required_attributes = ['name']
    allowed_attributes = ['name', 'description', 'json_schema', 'required']
    namespace = metadef_namespace_get(context, namespace_name)
    for existing in DATA['metadef_properties']:
        if (existing['name'] == property_name and
                existing['namespace_id'] == namespace['id']):
            LOG.debug("Can not create metadata definition property. A property"
                      " with name=%(name)s already exists in"
                      " namespace=%(namespace_name)s."
                      % {'name': property_name,
                         'namespace_name': namespace_name})
            raise exception.MetadefDuplicateProperty(
                property_name=property_name,
                namespace_name=namespace_name)
    for attr in required_attributes:
        if attr not in property_values:
            raise exception.Invalid('%s is a required attribute' % attr)
    extra_keys = set(property_values.keys()) - set(allowed_attributes)
    if extra_keys:
        raise exception.Invalid(
            'The keys %s are not valid' % str(extra_keys))
    property_values['namespace_id'] = namespace['id']
    _check_namespace_visibility(context, namespace, namespace_name)
    prop = _format_property(property_values)
    DATA['metadef_properties'].append(prop)
    return prop
@log_call
def metadef_property_update(context, namespace_name, property_id, values):
    """Update a metadef property; renames must not collide with siblings."""
    global DATA
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    prop = metadef_property_get_by_id(context, namespace_name, property_id)
    if prop['name'] != values['name']:
        for other in DATA['metadef_properties']:
            if (other['name'] == values['name'] and
                    other['namespace_id'] == namespace['id']):
                LOG.debug("Invalid update. It would result in a duplicate"
                          " metadata definition property with the same"
                          " name=%(name)s"
                          " in namespace=%(namespace_name)s."
                          % {'name': prop['name'],
                             'namespace_name': namespace_name})
                emsg = (_("Invalid update. It would result in a duplicate"
                          " metadata definition property with the same"
                          " name=%(name)s"
                          " in namespace=%(namespace_name)s.")
                        % {'name': prop['name'],
                           'namespace_name': namespace_name})
                raise exception.MetadefDuplicateProperty(emsg)
    # Replace the stored record with the merged one.
    DATA['metadef_properties'].remove(prop)
    prop.update(values)
    prop['updated_at'] = timeutils.utcnow()
    DATA['metadef_properties'].append(prop)
    return prop
@log_call
def metadef_property_get_all(context, namespace_name):
    """Return every metadef property belonging to a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    return [prop for prop in DATA['metadef_properties']
            if prop['namespace_id'] == namespace['id']]


@log_call
def metadef_property_get_by_id(context, namespace_name, property_id):
    """Look up a metadef property by id within a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    for prop in DATA['metadef_properties']:
        if (prop['namespace_id'] == namespace['id'] and
                prop['id'] == property_id):
            return prop
    msg = (_("Metadata definition property not found for id=%s")
           % property_id)
    LOG.warn(msg)
    raise exception.MetadefPropertyNotFound(msg)


@log_call
def metadef_property_get(context, namespace_name, property_name):
    """Look up a metadef property by name within a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    _check_namespace_visibility(context, namespace, namespace_name)
    for prop in DATA['metadef_properties']:
        if (prop['namespace_id'] == namespace['id'] and
                prop['name'] == property_name):
            return prop
    LOG.debug("No property found with name=%(name)s in"
              " namespace=%(namespace_name)s "
              % {'name': property_name, 'namespace_name': namespace_name})
    raise exception.MetadefPropertyNotFound(namespace_name=namespace_name,
                                            property_name=property_name)


@log_call
def metadef_property_delete(context, namespace_name, property_name):
    """Remove a metadef property and return the removed record."""
    global DATA
    prop = metadef_property_get(context, namespace_name, property_name)
    DATA['metadef_properties'].remove(prop)
    return prop
@log_call
def metadef_resource_type_create(context, values):
    """Create a metadef resource type; names are globally unique."""
    global DATA
    resource_type_values = copy.deepcopy(values)
    resource_type_name = resource_type_values['name']
    allowed_attributes = ['name', 'protected']
    for existing in DATA['metadef_resource_types']:
        if existing['name'] == resource_type_name:
            raise exception.Duplicate()
    extra_keys = set(resource_type_values.keys()) - set(allowed_attributes)
    if extra_keys:
        raise exception.Invalid(
            'The keys %s are not valid' % str(extra_keys))
    resource_type = _format_resource_type(resource_type_values)
    DATA['metadef_resource_types'].append(resource_type)
    return resource_type


@log_call
def metadef_resource_type_get_all(context):
    """Return every registered resource type."""
    return DATA['metadef_resource_types']


@log_call
def metadef_resource_type_get(context, resource_type_name):
    """Look up a resource type by name."""
    for resource_type in DATA['metadef_resource_types']:
        if resource_type['name'] == resource_type_name:
            return resource_type
    LOG.debug("No resource type found with name %s" % resource_type_name)
    raise exception.MetadefResourceTypeNotFound(
        resource_type_name=resource_type_name)
@log_call
def metadef_resource_type_association_create(context, namespace_name,
                                             values):
    """Associate an existing resource type with a namespace.

    The (namespace, resource type) pair must not already be associated.
    """
    global DATA
    association_values = copy.deepcopy(values)
    namespace = metadef_namespace_get(context, namespace_name)
    resource_type_name = association_values['name']
    resource_type = metadef_resource_type_get(context,
                                              resource_type_name)
    required_attributes = ['name', 'properties_target', 'prefix']
    allowed_attributes = copy.deepcopy(required_attributes)
    for existing in DATA['metadef_namespace_resource_types']:
        if (existing['namespace_id'] == namespace['id'] and
                existing['resource_type'] == resource_type['id']):
            LOG.debug("The metadata definition resource-type association of"
                      " resource_type=%(resource_type_name)s to"
                      " namespace=%(namespace_name)s, already exists."
                      % {'resource_type_name': resource_type_name,
                         'namespace_name': namespace_name})
            raise exception.MetadefDuplicateResourceTypeAssociation(
                resource_type_name=resource_type_name,
                namespace_name=namespace_name)
    for attr in required_attributes:
        if attr not in association_values:
            raise exception.Invalid('%s is a required attribute' % attr)
    extra_keys = set(association_values.keys()) - set(allowed_attributes)
    if extra_keys:
        raise exception.Invalid(
            'The keys %s are not valid' % str(extra_keys))
    association = _format_association(namespace, resource_type,
                                      association_values)
    DATA['metadef_namespace_resource_types'].append(association)
    return association
@log_call
def metadef_resource_type_association_get(context, namespace_name,
                                          resource_type_name):
    """Return the association between a namespace and a resource type.

    :raises: exception.MetadefResourceTypeAssociationNotFound when the
        pair is not associated

    Bug fix: the not-found message previously read
    ``("..." % namespace_name, resource_type_name)`` — the ``%`` operator
    received a single argument for two placeholders (raising TypeError)
    and ``msg`` became a tuple. The arguments now form a proper tuple
    operand for ``%``.
    """
    namespace = metadef_namespace_get(context, namespace_name)
    resource_type = metadef_resource_type_get(context, resource_type_name)
    for association in DATA['metadef_namespace_resource_types']:
        if (association['namespace_id'] == namespace['id'] and
                association['resource_type'] == resource_type['id']):
            return association
    msg = ("No resource type association found associated with namespace "
           "%s and resource type %s" % (namespace_name, resource_type_name))
    LOG.debug(msg)
    raise exception.MetadefResourceTypeAssociationNotFound(
        resource_type_name=resource_type_name,
        namespace_name=namespace_name)
@log_call
def metadef_resource_type_association_get_all_by_namespace(context,
                                                           namespace_name):
    """Return all resource-type associations for a namespace."""
    namespace = metadef_namespace_get(context, namespace_name)
    return [assoc for assoc in DATA['metadef_namespace_resource_types']
            if assoc['namespace_id'] == namespace['id']]


@log_call
def metadef_resource_type_association_delete(context, namespace_name,
                                             resource_type_name):
    """Remove a namespace/resource-type association and return it."""
    global DATA
    association = metadef_resource_type_association_get(context,
                                                        namespace_name,
                                                        resource_type_name)
    DATA['metadef_namespace_resource_types'].remove(association)
    return association
def _format_association(namespace, resource_type, association_values):
    """Build an association record linking a namespace to a resource type."""
    association = {
        'namespace_id': namespace['id'],
        'resource_type': resource_type['id'],
        'properties_target': None,
        'prefix': None,
        'created_at': timeutils.utcnow(),
        'updated_at': timeutils.utcnow(),
    }
    association.update(association_values)
    return association


def _format_resource_type(values):
    """Build a resource-type record; protected by default."""
    dt = timeutils.utcnow()
    resource_type = {
        'id': _get_metadef_id(),
        'name': values['name'],
        'protected': True,
        'created_at': dt,
        'updated_at': dt,
    }
    resource_type.update(values)
    return resource_type


def _format_property(values):
    """Build a property record with default (empty) fields."""
    prop = {
        'id': _get_metadef_id(),
        'namespace_id': None,
        'name': None,
        'json_schema': None,
    }
    prop.update(values)
    return prop


def _format_namespace(values):
    """Build a namespace record; private and unprotected by default."""
    dt = timeutils.utcnow()
    namespace = {
        'id': _get_metadef_id(),
        'namespace': None,
        'display_name': None,
        'description': None,
        'visibility': 'private',
        'protected': False,
        'owner': None,
        'created_at': dt,
        'updated_at': dt,
    }
    namespace.update(values)
    return namespace


def _format_object(values):
    """Build an object record with default fields and timestamps."""
    dt = timeutils.utcnow()
    obj = {
        'id': _get_metadef_id(),
        'namespace_id': None,
        'name': None,
        'description': None,
        'json_schema': None,
        'required': None,
        'created_at': dt,
        'updated_at': dt,
    }
    obj.update(values)
    return obj
def _is_namespace_visible(context, namespace):
"""Return true if namespace is visible in this context"""
if context.is_admin:
return True
if namespace.get('visibility', '') == 'public':
return True
if namespace['owner'] is None:
return True
if context.owner is not None:
if context.owner == namespace['owner']:
return True
return False
def _check_namespace_visibility(context, namespace, namespace_name):
    """Raise MetadefForbidden unless ``namespace`` is visible to the caller."""
    if _is_namespace_visible(context, namespace):
        return
    LOG.debug("Forbidding request, metadata definition namespace=%s"
              " not visible." % namespace_name)
    emsg = _("Forbidding request, metadata definition namespace=%s"
             " not visible.") % namespace_name
    raise exception.MetadefForbidden(emsg)


def _get_metadef_id():
    """Return the next unique metadef primary-key value."""
    global INDEX
    INDEX += 1
    return INDEX
|
{
"content_hash": "f060173492ba227dd104aaa390e3f33f",
"timestamp": "",
"source": "github",
"line_count": 1747,
"max_line_length": 79,
"avg_line_length": 31.942186605609617,
"alnum_prop": 0.5829077289751448,
"repo_name": "redhat-openstack/glance",
"id": "daa12485670bcd7ef05d7ebc464d206ce3224ca1",
"size": "56467",
"binary": false,
"copies": "1",
"ref": "refs/heads/f22-patches",
"path": "glance/db/simple/api.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "PLpgSQL",
"bytes": "12183"
},
{
"name": "Python",
"bytes": "3304893"
},
{
"name": "Shell",
"bytes": "7168"
}
],
"symlink_target": ""
}
|
import re
import json
import requests
from functools import update_wrapper
from django import forms
from django.conf import settings
from django.views.generic import View
from django.core.urlresolvers import reverse
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.decorators import method_decorator
from django.views.decorators.csrf import csrf_exempt
from sentry.utils.http import absolute_uri
from sentry.models import Organization, Team, User, OrganizationMember, \
GroupAssignee
from .utils import JsonResponse, IS_DEBUG
from .models import Tenant, Context
from . import mentions
from .plugin import enable_plugin_for_tenant, disable_plugin_for_tenant, \
ADDON_HOST_IDENT
from .cards import make_event_notification, make_generic_notification, \
make_subscription_update_notification, ICON, ICON2X
_link_pattern = re.escape(settings.SENTRY_URL_PREFIX) \
.replace('https\\:', 'https?\\:') + '/'
_link_re = re.compile(_link_pattern +
r'(?P<org>[^/]+)/(?P<proj>[^/]+)/group/'
r'(?P<group>[^/]+)(/events/(?P<event>[^/]+)|/?)')
ADDON_KEY = getattr(settings, 'HIPCHAT_SENTRY_AC_KEY', None)
if ADDON_KEY is None:
ADDON_KEY = '.'.join(ADDON_HOST_IDENT.split('.')[::-1]) + '.hipchat-ac'
class DescriptorView(View):
    """Serves the HipChat Connect add-on descriptor document."""

    def get(self, request):
        # Capabilities tell the HipChat server which callbacks, panels,
        # actions, dialogs and glances this add-on provides.
        capabilities = {
            'installable': {
                'allowRoom': True,
                'allowGlobal': False,
                'callbackUrl': absolute_uri(reverse(
                    'sentry-hipchat-ac-installable')),
            },
            'hipchatApiConsumer': {
                'scopes': ['send_notification', 'view_room'],
            },
            'configurable': {
                'url': absolute_uri(reverse('sentry-hipchat-ac-config')),
            },
            'webhook': [
                {
                    'event': 'room_message',
                    'url': absolute_uri(reverse(
                        'sentry-hipchat-ac-link-message')),
                    'pattern': _link_pattern,
                    'authentication': 'jwt',
                },
            ],
            'webPanel': [
                {
                    'key': 'sentry.sidebar.event-details',
                    'name': {
                        'value': 'Sentry Issue Details',
                    },
                    'location': 'hipchat.sidebar.right',
                    'url': absolute_uri(reverse(
                        'sentry-hipchat-ac-event-details')),
                },
                {
                    'key': 'sentry.sidebar.recent-events',
                    'name': {
                        'value': 'Recent Sentry Issues',
                    },
                    'location': 'hipchat.sidebar.right',
                    'url': absolute_uri(reverse(
                        'sentry-hipchat-ac-recent-events')),
                },
            ],
            'action': [
                {
                    'key': 'message.sentry.event-details',
                    'name': {
                        'value': 'Show details',
                    },
                    'target': 'sentry-event-details-glance',
                    'location': 'hipchat.message.action',
                    'conditions': [
                        {
                            'condition': 'card_matches',
                            'params': {
                                'metadata': [
                                    {'attr': 'sentry_message_type',
                                     'eq': 'event'},
                                ]
                            }
                        }
                    ],
                },
                {
                    'key': 'message.sentry.assign-event',
                    'name': {
                        'value': 'Assign',
                    },
                    'target': 'sentry-assign-dialog',
                    'location': 'hipchat.message.action',
                    'conditions': [
                        {
                            'condition': 'card_matches',
                            'params': {
                                'metadata': [
                                    {'attr': 'sentry_message_type',
                                     'eq': 'event'},
                                ]
                            }
                        }
                    ],
                }
            ],
            'dialog': [
                {
                    'key': 'sentry-assign-dialog',
                    'title': {
                        'value': 'Assign Issue',
                    },
                    'url': absolute_uri(reverse(
                        'sentry-hipchat-assign-event')),
                    'options': {
                        'size': {
                            'height': '400px',
                            'width': '600px',
                        },
                    },
                }
            ],
            'glance': [
                # Invisible dummy glance for normal sidebars: its condition
                # can never match, so it only exists as a panel target.
                {
                    'name': {
                        'value': 'Sentry Issue Details',
                    },
                    'key': 'sentry-event-details-glance',
                    'target': 'sentry.sidebar.event-details',
                    'icon': {
                        'url': ICON,
                        'url@2x': ICON2X,
                    },
                    'conditions': [
                        {
                            'condition': 'glance_matches',
                            'params': {
                                'metadata': [
                                    {'attr': 'this_is_a_dummy',
                                     'eq': True}
                                ]
                            }
                        }
                    ],
                },
                {
                    'name': {
                        'value': 'Sentry',
                    },
                    'queryUrl': absolute_uri(reverse(
                        'sentry-hipchat-ac-recent-events-glance')),
                    'key': 'sentry-recent-events-glance',
                    'target': 'sentry.sidebar.recent-events',
                    'icon': {
                        'url': ICON,
                        'url@2x': ICON2X,
                    },
                    'conditions': [],
                }
            ],
        }
        descriptor = {
            'key': ADDON_KEY,
            'name': 'Sentry for HipChat',
            'description': 'Sentry integration for HipChat.',
            'links': {
                'self': absolute_uri(reverse('sentry-hipchat-ac-descriptor')),
            },
            'icon': {
                'url': ICON,
            },
            'capabilities': capabilities,
            'vendor': {
                'url': 'https://www.getsentry.com/',
                'name': 'Sentry',
            }
        }
        return JsonResponse(descriptor)
class InstallableView(View):
    """Receives HipChat add-on install (POST) and uninstall (DELETE) calls."""

    @method_decorator(csrf_exempt)
    def dispatch(self, *args, **kwargs):
        return View.dispatch(self, *args, **kwargs)

    def post(self, request):
        payload = json.loads(request.body) or {}
        room_id = payload.get('roomId', None)
        if room_id is None:
            return HttpResponse('This add-on can only be installed in '
                                'individual rooms.', status=400)
        # Fetch the capabilities document and verify it points at itself.
        capdoc = requests.get(payload['capabilitiesUrl'], timeout=10).json()
        if capdoc['links'].get('self') != payload['capabilitiesUrl']:
            return HttpResponse('Mismatch on capabilities URL',
                                status=400)

        # Make sure we clean up an old existing tenant if we have one.
        try:
            stale_tenant = Tenant.objects.get(pk=payload['oauthId'])
        except Tenant.DoesNotExist:
            pass
        else:
            stale_tenant.delete()

        tenant = Tenant.objects.create(
            id=payload['oauthId'],
            room_id=room_id,
            secret=payload['oauthSecret'],
            capdoc=capdoc,
        )
        tenant.update_room_info()
        return HttpResponse('', status=201)

    def delete(self, request, oauth_id):
        try:
            Tenant.objects.get(pk=oauth_id).delete()
        except Tenant.DoesNotExist:
            pass
        return HttpResponse('', status=201)
class GrantAccessForm(forms.Form):
    """Lets an authenticated user grant the tenant access to organizations."""

    orgs = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple,
                                     label='Organizations',
                                     required=False)

    def __init__(self, tenant, request):
        self.user = request.user
        self.tenant = tenant
        self.all_orgs = Organization.objects.get_for_user(request.user)
        if request.method == 'POST':
            forms.Form.__init__(self, request.POST)
        else:
            forms.Form.__init__(self)
        # Offer every organization the current user belongs to.
        self.fields['orgs'].choices = [(str(org.id), org.name)
                                       for org in self.all_orgs]

    def clean_orgs(self):
        selected = [org for org in self.all_orgs
                    if str(org.id) in self.cleaned_data['orgs']]
        if not selected:
            raise forms.ValidationError('You need to select at least one '
                                        'organization to give access to.')
        return selected

    def save_changes(self):
        """Bind the tenant to the user and selected organizations."""
        self.tenant.auth_user = self.user
        self.tenant.organizations = self.cleaned_data['orgs']
        self.tenant.save()
        notify_tenant_added(self.tenant)
class ProjectSelectForm(forms.Form):
    """Lets the tenant choose which projects report into the room."""

    projects = forms.MultipleChoiceField(widget=forms.CheckboxSelectMultiple,
                                         label='Projects', required=False)

    def __init__(self, tenant, request):
        self.tenant = tenant
        choices = []
        self.projects_by_id = {}
        # Gather every project the tenant's auth user can see across all
        # granted organizations.
        for org in tenant.organizations.all():
            teams = Team.objects.get_for_user(org, tenant.auth_user,
                                              with_projects=True)
            for team, projects in teams:
                for project in projects:
                    choices.append((str(project.id), '%s | %s / %s' % (
                        org.name, team.name, project.name)))
                    self.projects_by_id[str(project.id)] = project
        choices.sort(key=lambda item: item[1].lower())
        if request.method == 'POST':
            forms.Form.__init__(self, request.POST)
        else:
            forms.Form.__init__(self, initial={
                'projects': [str(x.id) for x in tenant.projects.all()],
            })
        self.fields['projects'].choices = choices

    def clean_projects(self):
        return set(self.cleaned_data['projects'])

    def save_changes(self):
        """Enable/disable the plugin per project and notify the room."""
        enabled = []
        disabled = []
        for project_id, project in self.projects_by_id.iteritems():
            if project_id in self.cleaned_data['projects']:
                if enable_plugin_for_tenant(project, self.tenant):
                    enabled.append(project)
            elif disable_plugin_for_tenant(project, self.tenant):
                disabled.append(project)
        if enabled or disabled:
            with Context.for_tenant(self.tenant) as ctx:
                ctx.send_notification(**make_subscription_update_notification(
                    enabled, disabled))
                if disabled:
                    mentions.clear_project_mentions(self.tenant, disabled)
                ctx.push_recent_events_glance()
def webhook(f):
    """Decorate a HipChat webhook endpoint.

    Parses the JSON request body and invokes ``f`` inside an ac Context,
    passing the context and parsed payload as extra arguments.
    """
    @csrf_exempt
    def wrapper(request, *args, **kwargs):
        payload = json.loads(request.body) or {}
        with Context.for_request(request, payload) as context:
            return f(request, context, payload, *args, **kwargs)
    return update_wrapper(wrapper, f)


def with_context(f):
    """Decorate a view so it receives an ac Context as its second argument."""
    def wrapper(request, *args, **kwargs):
        with Context.for_request(request) as context:
            return f(request, context, *args, **kwargs)
    return update_wrapper(wrapper, f)
def allow_frame(f):
    """Mark responses as frameable.

    Pre-sets ``X-Frame-Options`` so Sentry does not overwrite the header
    with ``deny``, keeping the page embeddable in the HipChat iframe.
    """
    def wrapper(request, *args, **kwargs):
        response = f(request, *args, **kwargs)
        response['X-Frame-Options'] = 'allow'
        return response
    return update_wrapper(wrapper, f)
def cors(f):
    """Decorator: attach permissive CORS headers to the response,
    echoing back the request's Origin header."""
    def wrapper(request, *args, **kwargs):
        response = f(request, *args, **kwargs)
        cors_headers = {
            'Access-Control-Allow-Origin': request.META.get('HTTP_ORIGIN'),
            'Access-Control-Request-Method': 'GET, HEAD, OPTIONS',
            'Access-Control-Allow-Headers': 'X-Requested-With',
            'Access-Control-Allow-Credentials': 'true',
            'Access-Control-Max-Age': '1728000',
        }
        for header, value in cors_headers.items():
            response[header] = value
        return response
    return update_wrapper(wrapper, f)
@allow_frame
@with_context
def configure(request, context):
    """Render the add-on configuration page inside the HipChat iframe.

    Shows a grant-access form until a Sentry user is linked to the
    tenant, then a project-selection form for choosing which projects
    notify this room.
    """
    # XXX: this is a bit terrible because it means the login url is
    # already set at the time we visit this page. This can have some
    # stupid consequences when opening up the login page separately in a
    # different tab later. Ideally we could pass the login url through as
    # a URL parameter instead but this is currently not securely possible.
    request.session['_next'] = request.get_full_path()
    grant_form = None
    project_select_form = None
    # Tenant not yet linked to a Sentry user: offer to grant access.
    if context.tenant.auth_user is None and \
            request.user.is_authenticated():
        grant_form = GrantAccessForm(context.tenant, request)
        if request.method == 'POST' and grant_form.is_valid():
            grant_form.save_changes()
            # Redirect-after-POST so a refresh does not resubmit.
            return HttpResponseRedirect(request.get_full_path())
    # Tenant already linked: let the user pick subscribed projects.
    elif context.tenant.auth_user is not None:
        project_select_form = ProjectSelectForm(context.tenant, request)
        if request.method == 'POST' and project_select_form.is_valid():
            project_select_form.save_changes()
            return HttpResponseRedirect(request.get_full_path())
    return render(request, 'sentry_hipchat_ac/configure.html', {
        'context': context,
        'tenant': context.tenant,
        'current_user': request.user,
        'grant_form': grant_form,
        'project_select_form': project_select_form,
        'available_orgs': list(context.tenant.organizations.all()),
        'hipchat_debug': IS_DEBUG,
    })
@allow_frame
@with_context
def sign_out(request, context):
    """Handle the HipChat sign-out page.

    GET renders a confirmation page; POST clears the tenant's linked
    user (unless the user answered "no") and redirects back to the
    configuration page either way.
    """
    tenant = context.tenant
    config_url = '%s?signed_request=%s' % (
        reverse('sentry-hipchat-ac-config'),
        context.signed_request
    )
    if tenant.auth_user is None or 'no' in request.POST:
        # Nothing to sign out of, or the user declined.
        return HttpResponseRedirect(config_url)
    if request.method == 'POST':
        tenant.clear()
        notify_tenant_removal(tenant)
        return HttpResponseRedirect(config_url)
    return render(request, 'sentry_hipchat_ac/sign_out.html', {
        'context': context,
        'tenant': tenant,
    })
@cors
@allow_frame
@with_context
def recent_events_glance(request, context):
    """Return the JSON payload that backs the recent-events glance."""
    glance = context.get_recent_events_glance()
    return JsonResponse(glance)
@allow_frame
@with_context
def event_details(request, context):
    """Render the detail dialog for a single event mentioned in the room."""
    event = None
    group = None
    interface_data = {}
    tags = []
    event_id = request.GET.get('event')
    bad_event = False
    if event_id is not None:
        event = context.get_event(event_id)
        if event is None:
            # Unknown or inaccessible event id: show an error state.
            bad_event = True
        else:
            group = event.group
            # Strip the internal 'sentry:' prefix from tag keys for display.
            tags = [(k.split(':', 1)[1] if k.startswith('sentry:') else k,
                     v) for k, v in event.get_tags()]
            interface_data.update(
                http=event.interfaces.get('sentry.interfaces.Http'),
                user=event.interfaces.get('sentry.interfaces.User'),
            )
            # Pre-render the exception for the template when present.
            exc = event.interfaces.get('sentry.interfaces.Exception')
            if exc is not None:
                interface_data['exc'] = exc
                interface_data['exc_as_string'] = exc.to_string(event)
    return render(request, 'sentry_hipchat_ac/event_details.html', {
        'context': context,
        'event': event,
        'from_recent': request.GET.get('from_recent') == 'yes',
        'group': group,
        'interfaces': interface_data,
        'bad_event': bad_event,
        'tags': tags,
    })
@allow_frame
@with_context
def assign_event(request, context):
    """Render and process the assign/de-assign dialog for an event's group."""
    event = None
    project = None
    member_list = []
    assigned_to = None
    dismiss_dialog = False
    event_id = request.GET.get('event')
    if event_id:
        event = context.get_event(event_id)
    if event is not None:
        project = event.project
        # Active users with an active team membership on this project,
        # capped at 1000 and sorted by email for the assignee picker.
        member_list = sorted(set(User.objects.filter(
            is_active=True,
            sentry_orgmember_set__organization=project.organization,
            sentry_orgmember_set__id__in=OrganizationMember.objects.filter(
                organizationmemberteam__is_active=True,
                organizationmemberteam__team=project.team,
            ).values('id')
        ).distinct()[:1000]), key=lambda x: x.email)
        assigned_to = GroupAssignee.objects.filter(
            group=event.group
        ).first()
        if request.method == 'POST':
            if 'assign' in request.POST:
                # Only allow assigning to users from the computed list.
                assignee = next((
                    x for x in member_list
                    if str(x.id) == request.POST['assigned_to']), None)
                if assignee is not None:
                    GroupAssignee.objects.assign(event.group, assignee)
            elif 'deassign' in request.POST:
                GroupAssignee.objects.deassign(event.group)
            # Close the HipChat dialog after any POST.
            dismiss_dialog = True
    return render(request, 'sentry_hipchat_ac/assign_event.html', {
        'context': context,
        'event': event,
        'project': project,
        'member_list': member_list,
        'assigned_to': assigned_to,
        'dismiss_dialog': dismiss_dialog,
    })
@allow_frame
@with_context
def recent_events(request, context):
    """Render the list of recently mentioned events for this tenant."""
    recent = mentions.get_recent_mentions(context.tenant)
    return render(request, 'sentry_hipchat_ac/recent_events.html', {
        'context': context,
        'events': recent,
    })
@webhook
def on_link_message(request, context, data):
    """Webhook fired when a room message contains a Sentry link.

    Re-posts a rich notification card for the referenced group/event and
    records the mention for the recent-events glance.
    """
    # _link_re is a module-level pattern matching Sentry group/event URLs.
    match = _link_re.search(data['item']['message']['message'])
    if match is not None:
        params = match.groupdict()
        event = context.get_event_from_url_params(
            group_id=params['group'],
            event_id=params['event'],
            slug_vars={'org_slug': params['org'],
                       'proj_slug': params['proj']}
        )
        if event is not None:
            context.send_notification(**make_event_notification(
                event.group, event, context.tenant, new=False,
                event_target=params['event'] is not None))
            # Record the specific event only when the link targeted one.
            mentions.mention_event(
                project=event.project,
                group=event.group,
                tenant=context.tenant,
                event=params['event'] and event or None,
            )
            context.push_recent_events_glance()
    # Respond 204 regardless of whether a Sentry link matched.
    return HttpResponse('', status=204)
def notify_tenant_added(tenant):
    """Announce in the room that the integration was just installed."""
    with Context.for_tenant(tenant) as ctx:
        notification = make_generic_notification(
            'The Sentry Hipchat integration was associated with this room.',
            color='green')
        ctx.send_notification(**notification)
        ctx.push_recent_events_glance()
def notify_tenant_removal(tenant):
    """Announce in the room that the integration was disconnected."""
    with Context.for_tenant(tenant) as ctx:
        notification = make_generic_notification(
            'The Sentry Hipchat integration was disassociated with this room.',
            color='red')
        ctx.send_notification(**notification)
        ctx.push_recent_events_glance()
|
{
"content_hash": "316ea7c3b17b1d5b2d47c51d8ccdbc7b",
"timestamp": "",
"source": "github",
"line_count": 569,
"max_line_length": 79,
"avg_line_length": 35.99472759226713,
"alnum_prop": 0.4937258922904155,
"repo_name": "getsentry/sentry-hipchat-ac",
"id": "f37189c2542fdb614c0e87fcb05e9a27a9ec7146",
"size": "20481",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "sentry_hipchat_ac/views.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "CSS",
"bytes": "5525"
},
{
"name": "HTML",
"bytes": "14246"
},
{
"name": "Makefile",
"bytes": "104"
},
{
"name": "Python",
"bytes": "80305"
}
],
"symlink_target": ""
}
|
"""
The setup and build script for the pyrant library.
"""
import os
from setuptools import setup, find_packages
from _version import version
# Read the long description from the adjacent README; use a context
# manager so the file handle is closed promptly instead of leaking
# until garbage collection.
with open(os.path.join(os.path.dirname(__file__), 'README')) as _readme_file:
    readme = _readme_file.read()
# Package metadata for PyPI.  NOTE: the classifiers list previously
# contained 'Intended Audience :: Developers' twice; the duplicate entry
# has been removed.
setup(
    name = "pyrant",
    version = version,
    url = 'http://code.google.com/p/pyrant/',
    license = 'Apache License 2.0',
    description = 'A python wrapper around Tokyo Tyrant',
    long_description = readme,
    author = 'Martin Conte Mac Donell',
    author_email = 'Reflejo@gmail.com',
    maintainer = 'Andrey Mikhaylenko',
    maintainer_email = 'andy@neithere.net',
    packages = find_packages(),
    install_requires = ['setuptools'],
    include_package_data = True,
    classifiers = [
        'Intended Audience :: Developers',
        'Development Status :: 4 - Beta',
        'Programming Language :: Python',
        'License :: OSI Approved :: Apache Software License',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: Database :: Front-Ends',
    ],
    # release sanity check
    test_suite = 'nose.collector',
)
|
{
"content_hash": "f4c41156c3d4a1d750845b8a1cbd3b65",
"timestamp": "",
"source": "github",
"line_count": 37,
"max_line_length": 71,
"avg_line_length": 30.56756756756757,
"alnum_prop": 0.6463306808134395,
"repo_name": "neithere/pyrant",
"id": "27c00cf4f28c79bca4f9a7d40039b7a691e18702",
"size": "1772",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "setup.py",
"mode": "33261",
"license": "apache-2.0",
"language": [
{
"name": "Lua",
"bytes": "78"
},
{
"name": "Python",
"bytes": "119553"
},
{
"name": "Shell",
"bytes": "165"
}
],
"symlink_target": ""
}
|
"""A wrapper around requests that records all requests made with it.
Supports get, put, post, delete and request
all calls return an instance of HarvesterResponse
"""
from __future__ import absolute_import
import json
import time
import logging
import functools
from datetime import datetime
import six
import furl
import requests
from cassandra.cqlengine import columns, models
from requests.structures import CaseInsensitiveDict
from scrapi import events
from scrapi import database
from scrapi import settings
# Module-level logger; quiet cqlengine's very chatty CQL query logging.
logger = logging.getLogger(__name__)
logging.getLogger('cqlengine.cql').setLevel(logging.WARN)
@database.register_model
class HarvesterResponse(models.Model):
    """A stand-in for ``requests.Response`` persisted in Cassandra.

    Mirrors the parts of a response object the harvesters use; rows are
    keyed by (method, url).  ``time_made`` records when the row was made.
    """
    __table_name__ = 'responses'
    method = columns.Text(primary_key=True)
    url = columns.Text(primary_key=True, required=True)
    # Raw request data
    ok = columns.Boolean()
    content = columns.Bytes()
    encoding = columns.Text()
    headers_str = columns.Text()  # JSON-serialized response headers
    status_code = columns.Integer()
    time_made = columns.DateTime(default=datetime.now)
    def json(self):
        """Deserialize the stored body as JSON."""
        return json.loads(self.content)
    @property
    def headers(self):
        """Stored headers as a case-insensitive dict."""
        return CaseInsensitiveDict(json.loads(self.headers_str))
    @property
    def text(self):
        # NOTE(review): six.u() is intended for str literals; presumably
        # content is text-compatible here -- confirm non-ASCII handling.
        return six.u(self.content)
def _maybe_load_response(method, url):
    """Return the recorded response for (method, url), or None if absent."""
    lookup = {'url': url.lower(), 'method': method}
    try:
        return HarvesterResponse.get(**lookup)
    except HarvesterResponse.DoesNotExist:
        return None
def record_or_load_response(method, url, throttle=None, force=False, params=None, expected=(200,), **kwargs):
    """Return a recorded response for (method, url) or make and record one.

    :param throttle: seconds to sleep before hitting the network
    :param force: re-request even when a good recording exists
    :param params: NOTE(review): accepted but never used here; request()
        folds params into the url before calling this -- confirm intent
    :param expected: status codes additionally treated as ok when
        updating an existing recording
    """
    resp = _maybe_load_response(method, url)
    # Reuse the recording only when it exists and was successful.
    if not force and resp and resp.ok:
        logger.info('Return recorded response from "{}"'.format(url))
        return resp
    if force:
        logger.warning('Force updating request to "{}"'.format(url))
    else:
        logger.info('Making request to "{}"'.format(url))
    maybe_sleep(throttle)
    response = requests.request(method, url, **kwargs)
    if not response.ok:
        events.log_to_sentry('Got non-ok response code.', url=url, method=method)
    # Cassandra stores bytes; normalize text bodies before saving.
    if isinstance(response.content, six.text_type):
        response.content = response.content.encode('utf8')
    if not resp:
        # First recording for this (method, url): insert a new row.
        return HarvesterResponse(
            url=url.lower(),
            method=method,
            ok=response.ok,
            content=response.content,
            encoding=response.encoding,
            status_code=response.status_code,
            headers_str=json.dumps(dict(response.headers))
        ).save()
    logger.warning('Skipped recorded response from "{}"'.format(url))
    # Update the existing (previously failed or forced) recording in place.
    return resp.update(
        ok=(response.ok or response.status_code in expected),
        content=response.content,
        encoding=response.encoding,
        status_code=response.status_code,
        headers_str=json.dumps(dict(response.headers))
    ).save()
def maybe_sleep(sleepytime):
    """Sleep *sleepytime* seconds when truthy.

    Exists as a separate function so tests can mock out the delay.
    """
    if not sleepytime:
        return
    time.sleep(sleepytime)
def request(method, url, params=None, **kwargs):
    """Make a recorded request or get a record matching method and url

    Query params are folded into the url before lookup, so the recording
    key is always the full url.  When RECORD_HTTP_TRANSACTIONS is off
    this is a thin throttled wrapper around requests.request.

    :param str method: Get, Put, Post, or Delete
    :param str url: Where to make the request to
    :param bool force: Whether or not to force the new request to be made
    :param int throttle: A time in seconds to sleep before making requests
    :param dict kwargs: Addition keywords to pass to requests
    """
    if params:
        # Encode params into the url itself (furl handles escaping).
        url = furl.furl(url).set(args=params).url
    logger.info(url)
    if settings.RECORD_HTTP_TRANSACTIONS:
        return record_or_load_response(method, url, **kwargs)
    logger.info('Making request to "{}"'.format(url))
    throttle = kwargs.pop('throttle', 0)
    maybe_sleep(throttle)
    # Need to prevent force from being passed to real requests module
    kwargs.pop('force', None)
    return requests.request(method, url, **kwargs)
# Convenience verb wrappers mirroring the ``requests`` module API.
get = functools.partial(request, 'get')
put = functools.partial(request, 'put')
post = functools.partial(request, 'post')
delete = functools.partial(request, 'delete')
|
{
"content_hash": "ffea7d1c1ba6f621a96caf285d68f504",
"timestamp": "",
"source": "github",
"line_count": 143,
"max_line_length": 109,
"avg_line_length": 29.923076923076923,
"alnum_prop": 0.6847394250993223,
"repo_name": "ostwald/scrapi",
"id": "e6b7d91c1d5f6fd6ca427d86385d2e37063b2a18",
"size": "4279",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "scrapi/requests.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "255919"
}
],
"symlink_target": ""
}
|
from collections import OrderedDict
from typing import Dict, Type
from .base import KeywordPlanIdeaServiceTransport
from .grpc import KeywordPlanIdeaServiceGrpcTransport
# Compile a registry of transports, keyed by transport name ("grpc").
_transport_registry = (
    OrderedDict()
)  # type: Dict[str, Type[KeywordPlanIdeaServiceTransport]]
_transport_registry["grpc"] = KeywordPlanIdeaServiceGrpcTransport
# Names re-exported as this package's public API.
__all__ = (
    "KeywordPlanIdeaServiceTransport",
    "KeywordPlanIdeaServiceGrpcTransport",
)
|
{
"content_hash": "51eb42e79a2641c653df7bf78698a3a9",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 65,
"avg_line_length": 29.5625,
"alnum_prop": 0.7970401691331924,
"repo_name": "googleads/google-ads-python",
"id": "7bd34e5abf62a49e481ba7bfce111261393c3cac",
"size": "1073",
"binary": false,
"copies": "3",
"ref": "refs/heads/main",
"path": "google/ads/googleads/v10/services/services/keyword_plan_idea_service/transports/__init__.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "23399881"
}
],
"symlink_target": ""
}
|
from __future__ import division
import ConfigParser
import time
import numpy as np
import geovidcorpus
import lda.lda as lda
import util
# Unique id for this experiment run (used in results file paths).
exp_id = time.time()
# Hyperparameter grids to sweep over.
eta_list = [0.1]
alpha_list = [0.001]
num_topics_list = [20]
# LDA fitting schedule: E-step iterations, burn-in, and lag.
niter = 30
burn = 20
lag = 0
# Number of cross-validation folds.
cv_folds = 5
def main():
    """Grid-search LDA hyperparameters with k-fold cross validation and
    write per-fold topic/country weights plus a params summary CSV."""
    config = ConfigParser.RawConfigParser()
    config.read('app.config')
    filename = 'data/%s' % config.get('data','filename')
    data = util.VideoData.from_csv(filename, filter_single=True)
    # Corpus is the transposed count matrix, int32 as the lda code expects.
    corpus = np.array(np.transpose(data.counts), dtype='int32')
    print "Loaded %d videos in %d countries" % corpus.shape
    results = list()
    for eta in eta_list:
        for alpha in alpha_list:
            for num_topics in num_topics_list:
                print "Running cross validation with eta=%.4f alpha=%.4f K=%d" % (eta, alpha, num_topics)
                folds = data.cross_validation_sets(cv_folds)
                # NOTE(review): this list is shadowed by the scalar
                # perplexity assigned inside the fold loop below.
                perplexity = list()
                for f in range(cv_folds):
                    print "Cross validating %d/%d:" % (f, len(folds))
                    print "Initializing LDA model %f %f %d" % (eta, alpha, num_topics)
                    start = time.time()
                    fold_data = util.VideoData(folds.get_fold_training(f), proto=data)
                    test_corpus = np.array(data.rows_to_counts(folds.get_fold_test(f)).transpose(), dtype='int32')
                    # NOTE(review): the model is built on the full corpus,
                    # not fold_data -- confirm this is intentional.
                    model = lda.LdaModel(corpus, num_topics, alpha=alpha, eta=eta, burn=burn, lag=lag)
                    print "Initialization took %f seconds" % (time.time() - start)
                    print "Creating cross-validation folds"
                    for i in range(niter):
                        start = time.time()
                        print 'Iteration %d' % i
                        print ' doing E-step'
                        model.e_step()
                        print ' complete in %f seconds' % (time.time() - start)
                    # Record parameters
                    perplexity = model.perplexity(test_corpus)
                    results.append(
                        (eta, alpha, num_topics, model.eta.sum()/model.eta.shape[0], model.alpha.sum()/model.alpha.shape[0], perplexity)
                    )
                    # Write topics
                    topics = sorted(enumerate(model.alpha), key=lambda x: x[1], reverse=True)
                    beta = model.beta()
                    clusters = list()
                    for k, alphak in topics:
                        # Countries ranked by weight within topic k.
                        countries = sorted(enumerate(beta[k,:]), key=lambda x: x[1], reverse=True)
                        for country_id, weight in countries:
                            name = data.country_lookup.id2tok[country_id]
                            clusters.append((k, alphak, name, weight))
                    filename = 'topics-eta%f-alpha%f-k%d-fold%d' % (eta, alpha, num_topics, f)
                    util.write_results_csv('findcvparams', exp_id, filename, clusters, ('topic', 'alpha_k', 'country', 'weight'))
    util.write_results_csv('findcvparams', exp_id, 'params', results, ('eta0', 'alpha0', 'topics', 'mean final eta', 'mean final alpha', 'perplexity'))
# Script entry point.
if __name__ == '__main__':
    main()
|
{
"content_hash": "848d8c5e8b40770a371f7312b99e09e2",
"timestamp": "",
"source": "github",
"line_count": 70,
"max_line_length": 151,
"avg_line_length": 45.6,
"alnum_prop": 0.5350877192982456,
"repo_name": "c4fcm/WhatWeWatch-Analysis",
"id": "37cd7ce349d04e2f2765c3bbe41bad7d19371f3e",
"size": "3192",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "findcvparams.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Matlab",
"bytes": "2228"
},
{
"name": "Python",
"bytes": "133478"
},
{
"name": "R",
"bytes": "2048"
}
],
"symlink_target": ""
}
|
from .resource import Resource
class NetworkSecurityGroup(Resource):
    """NetworkSecurityGroup resource.

    Variables are only populated by the server, and will be ignored when
    sending a request.

    :param id: Resource Identifier.
    :type id: str
    :ivar name: Resource name.
    :vartype name: str
    :ivar type: Resource type.
    :vartype type: str
    :param location: Resource location.
    :type location: str
    :param tags: Resource tags.
    :type tags: dict[str, str]
    :param security_rules: A collection of security rules of the network
     security group.
    :type security_rules:
     list[~azure.mgmt.network.v2015_06_15.models.SecurityRule]
    :param default_security_rules: The default security rules of network
     security group.
    :type default_security_rules:
     list[~azure.mgmt.network.v2015_06_15.models.SecurityRule]
    :param network_interfaces: A collection of references to network
     interfaces.
    :type network_interfaces:
     list[~azure.mgmt.network.v2015_06_15.models.NetworkInterface]
    :param subnets: A collection of references to subnets.
    :type subnets: list[~azure.mgmt.network.v2015_06_15.models.Subnet]
    :param resource_guid: The resource GUID property of the network security
     group resource.
    :type resource_guid: str
    :param provisioning_state: The provisioning state of the public IP
     resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
    :type provisioning_state: str
    :param etag: A unique read-only string that changes whenever the resource
     is updated.
    :type etag: str
    """

    # Read-only fields are validated so they are never sent in requests.
    _validation = {
        'name': {'readonly': True},
        'type': {'readonly': True},
    }

    # Maps Python attribute names to REST wire paths and types used by
    # the SDK's (de)serializer.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'type': {'key': 'type', 'type': 'str'},
        'location': {'key': 'location', 'type': 'str'},
        'tags': {'key': 'tags', 'type': '{str}'},
        'security_rules': {'key': 'properties.securityRules', 'type': '[SecurityRule]'},
        'default_security_rules': {'key': 'properties.defaultSecurityRules', 'type': '[SecurityRule]'},
        'network_interfaces': {'key': 'properties.networkInterfaces', 'type': '[NetworkInterface]'},
        'subnets': {'key': 'properties.subnets', 'type': '[Subnet]'},
        'resource_guid': {'key': 'properties.resourceGuid', 'type': 'str'},
        'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'},
        'etag': {'key': 'etag', 'type': 'str'},
    }

    def __init__(self, id=None, location=None, tags=None, security_rules=None, default_security_rules=None, network_interfaces=None, subnets=None, resource_guid=None, provisioning_state=None, etag=None):
        """Initialize the model; all properties are optional keyword args."""
        super(NetworkSecurityGroup, self).__init__(id=id, location=location, tags=tags)
        self.security_rules = security_rules
        self.default_security_rules = default_security_rules
        self.network_interfaces = network_interfaces
        self.subnets = subnets
        self.resource_guid = resource_guid
        self.provisioning_state = provisioning_state
        self.etag = etag
|
{
"content_hash": "ae00569f56f2593c74bbf96ede6a99cf",
"timestamp": "",
"source": "github",
"line_count": 73,
"max_line_length": 203,
"avg_line_length": 43.35616438356164,
"alnum_prop": 0.6527646129541864,
"repo_name": "AutorestCI/azure-sdk-for-python",
"id": "7213d08bb536b47fcb746ad03fc783642ae573a7",
"size": "3639",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "azure-mgmt-network/azure/mgmt/network/v2015_06_15/models/network_security_group.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "34619070"
}
],
"symlink_target": ""
}
|
from openprocurement.tender.core.utils import optendersresource
from openprocurement.tender.openeu.views.award_document import TenderAwardDocumentResource as TenderEUAwardDocumentResource
@optendersresource(name='esco.EU:Tender Award Documents',
                   collection_path='/tenders/{tender_id}/awards/{award_id}/documents',
                   path='/tenders/{tender_id}/awards/{award_id}/documents/{document_id}',
                   procurementMethodType='esco.EU',
                   description="Tender ESCO EU Award documents")
class TenderESCOEUAwardDocumentResource(TenderEUAwardDocumentResource):
    """Tender ESCO EU award-document resource.

    Inherits all behavior from the open-EU award document resource; only
    the route registration above (procurementMethodType 'esco.EU')
    differs.
    """
|
{
"content_hash": "e80fc18fec67c8a67c53d8a94812873f",
"timestamp": "",
"source": "github",
"line_count": 11,
"max_line_length": 123,
"avg_line_length": 60.45454545454545,
"alnum_prop": 0.7263157894736842,
"repo_name": "Scandie/openprocurement.tender.esco",
"id": "2e37ae001bda3e6284f2f73bc1f1b468fd44e7c7",
"size": "689",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "openprocurement/tender/esco/views/award_document.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Python",
"bytes": "358972"
}
],
"symlink_target": ""
}
|
from __future__ import absolute_import
import os
from chaco.plot_label import PlotLabel
from enable.component_editor import ComponentEditor as EnableComponentEditor
from traits.api import Property, Event, cached_property, Any
from traitsui.api import View, UItem
from pychron.core.helpers.iterfuncs import groupby_group_id
from pychron.pipeline.plot.editors.base_editor import BaseEditor
from pychron.pipeline.plot.figure_container import FigureContainer
class WarningLabel(PlotLabel):
    """PlotLabel overlay that positions itself at the center of its
    host component instead of the default overlay placement."""
    def _layout_as_overlay(self, size=None, force=False):
        host = self.component
        self.x = host.x + host.width / 2
        self.y = host.y + host.height / 2
class GraphEditor(BaseEditor):
    """Base editor for pipeline figure editors.

    Holds the analyses being plotted and lazily rebuilds the enable
    component whenever ``refresh_needed`` fires.
    """
    refresh_needed = Event
    save_needed = Event
    # Cached and rebuilt whenever refresh_needed fires.
    component = Property(depends_on='refresh_needed')
    basename = ''
    figure_model = Any
    figure_container = Any

    @property
    def analyses(self):
        """Alias for the editor's items."""
        return self.items

    def save_file(self, path, force_layout=True, dest_box=None):
        """Render the component to *path* as PDF (default) or PNG.

        Paths with an unrecognized extension are coerced to .pdf.
        """
        _, tail = os.path.splitext(path)
        if tail not in ('.pdf', '.png'):
            path = '{}.pdf'.format(path)
        c = self.component
        # chaco becomes less responsive after saving if
        # use_backbuffer is false and using pdf
        from reportlab.lib.pagesizes import letter
        c.do_layout(size=letter, force=force_layout)
        _, tail = os.path.splitext(path)
        if tail == '.pdf':
            from pychron.core.pdf.save_pdf_dialog import myPdfPlotGraphicsContext
            gc = myPdfPlotGraphicsContext(filename=path,
                                          dest_box=dest_box)
            gc.render_component(c, valign='center')
            gc.save()
        else:
            from chaco.plot_graphics_context import PlotGraphicsContext
            gc = PlotGraphicsContext((int(c.outer_width), int(c.outer_height)))
            gc.render_component(c)
            gc.save(path)

    def set_items(self, ans, is_append=False, refresh=False, compress=True):
        """Set (or, with is_append, extend) the analyses shown."""
        if is_append:
            self.items.extend(ans)
        else:
            self.items = ans
        if self.items:
            self._set_name()
            if compress:
                self._compress_groups()
        if refresh:
            print('set items refresh')
            self.refresh_needed = True

    def _compress_groups(self):
        """Renumber group_ids so they are consecutive starting at 0."""
        ans = self.items
        if ans:
            for i, (gid, analyses) in enumerate(groupby_group_id(ans)):
                for ai in analyses:
                    ai.group_id = i

    @cached_property
    def _get_component(self):
        # Traits getter for ``component``; show a placeholder overlay
        # when there are no analyses to plot.
        if self.items:
            comp = self._component_factory()
        else:
            comp = self._no_component_factory()
        return comp

    def _component_factory(self):
        # BUG FIX: this method was previously defined twice with identical
        # bodies; the redundant duplicate definition has been removed.
        raise NotImplementedError

    def recalculate(self, model):
        """Hook for subclasses; no-op by default."""
        pass

    def _get_component_hook(self, *args, **kw):
        """Hook for subclasses; no-op by default."""
        pass

    def _no_component_factory(self):
        """Return a component overlaid with a 'No Analyses' warning."""
        container = self.figure_container
        if not container:
            container = FigureContainer()
            self.figure_container = container
        component = self.figure_container.component
        w = WarningLabel(text='No Analyses',
                         font='Helvetica 36',
                         component=component)
        component.overlays.append(w)
        return component

    def get_component_view(self):
        """Return a TraitsUI item that renders ``component``."""
        return UItem('component',
                     style='custom',
                     # width=650,
                     editor=EnableComponentEditor())

    def traits_view(self):
        v = View(self.get_component_view(),
                 resizable=True)
        return v
# ============= EOF =============================================
|
{
"content_hash": "d449e107c9c040a05f3bbdf386064d55",
"timestamp": "",
"source": "github",
"line_count": 130,
"max_line_length": 81,
"avg_line_length": 29.8,
"alnum_prop": 0.5813113061435209,
"repo_name": "UManPychron/pychron",
"id": "a380c43cfc877abf07a2ed58f00773bdc772da11",
"size": "4674",
"binary": false,
"copies": "1",
"ref": "refs/heads/develop",
"path": "pychron/pipeline/plot/editors/graph_editor.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "131"
},
{
"name": "C++",
"bytes": "3706"
},
{
"name": "CSS",
"bytes": "279"
},
{
"name": "Fortran",
"bytes": "455875"
},
{
"name": "HTML",
"bytes": "40346"
},
{
"name": "Mako",
"bytes": "412"
},
{
"name": "Processing",
"bytes": "11421"
},
{
"name": "Python",
"bytes": "10234954"
},
{
"name": "Shell",
"bytes": "10753"
}
],
"symlink_target": ""
}
|
import findLibs
from imagedb import imagedb
from imagedb import jobColumns, flickrColumns
from paths import imageBase
from flickrInterface import flickrObject
from facebookInterface import facebookObject
from tumblrInterface import tumblrObject
from twitterInterface import twitterObject
import web
from web import form
import os,sys
import json
import threading
import datetime
import time
def postPendingJobs():
  """Post every job the image db currently reports as pending."""
  for row in imagedb.getJobsToDo():
    postJob(row[0])
def postJob(jobId):
  """Post queued job *jobId* to flickr first, then fan out to the other
  enabled services.

  Flickr is mandatory because the other services reuse the flickr photo
  URLs.  Per-service completion is tracked in jobDict["jobStatus"] and
  persisted after each step so an interrupted job can resume.
  """
  jobData = imagedb.getJob(jobId, jobColumns)[0]
  jobDict = {}
  #TODO: imageid == jobid may not always be true
  jobDict["imageId"] = jobId
  # Pair the row tuple with its column names; normalize "None" strings.
  for idx, val in enumerate(jobColumns):
    data = str(jobData[idx])
    if data == "None":
      data = None
    jobDict[val] = data
  #cleanup: coerce string columns back into useful python types
  jobDict["fileurl"] = os.path.join(imageBase, jobDict["dburl"])
  jobDict["width"] = int(jobDict["width"])
  jobDict["height"] = int(jobDict["height"])
  jobDict["jobDict"] = json.loads(jobDict["jobDict"])
  if jobDict["tags"] != None:
    jobDict["tags"] = jobDict["tags"].split(",")
  else:
    jobDict["tags"] = []
  if jobDict["flickrSets"] != None:
    jobDict["flickrSets"] = jobDict["flickrSets"].split(",")
  if jobDict["flickrGroups"] != None:
    jobDict["flickrGroups"] = jobDict["flickrGroups"].split(",")
  jobDict["geoCode"] = int(jobDict["geoCode"])
  jobDict["latitude"] = float(jobDict["latitude"])
  jobDict["longitude"] = float(jobDict["longitude"])
  serviceDict = jobDict["jobDict"]
  # Older jobs may predate per-service status tracking; default all False.
  if "jobStatus" not in serviceDict.keys():
    jobStatus = {"flickr":False, "facebook": False, "tumblr": False, "twitter": False}
    serviceDict["jobStatus"] = jobStatus
  print "GOING TO POST JOB: " + str(jobId)
  print serviceDict
  # Flickr is required and must be authorised before anything else runs.
  if serviceDict["flickr"] != True:
    raise Exception("need at least flickr to work!")
  else:
    if flickrObject.authorised == False:
      raise Exception("need at least flickr to work!")
    if serviceDict["jobStatus"]["flickr"] == False:
      try:
        postFlickrImage(jobDict)
        serviceDict["jobStatus"]["flickr"] = True
        # Persist progress so a crash can resume without re-posting.
        imagedb.updateJobWorking(jobId, "queued", json.dumps(serviceDict))
      except:
        print "Flickr error:", sys.exc_info()[0]
        return
  # NOTE(review): rows fetched with flickrColumns are indexed by string
  # keys here, unlike the positional indexing above -- presumably
  # imagedb.getJob returns mapping-like rows for this case; confirm.
  flickrData = imagedb.getJob(jobId, flickrColumns)[0]
  jobDict["flickrurl"] = flickrData["url"]
  jobDict["flickrshorturl"] = flickrData["shorturl"]
  jobDict["flickrImageThumbUrl"] = flickrData["imageThumbUrl"]
  jobDict["flickrImageLargeUrl"] = flickrData["imageLargeUrl"]
  # Remaining services are optional: skipped when disabled, unauthorised
  # or already completed; a failure aborts and leaves the job resumable.
  if serviceDict["facebook"]:
    if facebookObject.authorised:
      if serviceDict["jobStatus"]["facebook"] == False:
        try:
          postFacebookPost(jobDict)
          serviceDict["jobStatus"]["facebook"] = True
          imagedb.updateJobWorking(jobId, "queued", json.dumps(serviceDict))
        except:
          print "Facebook error:", sys.exc_info()[0]
          return
  if serviceDict["tumblr"]:
    if tumblrObject.authorised:
      if serviceDict["jobStatus"]["tumblr"] == False:
        try:
          postTumblrPost(jobDict)
          serviceDict["jobStatus"]["tumblr"] = True
          imagedb.updateJobWorking(jobId, "queued", json.dumps(serviceDict))
        except:
          print "Tumblr error:", sys.exc_info()[0]
          return
  if serviceDict["twitter"]:
    if twitterObject.authorised:
      if serviceDict["jobStatus"]["twitter"] == False:
        try:
          postTwitterPost(jobDict)
          serviceDict["jobStatus"]["twitter"] = True
          imagedb.updateJobWorking(jobId, "queued", json.dumps(serviceDict))
        except:
          print "Twitter error:", sys.exc_info()[0]
          return
  imagedb.updateJobWorking(jobId, "done", json.dumps(serviceDict))
  print "job complete! id:" + str(jobId)
def postTwitterPost(jobDict):
  """Thin wrapper: post the prepared job to twitter."""
  twitterObject.postImagePost(jobDict)
def postTumblrPost(jobDict):
  """Thin wrapper: post the prepared job to tumblr."""
  tumblrObject.postImagePost(jobDict)
def postFacebookPost(jobDict):
  """Thin wrapper: post the prepared job to facebook."""
  facebookObject.postImagePost(jobDict)
def postFlickrImage(jobDict):
  """Upload the image to flickr and decorate *jobDict* with the
  resulting photo URLs (page, short, thumbnail, large).

  Location/group/set failures are logged and tolerated; upload or size
  lookup failures raise and abort the job.
  """
  #do it!
  try:
    photoid = flickrObject.postImage(jobDict)
  except:
    raise Exception("photo upload error, giving up")
  # geoCode 0 means "no location attached to this job".
  if jobDict["geoCode"] != 0:
    try:
      flickrObject.setLocation(photoid, jobDict["latitude"], jobDict["longitude"])
    except:
      print "ERROR IN SETTING LOCATION - you may need to set pref at http://www.flickr.com/account/geo/privacy/"
  try:
    flickrObject.sendToGroups(photoid, jobDict["flickrGroups"])
  except:
    print "ERROR IN SENDING TO FLICKR GROUPS"
  try:
    flickrObject.sendToSets(photoid, jobDict["flickrSets"])
  except:
    print "ERROR IN SENDING TO FLICKR SETS"
  try:
    sizes = flickrObject.getPhotoImageSizes(photoid)
  except:
    raise Exception("photo uploaded but can't get sizes, giving up")
  imagethumburl = ""
  imagelargeurl = ""
  # Pick the thumbnail and large renditions out of flickr's size list.
  for size in sizes:
    if size["label"] == "Large Square":
      imagethumburl = size["source"]
    elif size["label"] == "Large":
      imagelargeurl = size["source"]
  infoDict = flickrObject.getPhotoInfo(photoid)
  url = infoDict["urls"]["url"][0]["_content"]
  shorturl = flickrObject.getShortURL(photoid)
  # Mirror the URLs into jobDict for downstream services and persist them.
  jobDict["flickrurl"] = url
  jobDict["flickrshorturl"] = shorturl
  jobDict["flickrImageThumbUrl"] = imagethumburl
  jobDict["flickrImageLargeUrl"] = imagelargeurl
  imagedb.updateFlickrPostUpload(jobDict["imageId"], photoid, url, shorturl, imagethumburl, imagelargeurl)
## Run the job posting thread
minuteSeconds = 60  # seconds per minute
interval = 10 #runs every interval minutes
class RunJobs(threading.Thread):
  """Background worker that flushes pending jobs every `interval` minutes."""
  def run(self):
    while True:
      message = "Running pending jobs at time: %s" % (datetime.datetime.now())
      print(message)
      postPendingJobs()
      time.sleep(interval * minuteSeconds)
# Start the poster thread at import time; daemonized so it dies with
# the main process instead of keeping it alive.
runJobsThread = RunJobs()
runJobsThread.daemon = True
runJobsThread.start()
|
{
"content_hash": "28fcff47281da7c1d67222934ebb8e70",
"timestamp": "",
"source": "github",
"line_count": 193,
"max_line_length": 112,
"avg_line_length": 30.373056994818654,
"alnum_prop": 0.6784373933810987,
"repo_name": "bhautikj/imageCatapult",
"id": "314c683407a1b21c0041cd4707d650b2cec5cf09",
"size": "6996",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "job.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "7281"
},
{
"name": "JavaScript",
"bytes": "47479"
},
{
"name": "Python",
"bytes": "87261"
}
],
"symlink_target": ""
}
|
from django import forms
from django.contrib.auth.models import Group
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Reset, Layout, Div
from .models import User
class UserForm(forms.ModelForm):
    """Minimal profile-edit form exposing only the user's name fields."""
    class Meta:
        # Set this form to use the User model.
        model = User
        # Constrain the UserForm to just these fields.
        fields = ("first_name", "last_name")
class CreateUserForm(forms.ModelForm):
    """Create/edit form for users with a profile group and a doubled
    password entry, rendered through crispy-forms."""
    perfil = forms.ModelChoiceField(queryset=Group.objects.all())
    password = forms.CharField(widget=forms.PasswordInput())
    password2 = forms.CharField(label='Repetir password',
                                widget=forms.PasswordInput())

    class Meta:
        # Set this form to use the User model.
        model = User
        fields = ("first_name",
                  "last_name",
                  "username",
                  "password",
                  "password2",
                  "perfil")

    def __init__(self, *args, **kwargs):
        super(CreateUserForm, self).__init__(*args, **kwargs)
        helper = FormHelper()
        helper.add_input(Submit('user_submit', 'Guardar'))
        helper.add_input(Reset('user_reset', 'Limpiar',
                               css_class='btn-default'))
        self.helper = helper
        # Remember the original username (when editing) so the
        # uniqueness check can skip an unchanged name.
        instance = kwargs.get('instance')
        self.username = instance.username if instance is not None else None

    def clean_username(self):
        """Reject usernames that are already taken by another user."""
        username = self.cleaned_data['username']
        changed = self.username and self.username != username
        if changed and User.objects.filter(username=username).exists():
            raise forms.ValidationError("El nombre de usuario ya esta en uso.")
        return username

    def clean(self):
        """Ensure both password fields match."""
        password = self.cleaned_data.get('password')
        repeated = self.cleaned_data.get('password2')
        if password and password != repeated:
            raise forms.ValidationError("Las contraseñas no son iguales.")
        return self.cleaned_data
|
{
"content_hash": "e47e90afcd4a292ef9e00553b3a85424",
"timestamp": "",
"source": "github",
"line_count": 66,
"max_line_length": 87,
"avg_line_length": 33.15151515151515,
"alnum_prop": 0.6078610603290676,
"repo_name": "diegoduncan21/subastas",
"id": "aab49be05d9df685b4c5c6b7b816fd1fad95143b",
"size": "2213",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "subastas_repo/users/forms.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "CSS",
"bytes": "1906"
},
{
"name": "HTML",
"bytes": "51394"
},
{
"name": "JavaScript",
"bytes": "2387"
},
{
"name": "Python",
"bytes": "66230"
}
],
"symlink_target": ""
}
|
import psutil
import qtutil
from .temporal_filter import *
from .util import custom_qt_items as cqt
from .util.plugin import PluginDefault
from .util.plugin import WidgetDefault
from .util.custom_qt_items import MyProgressDialog
class Widget(QWidget, WidgetDefault):
    """Concatenation plugin widget: joins selected videos end-to-end."""

    class Labels(WidgetDefault.Labels):
        pass

    class Defaults(WidgetDefault.Defaults):
        pass

    def __init__(self, project, plugin_position, parent=None):
        super(Widget, self).__init__(parent)
        if not project or not isinstance(plugin_position, int):
            return
        self.concat_butt = QPushButton('Concatenate')
        WidgetDefault.__init__(self, project, plugin_position)

    def setup_ui(self):
        """Build the UI: draggable video list, explanatory note, button."""
        super().setup_ui()
        # Drag & drop reorders the list for convenience only; the actual
        # concatenation order is the *selection* order (see info text).
        self.video_list.setAcceptDrops(True)
        self.video_list.setDragEnabled(True)
        self.video_list.setDropIndicatorShown(True)
        self.video_list.setDragDropMode(QAbstractItemView.InternalMove)
        self.video_list.setDefaultDropAction(Qt.MoveAction)
        self.video_list.setDragDropOverwriteMode(False)
        self.vbox.addWidget(cqt.InfoWidget('Note that there is no explicit progress bar. '
                                           'Note that videos can be dragged and dropped in the list but that the order '
                                           'in which they are *selected* determines concatenation order. The '
                                           'dragging and dropping is for convenience so you can organize your desired '
                                           'order and then shift select them from top to bottom to concatenate '
                                           'that selection in that order'))
        hhbox = QHBoxLayout()
        hhbox.addWidget(self.concat_butt)
        self.vbox.addLayout(hhbox)
        self.vbox.addStretch()

    def setup_signals(self):
        """Wire the Concatenate button to the primary action."""
        super().setup_signals()
        self.concat_butt.clicked.connect(self.execute_primary_function)

    def execute_primary_function(self, input_paths=None):
        """Concatenate videos along the time axis and save to the project.

        input_paths: optional explicit list of paths (used by automation);
        falls back to the current list selection when omitted.
        Returns a one-element list with the saved output path, or None when
        there is nothing to do. Raises MemoryError when the concatenated
        result would exceed available memory.
        """
        if not input_paths:
            if not self.selected_videos:
                return
            selected_videos = self.selected_videos
        else:
            selected_videos = input_paths

        # BUGFIX: measure the videos that will actually be concatenated.
        # The original always summed self.selected_videos, which is wrong
        # when input_paths is supplied by the automation pipeline.
        summed_filesize = sum(os.path.getsize(path) for path in selected_videos)
        available = psutil.virtual_memory().available
        if summed_filesize > available:
            msg = ("Not enough memory. Concatenated file is of size ~" +
                   str(summed_filesize) + " and available memory is: " + str(available))
            qtutil.critical(msg)
            raise MemoryError(msg)

        paths = selected_videos
        if len(paths) < 2:
            qtutil.warning('Select multiple files to concatenate.')
            return
        frames = [file_io.load_file(f) for f in paths]
        progress = MyProgressDialog('Concatenation', 'Concatenating files...', self)
        progress.show()
        progress.setValue(1)
        frames = np.concatenate(frames)
        progress.setValue(99)
        # First one has to take the name otherwise pfs.save_projects doesn't work
        filenames = [os.path.basename(path) for path in paths]
        manip = 'concat-'+str(len(filenames))
        output_path = pfs.save_project(paths[0], self.project, frames, manip, 'video')
        pfs.refresh_list(self.project, self.video_list,
                         self.params[self.Labels.video_list_indices_label],
                         self.Defaults.list_display_type,
                         self.params[self.Labels.last_manips_to_display_label])
        progress.close()
        return [output_path]
class MyPlugin(PluginDefault):
    """Plugin wrapper exposing the concatenation widget to the host app."""

    def __init__(self, project, plugin_position):
        self.name = 'Concatenation'
        self.widget = Widget(project, plugin_position)
        super().__init__(self.widget, self.widget.Labels, self.name)

    def check_ready_for_automation(self, expected_input_number):
        """Return True when the concatenated output would fit in memory.

        Caches the computed sizes on self so automation_error_message can
        report them (mirrors the original side effect).
        """
        self.summed_filesize = sum(
            os.path.getsize(path) for path in self.widget.selected_videos
        )
        # virtual_memory() returns a namedtuple; use the named `available`
        # field instead of positional index [1] (same value, clearer).
        self.available = psutil.virtual_memory().available
        return self.summed_filesize <= self.available

    def output_number_expected(self, expected_input_number=None):
        """Concatenation always produces exactly one output video."""
        return 1

    def automation_error_message(self):
        """Explain the memory shortfall found by check_ready_for_automation."""
        return "Not enough memory. Concatenated file is of size ~"+str(self.summed_filesize) +\
               " and available memory is: " + str(self.available)
if __name__ == '__main__':
    # Manual smoke test: show the bare widget in a window.
    app = QApplication(sys.argv)
    app.aboutToQuit.connect(app.deleteLater)
    w = QMainWindow()
    # BUGFIX: Widget.__init__ requires (project, plugin_position); the
    # original Widget(None) raised TypeError. Pass both as None — the
    # constructor bails out early for a falsy project, giving a bare widget.
    w.setCentralWidget(Widget(None, None))
    w.show()
    app.exec_()
    sys.exit()
|
{
"content_hash": "4ff0ef3f77f42fa8e9019e8154fa9897",
"timestamp": "",
"source": "github",
"line_count": 121,
"max_line_length": 120,
"avg_line_length": 40.97520661157025,
"alnum_prop": 0.6202097620008068,
"repo_name": "Frikster/Mesoscale-Brain-Explorer",
"id": "26c9f8e2ccd994c5d26dd58a8e1f89a15ff630d7",
"size": "4982",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "src/plugins/concatenation.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "501951"
}
],
"symlink_target": ""
}
|
"""users/forms.py: User forms."""
from flask_wtf import FlaskForm
from wtforms import Form, BooleanField, StringField
from wtforms.validators import DataRequired, Length
class UserForm(FlaskForm):
    """Form with a single required "matrícula" field (3-10 characters)."""

    matricula = StringField(u'Matrícula', validators=[Length(min=3, max=10), DataRequired()])
    # NOTE(review): fields below are disabled placeholders kept for future use.
    #email = StringField('Correo', validators=[Length(min=6, max=120)])
    #notify = BooleanField('Notificar por Twitter', default=True)
|
{
"content_hash": "b98a477b865fd00a8348bed774278b5b",
"timestamp": "",
"source": "github",
"line_count": 10,
"max_line_length": 93,
"avg_line_length": 43.1,
"alnum_prop": 0.7470997679814385,
"repo_name": "marcanuy/notificamesta",
"id": "83f22c4ac428bdfa19a9db3115355692b5bae2b5",
"size": "478",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "notificamesta/users/forms.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "CSS",
"bytes": "2070"
},
{
"name": "HTML",
"bytes": "21443"
},
{
"name": "Python",
"bytes": "43677"
},
{
"name": "Shell",
"bytes": "221"
}
],
"symlink_target": ""
}
|
import numpy as np
from numba import cuda
from numba.cuda.kernels.transpose import transpose
from numba.cuda.testing import unittest
from numba.tests.ddt import ddt, data, unpack
from numba.cuda.testing import skip_on_cudasim
@skip_on_cudasim('Device Array API unsupported in the simulator')
@ddt
class Test(unittest.TestCase):
    """Checks cuda.kernels.transpose against numpy's transpose result."""

    @data((5, 6, np.float64),
          (128, 128, np.complex128),
          (1025, 512, np.float64))
    @unpack
    def test_transpose(self, rows, cols, dtype):
        # Host-side input/output buffers; output starts zeroed.
        host_in = np.arange(rows * cols, dtype=dtype).reshape(cols, rows)
        host_out = np.zeros(rows * cols, dtype=dtype).reshape(rows, cols)
        # Copy input to the device and allocate a device output of the
        # same layout as the host output.
        dev_in = cuda.to_device(host_in)
        dev_out = cuda.cudadrv.devicearray.from_array_like(host_out)
        transpose(dev_in, dev_out)
        dev_out.copy_to_host(host_out)
        self.assertTrue(np.all(host_in.transpose() == host_out))
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
|
{
"content_hash": "e67fc07c460523db1d83b524790c08d9",
"timestamp": "",
"source": "github",
"line_count": 28,
"max_line_length": 67,
"avg_line_length": 31.392857142857142,
"alnum_prop": 0.6473265073947668,
"repo_name": "pombredanne/numba",
"id": "66fd8e38dde2ea535b69c051ceba198f2c5bb341",
"size": "879",
"binary": false,
"copies": "3",
"ref": "refs/heads/master",
"path": "numba/cuda/tests/cudapy/test_transpose.py",
"mode": "33188",
"license": "bsd-2-clause",
"language": [
{
"name": "Batchfile",
"bytes": "2212"
},
{
"name": "C",
"bytes": "249112"
},
{
"name": "C++",
"bytes": "17024"
},
{
"name": "Cuda",
"bytes": "214"
},
{
"name": "HTML",
"bytes": "98846"
},
{
"name": "PowerShell",
"bytes": "3153"
},
{
"name": "Python",
"bytes": "3320040"
},
{
"name": "Shell",
"bytes": "120"
}
],
"symlink_target": ""
}
|
import logging
from ufs_tools.python_app_utils.base import AppBase
from iconizer.utils import start_script_app_iconized
# NOTE(review): executes at import time — adds default module search paths
# as a side effect before anything else in this module runs.
AppBase().add_default_module_path()
__author__ = 'weijia'
log = logging.getLogger(__name__)
if __name__ == '__main__':
    # logging.basicConfig(level=logging.DEBUG)
    # Launch the named script through iconizer's start helper.
    start_script_app_iconized("universal_controller_app")
|
{
"content_hash": "bcabcefdd97a0b1df684502de02acaec",
"timestamp": "",
"source": "github",
"line_count": 16,
"max_line_length": 57,
"avg_line_length": 21.9375,
"alnum_prop": 0.717948717948718,
"repo_name": "weijia/iconizer",
"id": "de2e514d5319743b5bce5be76a2043b246ba79e2",
"size": "351",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "iconizer/app_starter_example.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Batchfile",
"bytes": "88"
},
{
"name": "Makefile",
"bytes": "1186"
},
{
"name": "Python",
"bytes": "99297"
}
],
"symlink_target": ""
}
|
"""Support for AVM FRITZ!SmartHome thermostate devices."""
from __future__ import annotations
from typing import Any
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
ATTR_HVAC_MODE,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_COMFORT,
PRESET_ECO,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.components.sensor import ATTR_STATE_CLASS
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import (
ATTR_BATTERY_LEVEL,
ATTR_DEVICE_CLASS,
ATTR_ENTITY_ID,
ATTR_NAME,
ATTR_TEMPERATURE,
ATTR_UNIT_OF_MEASUREMENT,
PRECISION_HALVES,
TEMP_CELSIUS,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from . import FritzBoxEntity
from .const import (
ATTR_STATE_BATTERY_LOW,
ATTR_STATE_DEVICE_LOCKED,
ATTR_STATE_HOLIDAY_MODE,
ATTR_STATE_LOCKED,
ATTR_STATE_SUMMER_MODE,
ATTR_STATE_WINDOW_OPEN,
CONF_COORDINATOR,
DOMAIN as FRITZBOX_DOMAIN,
)
from .model import ClimateExtraAttributes
# Climate capabilities advertised by these thermostats.
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE

OPERATION_LIST = [HVAC_MODE_HEAT, HVAC_MODE_OFF]

# Temperature range offered to the UI (°C).
MIN_TEMPERATURE = 8
MAX_TEMPERATURE = 28

PRESET_MANUAL = "manual"

# special temperatures for on/off in Fritz!Box API (modified by pyfritzhome)
ON_API_TEMPERATURE = 127.0
OFF_API_TEMPERATURE = 126.5
# ...and the values reported to Home Assistant in place of those sentinels.
ON_REPORT_SET_TEMPERATURE = 30.0
OFF_REPORT_SET_TEMPERATURE = 0.0
async def async_setup_entry(
    hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback
) -> None:
    """Set up FRITZ!SmartHome thermostat entities from a config entry."""
    coordinator = hass.data[FRITZBOX_DOMAIN][entry.entry_id][CONF_COORDINATOR]

    # One climate entity per coordinator device that has a thermostat.
    entities: list[FritzboxThermostat] = [
        FritzboxThermostat(
            {
                ATTR_NAME: f"{device.name}",
                ATTR_ENTITY_ID: f"{device.ain}",
                ATTR_UNIT_OF_MEASUREMENT: None,
                ATTR_DEVICE_CLASS: None,
                ATTR_STATE_CLASS: None,
            },
            coordinator,
            ain,
        )
        for ain, device in coordinator.data.items()
        if device.has_thermostat
    ]

    async_add_entities(entities)
class FritzboxThermostat(FritzBoxEntity, ClimateEntity):
    """The thermostat class for FRITZ!SmartHome thermostates."""

    @property
    def supported_features(self) -> int:
        """Return the list of supported features."""
        return SUPPORT_FLAGS

    @property
    def temperature_unit(self) -> str:
        """Return the unit of measurement that is used."""
        return TEMP_CELSIUS

    @property
    def precision(self) -> float:
        """Return precision 0.5."""
        return PRECISION_HALVES

    @property
    def current_temperature(self) -> float:
        """Return the current temperature."""
        return self.device.actual_temperature  # type: ignore [no-any-return]

    @property
    def target_temperature(self) -> float:
        """Return the temperature we try to reach."""
        # Translate the Fritz!Box API on/off sentinel temperatures into the
        # values reported to Home Assistant.
        if self.device.target_temperature == ON_API_TEMPERATURE:
            return ON_REPORT_SET_TEMPERATURE
        if self.device.target_temperature == OFF_API_TEMPERATURE:
            return OFF_REPORT_SET_TEMPERATURE
        return self.device.target_temperature  # type: ignore [no-any-return]

    async def async_set_temperature(self, **kwargs: Any) -> None:
        """Set new target temperature (or delegate to an HVAC-mode change)."""
        if kwargs.get(ATTR_HVAC_MODE) is not None:
            hvac_mode = kwargs[ATTR_HVAC_MODE]
            await self.async_set_hvac_mode(hvac_mode)
        elif kwargs.get(ATTR_TEMPERATURE) is not None:
            temperature = kwargs[ATTR_TEMPERATURE]
            # pyfritzhome is blocking; run it in the executor.
            await self.hass.async_add_executor_job(
                self.device.set_target_temperature, temperature
            )
        await self.coordinator.async_refresh()

    @property
    def hvac_mode(self) -> str:
        """Return the current operation mode."""
        # Either the reported or the raw API "off" sentinel means heating is off.
        if self.device.target_temperature in (
            OFF_REPORT_SET_TEMPERATURE,
            OFF_API_TEMPERATURE,
        ):
            return HVAC_MODE_OFF
        return HVAC_MODE_HEAT

    @property
    def hvac_modes(self) -> list[str]:
        """Return the list of available operation modes."""
        return OPERATION_LIST

    async def async_set_hvac_mode(self, hvac_mode: str) -> None:
        """Set new operation mode."""
        if hvac_mode == HVAC_MODE_OFF:
            await self.async_set_temperature(temperature=OFF_REPORT_SET_TEMPERATURE)
        else:
            # "Heat" resumes at the device's configured comfort temperature.
            await self.async_set_temperature(
                temperature=self.device.comfort_temperature
            )

    @property
    def preset_mode(self) -> str | None:
        """Return current preset mode."""
        if self.device.target_temperature == self.device.comfort_temperature:
            return PRESET_COMFORT
        if self.device.target_temperature == self.device.eco_temperature:
            return PRESET_ECO
        return None

    @property
    def preset_modes(self) -> list[str]:
        """Return supported preset modes."""
        return [PRESET_ECO, PRESET_COMFORT]

    async def async_set_preset_mode(self, preset_mode: str) -> None:
        """Set preset mode."""
        if preset_mode == PRESET_COMFORT:
            await self.async_set_temperature(
                temperature=self.device.comfort_temperature
            )
        elif preset_mode == PRESET_ECO:
            await self.async_set_temperature(temperature=self.device.eco_temperature)

    @property
    def min_temp(self) -> int:
        """Return the minimum temperature."""
        return MIN_TEMPERATURE

    @property
    def max_temp(self) -> int:
        """Return the maximum temperature."""
        return MAX_TEMPERATURE

    @property
    def extra_state_attributes(self) -> ClimateExtraAttributes:
        """Return the device specific state attributes."""
        attrs: ClimateExtraAttributes = {
            ATTR_STATE_BATTERY_LOW: self.device.battery_low,
            ATTR_STATE_DEVICE_LOCKED: self.device.device_lock,
            ATTR_STATE_LOCKED: self.device.lock,
        }

        # the following attributes are available since fritzos 7
        if self.device.battery_level is not None:
            attrs[ATTR_BATTERY_LEVEL] = self.device.battery_level
        if self.device.holiday_active is not None:
            attrs[ATTR_STATE_HOLIDAY_MODE] = self.device.holiday_active
        if self.device.summer_active is not None:
            attrs[ATTR_STATE_SUMMER_MODE] = self.device.summer_active
        # BUGFIX: the original tested `ATTR_STATE_WINDOW_OPEN is not None`
        # (a constant, always true), so the attribute was emitted even on
        # firmware without window-open support. Test the device value,
        # matching the sibling checks above.
        if self.device.window_open is not None:
            attrs[ATTR_STATE_WINDOW_OPEN] = self.device.window_open
        return attrs
|
{
"content_hash": "86434c34b0c14addbf817c6aa97416a4",
"timestamp": "",
"source": "github",
"line_count": 209,
"max_line_length": 85,
"avg_line_length": 33.119617224880386,
"alnum_prop": 0.6421554464027738,
"repo_name": "sander76/home-assistant",
"id": "4baa1b3b81afc669371f1e041a31942d87be72b4",
"size": "6922",
"binary": false,
"copies": "1",
"ref": "refs/heads/dev",
"path": "homeassistant/components/fritzbox/climate.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "1795"
},
{
"name": "Python",
"bytes": "36548768"
},
{
"name": "Shell",
"bytes": "4910"
}
],
"symlink_target": ""
}
|
try:
from unittest import mock
except ImportError: # pragma: NO PY3 COVER
import mock
import pytest
from google.cloud.ndb import context as context_module
from google.cloud.ndb import _eventloop
from google.cloud.ndb import exceptions
from google.cloud.ndb import _remote
from google.cloud.ndb import tasklets
from . import utils
def test___all__():
    """Delegate __all__ verification to the shared test helper."""
    utils.verify___all__(tasklets)


def test_add_flow_exception():
    """add_flow_exception raises NotImplementedError (stubbed API)."""
    with pytest.raises(NotImplementedError):
        tasklets.add_flow_exception()
class TestFuture:
    """Unit tests for tasklets.Future."""

    @staticmethod
    def test_constructor():
        future = tasklets.Future()
        assert future.running()
        assert not future.done()
        assert future.info == "Unknown"

    @staticmethod
    def test_constructor_w_info():
        future = tasklets.Future("Testing")
        assert future.running()
        assert not future.done()
        assert future.info == "Testing"

    @staticmethod
    def test___repr__():
        future = tasklets.Future("The Children")
        assert repr(future) == "Future('The Children') <{}>".format(id(future))

    @staticmethod
    def test_set_result():
        future = tasklets.Future()
        future.set_result(42)
        assert future.result() == 42
        assert future.get_result() == 42
        assert future.done()
        assert not future.running()
        assert future.exception() is None
        assert future.get_exception() is None
        assert future.get_traceback() is None

    @staticmethod
    def test_set_result_already_done():
        # Setting a result twice is an error.
        future = tasklets.Future()
        future.set_result(42)
        with pytest.raises(RuntimeError):
            future.set_result(42)

    @staticmethod
    def test_add_done_callback():
        # All registered callbacks fire once the result is set.
        callback1 = mock.Mock()
        callback2 = mock.Mock()
        future = tasklets.Future()
        future.add_done_callback(callback1)
        future.add_done_callback(callback2)
        future.set_result(42)
        callback1.assert_called_once_with(future)
        callback2.assert_called_once_with(future)

    @staticmethod
    def test_add_done_callback_already_done():
        # Registering after completion fires the callback immediately.
        callback = mock.Mock()
        future = tasklets.Future()
        future.set_result(42)
        future.add_done_callback(callback)
        callback.assert_called_once_with(future)

    @staticmethod
    def test_set_exception():
        future = tasklets.Future()
        error = Exception("Spurious Error")
        future.set_exception(error)
        assert future.exception() is error
        assert future.get_exception() is error
        assert future.get_traceback() is getattr(error, "__traceback__", None)
        with pytest.raises(Exception):
            future.result()

    @staticmethod
    def test_set_exception_with_callback():
        callback = mock.Mock()
        future = tasklets.Future()
        future.add_done_callback(callback)
        error = Exception("Spurious Error")
        future.set_exception(error)
        assert future.exception() is error
        assert future.get_exception() is error
        assert future.get_traceback() is getattr(error, "__traceback__", None)
        callback.assert_called_once_with(future)

    @staticmethod
    def test_set_exception_already_done():
        future = tasklets.Future()
        error = Exception("Spurious Error")
        future.set_exception(error)
        with pytest.raises(RuntimeError):
            future.set_exception(error)

    @staticmethod
    @mock.patch("google.cloud.ndb.tasklets._eventloop")
    def test_wait(_eventloop):
        # The mocked event loop sets the result on its third run1() pass.
        def side_effects(future):
            yield True
            yield True
            future.set_result(42)
            yield True

        future = tasklets.Future()
        _eventloop.run1.side_effect = side_effects(future)
        future.wait()
        assert future.result() == 42
        assert _eventloop.run1.call_count == 3

    @staticmethod
    @mock.patch("google.cloud.ndb.tasklets._eventloop")
    def test_wait_loop_exhausted(_eventloop):
        # run1() returning False with the future unfinished is fatal.
        future = tasklets.Future()
        _eventloop.run1.return_value = False
        with pytest.raises(RuntimeError):
            future.wait()

    @staticmethod
    @mock.patch("google.cloud.ndb.tasklets._eventloop")
    def test_check_success(_eventloop):
        def side_effects(future):
            yield True
            yield True
            future.set_result(42)
            yield True

        future = tasklets.Future()
        _eventloop.run1.side_effect = side_effects(future)
        future.check_success()
        assert future.result() == 42
        assert _eventloop.run1.call_count == 3

    @staticmethod
    @mock.patch("google.cloud.ndb.tasklets._eventloop")
    def test_check_success_failure(_eventloop):
        error = Exception("Spurious error")

        def side_effects(future):
            yield True
            yield True
            future.set_exception(error)
            yield True

        future = tasklets.Future()
        _eventloop.run1.side_effect = side_effects(future)
        with pytest.raises(Exception) as error_context:
            future.check_success()
        assert error_context.value is error

    @staticmethod
    @mock.patch("google.cloud.ndb.tasklets._eventloop")
    def test_result_block_for_result(_eventloop):
        # result() itself drives the event loop until the result arrives.
        def side_effects(future):
            yield True
            yield True
            future.set_result(42)
            yield True

        future = tasklets.Future()
        _eventloop.run1.side_effect = side_effects(future)
        assert future.result() == 42
        assert _eventloop.run1.call_count == 3

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_cancel():
        # Integration test. Actually test that a cancel propagates properly.
        rpc = tasklets.Future("Fake RPC")
        wrapped_rpc = _remote.RemoteCall(rpc, "Wrapped Fake RPC")

        @tasklets.tasklet
        def inner_tasklet():
            yield wrapped_rpc

        @tasklets.tasklet
        def outer_tasklet():
            yield inner_tasklet()

        future = outer_tasklet()
        assert not future.cancelled()
        future.cancel()
        # Cancellation must reach all the way down to the underlying RPC.
        assert rpc.cancelled()
        with pytest.raises(exceptions.Cancelled):
            future.result()
        assert future.cancelled()

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_cancel_already_done():
        future = tasklets.Future("testing")
        future.set_result(42)
        future.cancel() # noop
        assert not future.cancelled()
        assert future.result() == 42

    @staticmethod
    def test_cancelled():
        future = tasklets.Future()
        assert future.cancelled() is False

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_wait_any():
        futures = [tasklets.Future() for _ in range(3)]

        def callback():
            futures[1].set_result(42)

        _eventloop.add_idle(callback)
        future = tasklets.Future.wait_any(futures)
        assert future is futures[1]
        assert future.result() == 42

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_wait_any_loop_exhausted():
        # No callback ever completes a future, so the loop runs dry.
        futures = [tasklets.Future() for _ in range(3)]
        with pytest.raises(RuntimeError):
            tasklets.Future.wait_any(futures)

    @staticmethod
    def test_wait_any_no_futures():
        assert tasklets.Future.wait_any(()) is None

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_wait_all():
        futures = [tasklets.Future() for _ in range(3)]

        def make_callback(index, result):
            def callback():
                futures[index].set_result(result)

            return callback

        _eventloop.add_idle(make_callback(0, 42))
        _eventloop.add_idle(make_callback(1, 43))
        _eventloop.add_idle(make_callback(2, 44))
        tasklets.Future.wait_all(futures)
        assert futures[0].done()
        assert futures[0].result() == 42
        assert futures[1].done()
        assert futures[1].result() == 43
        assert futures[2].done()
        assert futures[2].result() == 44

    @staticmethod
    def test_wait_all_no_futures():
        assert tasklets.Future.wait_all(()) is None
class Test_TaskletFuture:
    """Unit tests for tasklets._TaskletFuture (a tasklet's driving future)."""

    @staticmethod
    def test_constructor():
        generator = object()
        context = object()
        future = tasklets._TaskletFuture(generator, context)
        assert future.generator is generator
        assert future.context is context
        assert future.info == "Unknown"

    @staticmethod
    def test___repr__():
        future = tasklets._TaskletFuture(None, None, info="Female")
        assert repr(future) == "_TaskletFuture('Female') <{}>".format(id(future))

    @staticmethod
    def test__advance_tasklet_return(in_context):
        # Raising Return finishes the tasklet with that value as the result.
        def generator_function():
            yield
            raise tasklets.Return(42)

        generator = generator_function()
        next(generator) # skip ahead to return
        future = tasklets._TaskletFuture(generator, in_context)
        future._advance_tasklet()
        assert future.result() == 42

    @staticmethod
    def test__advance_tasklet_generator_raises(in_context):
        error = Exception("Spurious error.")

        def generator_function():
            yield
            raise error

        generator = generator_function()
        next(generator) # skip ahead to return
        future = tasklets._TaskletFuture(generator, in_context)
        future._advance_tasklet()
        assert future.exception() is error

    @staticmethod
    def test__advance_tasklet_bad_yield(in_context):
        # Yielding something that is not a future/RPC is a RuntimeError.
        def generator_function():
            yield 42

        generator = generator_function()
        future = tasklets._TaskletFuture(generator, in_context)
        with pytest.raises(RuntimeError):
            future._advance_tasklet()

    @staticmethod
    def test__advance_tasklet_dependency_returns(in_context):
        # The dependency's result is sent back into the generator.
        def generator_function(dependency):
            some_value = yield dependency
            raise tasklets.Return(some_value + 42)

        dependency = tasklets.Future()
        generator = generator_function(dependency)
        future = tasklets._TaskletFuture(generator, in_context)
        future._advance_tasklet()
        dependency.set_result(21)
        assert future.result() == 63

    @staticmethod
    def test__advance_tasklet_dependency_raises(in_context):
        def generator_function(dependency):
            yield dependency

        error = Exception("Spurious error.")
        dependency = tasklets.Future()
        generator = generator_function(dependency)
        future = tasklets._TaskletFuture(generator, in_context)
        future._advance_tasklet()
        dependency.set_exception(error)
        assert future.exception() is error
        with pytest.raises(Exception):
            future.result()

    @staticmethod
    def test__advance_tasklet_dependency_raises_with_try_except(in_context):
        # A dependency error is thrown into the generator and can be caught.
        def generator_function(dependency, error_handler):
            try:
                yield dependency
            except Exception:
                result = yield error_handler
                raise tasklets.Return(result)

        error = Exception("Spurious error.")
        dependency = tasklets.Future()
        error_handler = tasklets.Future()
        generator = generator_function(dependency, error_handler)
        future = tasklets._TaskletFuture(generator, in_context)
        future._advance_tasklet()
        dependency.set_exception(error)
        assert future.running()
        error_handler.set_result("hi mom!")
        assert future.result() == "hi mom!"

    @staticmethod
    def test__advance_tasklet_yields_rpc(in_context):
        # Yielding a RemoteCall wires its done-callback to resume the tasklet.
        def generator_function(dependency):
            value = yield dependency
            raise tasklets.Return(value + 3)

        dependency = mock.Mock(spec=_remote.RemoteCall)
        dependency.exception.return_value = None
        dependency.result.return_value = 8
        generator = generator_function(dependency)
        future = tasklets._TaskletFuture(generator, in_context)
        future._advance_tasklet()
        callback = dependency.add_done_callback.call_args[0][0]
        callback(dependency)
        _eventloop.run()
        assert future.result() == 11

    @staticmethod
    def test__advance_tasklet_parallel_yield(in_context):
        # Yielding a tuple of futures waits for all and unpacks the results.
        def generator_function(dependencies):
            one, two = yield dependencies
            raise tasklets.Return(one + two)

        dependencies = (tasklets.Future(), tasklets.Future())
        generator = generator_function(dependencies)
        future = tasklets._TaskletFuture(generator, in_context)
        future._advance_tasklet()
        dependencies[0].set_result(8)
        dependencies[1].set_result(3)
        assert future.result() == 11
        assert future.context is in_context

    @staticmethod
    def test_cancel_not_waiting(in_context):
        dependency = tasklets.Future()
        future = tasklets._TaskletFuture(None, in_context)
        future.cancel()
        # Nothing was being waited on, so the unrelated future is untouched.
        assert not dependency.cancelled()
        with pytest.raises(exceptions.Cancelled):
            future.result()

    @staticmethod
    def test_cancel_waiting_on_dependency(in_context):
        def generator_function(dependency):
            yield dependency

        dependency = tasklets.Future()
        generator = generator_function(dependency)
        future = tasklets._TaskletFuture(generator, in_context)
        future._advance_tasklet()
        future.cancel()
        # Cancellation propagates to the dependency being waited on.
        assert dependency.cancelled()
        with pytest.raises(exceptions.Cancelled):
            future.result()
class Test_MultiFuture:
    """Unit tests for tasklets._MultiFuture (aggregate of several futures)."""

    @staticmethod
    def test___repr__():
        this, that = (tasklets.Future("this"), tasklets.Future("that"))
        future = tasklets._MultiFuture((this, that))
        assert repr(future) == (
            "_MultiFuture(Future('this') <{}>,"
            " Future('that') <{}>) <{}>".format(id(this), id(that), id(future))
        )

    @staticmethod
    def test_success():
        # Completes with the tuple of dependency results, in order.
        dependencies = (tasklets.Future(), tasklets.Future())
        future = tasklets._MultiFuture(dependencies)
        dependencies[0].set_result("one")
        dependencies[1].set_result("two")
        assert future.result() == ("one", "two")

    @staticmethod
    def test_error():
        # Any dependency failure fails the aggregate.
        dependencies = (tasklets.Future(), tasklets.Future())
        future = tasklets._MultiFuture(dependencies)
        error = Exception("Spurious error.")
        dependencies[0].set_exception(error)
        dependencies[1].set_result("two")
        assert future.exception() is error
        with pytest.raises(Exception):
            future.result()

    @staticmethod
    def test_cancel():
        # Cancelling the aggregate cancels every dependency.
        dependencies = (tasklets.Future(), tasklets.Future())
        future = tasklets._MultiFuture(dependencies)
        future.cancel()
        assert dependencies[0].cancelled()
        assert dependencies[1].cancelled()
        with pytest.raises(exceptions.Cancelled):
            future.result()

    @staticmethod
    def test_no_dependencies():
        future = tasklets._MultiFuture(())
        assert future.result() == ()

    @staticmethod
    def test_nested():
        # Nested sequences of futures are resolved recursively.
        dependencies = [tasklets.Future() for _ in range(3)]
        future = tasklets._MultiFuture((dependencies[0], dependencies[1:]))
        for i, dependency in enumerate(dependencies):
            dependency.set_result(i)
        assert future.result() == (0, (1, 2))
class Test__get_return_value:
    """Unit tests for tasklets._get_return_value.

    Covers extracting 0, 1, or many values from a StopIteration's args.
    """

    @staticmethod
    def test_no_args():
        stop = StopIteration()
        assert tasklets._get_return_value(stop) is None

    @staticmethod
    def test_one_arg():
        stop = StopIteration(42)
        assert tasklets._get_return_value(stop) == 42

    @staticmethod
    def test_two_args():
        # Multiple args come back as a tuple.
        stop = StopIteration(42, 21)
        assert tasklets._get_return_value(stop) == (42, 21)
class Test_tasklet:
    """Unit tests for the @tasklets.tasklet decorator."""

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_generator():
        @tasklets.tasklet
        def generator(dependency):
            value = yield dependency
            raise tasklets.Return(value + 3)

        dependency = tasklets.Future()
        future = generator(dependency)
        assert isinstance(future, tasklets._TaskletFuture)
        dependency.set_result(8)
        assert future.result() == 11

    # Can't make this work with 2.7, because the return with argument inside
    # generator error crashes the pytest collection process, even with skip
    # @staticmethod
    # @pytest.mark.skipif(sys.version_info[0] == 2, reason="requires python3")
    # @pytest.mark.usefixtures("in_context")
    # def test_generator_using_return():
    #     @tasklets.tasklet
    #     def generator(dependency):
    #         value = yield dependency
    #         return value + 3
    #     dependency = tasklets.Future()
    #     future = generator(dependency)
    #     assert isinstance(future, tasklets._TaskletFuture)
    #     dependency.set_result(8)
    #     assert future.result() == 11

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_regular_function():
        # A non-generator function still returns a (completed) Future.
        @tasklets.tasklet
        def regular_function(value):
            return value + 3

        future = regular_function(8)
        assert isinstance(future, tasklets.Future)
        assert future.result() == 11

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_regular_function_raises_Return():
        @tasklets.tasklet
        def regular_function(value):
            raise tasklets.Return(value + 3)

        future = regular_function(8)
        assert isinstance(future, tasklets.Future)
        assert future.result() == 11

    @staticmethod
    def test_context_management(in_context):
        # Each tasklet keeps running in the context it was *started* in,
        # even after that context's `use()` block has exited.
        @tasklets.tasklet
        def some_task(transaction, future):
            assert context_module.get_context().transaction == transaction
            yield future
            raise tasklets.Return(context_module.get_context().transaction)

        future_foo = tasklets.Future("foo")
        with in_context.new(transaction="foo").use():
            task_foo = some_task("foo", future_foo)

        future_bar = tasklets.Future("bar")
        with in_context.new(transaction="bar").use():
            task_bar = some_task("bar", future_bar)

        future_foo.set_result(None)
        future_bar.set_result(None)
        assert task_foo.result() == "foo"
        assert task_bar.result() == "bar"

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_context_changed_in_tasklet():
        # A context entered *inside* a tasklet survives across its yields.
        @tasklets.tasklet
        def some_task(transaction, future1, future2):
            context = context_module.get_context()
            assert context.transaction is None
            with context.new(transaction=transaction).use():
                assert context_module.get_context().transaction == transaction
                yield future1
                assert context_module.get_context().transaction == transaction
                yield future2
                assert context_module.get_context().transaction == transaction
            assert context_module.get_context() is context

        future_foo1 = tasklets.Future("foo1")
        future_foo2 = tasklets.Future("foo2")
        task_foo = some_task("foo", future_foo1, future_foo2)
        future_bar1 = tasklets.Future("bar1")
        future_bar2 = tasklets.Future("bar2")
        task_bar = some_task("bar", future_bar1, future_bar2)
        # Resolve the futures interleaved to exercise context switching.
        future_foo1.set_result(None)
        future_bar1.set_result(None)
        future_foo2.set_result(None)
        future_bar2.set_result(None)
        task_foo.check_success()
        task_bar.check_success()
class Test_wait_any:
    """Unit tests for the module-level tasklets.wait_any helper."""

    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_it():
        futures = [tasklets.Future() for _ in range(3)]

        def callback():
            futures[1].set_result(42)

        _eventloop.add_idle(callback)
        # Returns the first future to complete.
        future = tasklets.wait_any(futures)
        assert future is futures[1]
        assert future.result() == 42

    @staticmethod
    def test_it_no_futures():
        assert tasklets.wait_any(()) is None
class Test_wait_all:
    @staticmethod
    @pytest.mark.usefixtures("in_context")
    def test_it():
        """wait_all blocks until every future has a result."""
        pending = [tasklets.Future() for _ in range(3)]

        def resolver(index, value):
            def callback():
                pending[index].set_result(value)
            return callback

        for offset in range(3):
            _eventloop.add_idle(resolver(offset, 42 + offset))

        tasklets.wait_all(pending)
        for offset, future in enumerate(pending):
            assert future.done()
            assert future.result() == 42 + offset

    @staticmethod
    def test_it_no_futures():
        """With nothing to wait on, wait_all returns None immediately."""
        assert tasklets.wait_all(()) is None
@pytest.mark.usefixtures("in_context")
@mock.patch("google.cloud.ndb._eventloop.time")
def test_sleep(time_module, context):
    """tasklets.sleep resolves to None after the requested delay.

    ``time.time`` is stubbed so its third call reports that one second has
    elapsed; the event loop must then call ``time.sleep`` exactly once with
    the full delay.
    """
    # NOTE(review): ``context`` appears to be a pytest fixture supplied by
    # conftest (it is unused in the body) — confirm against the test suite.
    time_module.time.side_effect = [0, 0, 1]
    future = tasklets.sleep(1)
    assert future.get_result() is None
    time_module.sleep.assert_called_once_with(1)
def test_make_context():
    """The legacy ``make_context`` API is intentionally unsupported."""
    with pytest.raises(NotImplementedError) as excinfo:
        tasklets.make_context()
    assert excinfo.type is NotImplementedError
def test_make_default_context():
    """The legacy ``make_default_context`` API is intentionally unsupported."""
    with pytest.raises(NotImplementedError) as excinfo:
        tasklets.make_default_context()
    assert excinfo.type is NotImplementedError
class TestQueueFuture:
    @staticmethod
    def test_constructor():
        """QueueFuture is a legacy API and must refuse construction."""
        with pytest.raises(NotImplementedError) as excinfo:
            tasklets.QueueFuture()
        assert excinfo.type is NotImplementedError
class TestReducingFuture:
    @staticmethod
    def test_constructor():
        """ReducingFuture is a legacy API and must refuse construction."""
        with pytest.raises(NotImplementedError) as excinfo:
            tasklets.ReducingFuture()
        assert excinfo.type is NotImplementedError
def test_Return():
    """Return derives from Exception but must NOT be a StopIteration.

    Deriving from StopIteration would break generator semantics under
    PEP 479 (Python 3.7+), where StopIteration raised inside a generator
    becomes a RuntimeError.
    """
    assert issubclass(tasklets.Return, Exception)
    assert not issubclass(tasklets.Return, StopIteration)
class TestSerialQueueFuture:
    @staticmethod
    def test_constructor():
        """SerialQueueFuture is a legacy API and must refuse construction."""
        with pytest.raises(NotImplementedError) as excinfo:
            tasklets.SerialQueueFuture()
        assert excinfo.type is NotImplementedError
def test_set_context():
    """The legacy ``set_context`` API is intentionally unsupported."""
    with pytest.raises(NotImplementedError) as excinfo:
        tasklets.set_context()
    assert excinfo.type is NotImplementedError
@pytest.mark.usefixtures("in_context")
def test_synctasklet():
    """A synctasklet drives the generator to completion synchronously."""
    @tasklets.synctasklet
    def add_three(value):
        dependency = tasklets.Future(value)
        dependency.set_result(value)
        resolved = yield dependency
        raise tasklets.Return(resolved + 3)

    assert add_three(8) == 11
@pytest.mark.usefixtures("in_context")
def test_toplevel():
    """toplevel runs the tasklet and drains the event loop afterwards."""
    @tasklets.toplevel
    def add_three(value):
        dependency = tasklets.Future(value)
        dependency.set_result(value)
        resolved = yield dependency
        raise tasklets.Return(resolved + 3)

    # An idle callback queued before the call must have been run (the
    # event loop is drained) by the time toplevel returns.
    drained = mock.Mock(__name__="idle", return_value=None)
    _eventloop.add_idle(drained)
    assert add_three(8) == 11
    drained.assert_called_once_with()
|
{
"content_hash": "6d9501c3f185c57f92e3dc34fd91d60e",
"timestamp": "",
"source": "github",
"line_count": 742,
"max_line_length": 81,
"avg_line_length": 31.040431266846362,
"alnum_prop": 0.6222646752344564,
"repo_name": "googleapis/python-ndb",
"id": "ce00f7f1c842b2794ac7be19d93608abae8a4ca7",
"size": "23608",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "tests/unit/test_tasklets.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "2210"
},
{
"name": "Python",
"bytes": "1621801"
},
{
"name": "Shell",
"bytes": "29468"
}
],
"symlink_target": ""
}
|
from datetime import datetime, timedelta
from images.images import FAVICON, PIXEL
import uuid
from flask import Flask, make_response, request, redirect
app = Flask(__name__)
app.permanent_session_lifetime = timedelta(days=365)
def add_headers(response):
    """
    Adds custom user agent and query string headers, appends aguid and
    myguid cookie values to query string header

    :response: Flask response object; headers are mutated in place.
    """
    # Collapse spaces so the UA survives as a single header token.
    ua = request.headers.get('User-Agent', '-')
    response.headers['X-User-Agent'] = ua.replace(' ', '+')
    # Serialize cookies set on this response as k=v pairs, skipping
    # cookies with empty values (e.g. a myguid deletion).
    cookie_qs = "&".join(['='.join([k, v])
                          for k, v in get_cookie_dict(response).items() if v])
    # NOTE(review): under Python 3 / modern Flask, request.query_string is
    # bytes, which would break this str join — this module appears to
    # assume Python 2. Confirm target runtime before upgrading.
    qs = "&".join([x for x in [request.query_string, cookie_qs] if x])
    response.headers['X-Uri-Query'] = qs
def get_cookie_dict(response):
    """
    Get cookie dictionary from Set-Cookie response header
    """
    cookies = {}
    for header_value in response.headers.getlist('Set-Cookie'):
        # Only the leading "name=value" pair matters; attributes such as
        # Domain/Path/Expires follow after "; " and are dropped.
        name, value = header_value.split("; ")[0].split("=")
        cookies[name] = value
    return cookies
def get_cookie_domain():
    """
    Determines the root domain for a domain-wide cookie

    Assumptions:
    - Domain tld is not two-level (.com, not .co.uk)
    - Code is not being accessed via hostname (testing on localhost)

    Outputs:
    :domain: domain for domain-wide cookie
    """
    # Drop any :port suffix, then keep just the registered domain + TLD.
    host_only = request.host.split(':')[0]
    root_parts = host_only.split('.')[-2:]
    # Dot prefix makes the cookie apply to every subdomain.
    return '.' + '.'.join(root_parts)
def update_or_set_cookie(response):
    """
    Determines if there is an aguid or myguid cookie with the current request
    Updates cookie value if it is not valid
    Updates cookie expiration if it already exists and is valid
    Sets a domain-wide aguid cookie if there is not one
    Adds P3P Policy

    :response: Flask response object; cookies and headers set in place.
    """
    # Add P3P Policy (legacy IE compact privacy-policy header)
    response.headers['P3P'] = 'CP="ALL DSP COR CURa IND PHY UNR"'
    expires = datetime.utcnow() + app.permanent_session_lifetime
    domain = get_cookie_domain()
    try:
        # Validate aguid cookie. uuid.UUID raises ValueError on a
        # malformed value and TypeError when the cookie is absent (None).
        aguid = uuid.UUID(request.cookies.get('aguid'))
    except (ValueError, TypeError):
        # Set new aguid value
        aguid = uuid.uuid4()
    finally:
        # Update expiration or set aguid cookie — runs in every case, so
        # the aguid cookie is always (re)issued with a fresh expiry.
        response.set_cookie('aguid', aguid.hex,
                            expires=expires, domain=domain)
    try:
        # Validate myguid cookie
        myguid = uuid.UUID(request.cookies.get('myguid'))
    except ValueError:
        # Delete invalid myguid cookie
        response.set_cookie('myguid', '', expires=0, domain=domain)
    except TypeError:
        # Nothing to do since myguid does not exist
        pass
    else:
        # Update myguid cookie expiration. Unlike aguid, myguid is never
        # created here — only refreshed when already present and valid.
        response.set_cookie('myguid', myguid.hex,
                            expires=expires, domain=domain)
@app.route("/pixel.gif")
def pixel_gif():
    """
    Returns a tracking pixel with an attached anonymous cookie
    """
    pixel_response = make_response(PIXEL)
    pixel_response.headers['Content-Type'] = 'image/gif'
    update_or_set_cookie(pixel_response)
    add_headers(pixel_response)
    return pixel_response
@app.route("/favicon.ico")
def favicon_ico():
    """
    Returns a tracking favicon.ico with an attached anonymous cookie
    """
    icon_response = make_response(FAVICON)
    icon_response.headers['Content-Type'] = 'image/x-icon'
    update_or_set_cookie(icon_response)
    add_headers(icon_response)
    return icon_response
@app.route("/", defaults={'path': ''})
@app.route("/<path:path>")
def redirect_all(path):
    """
    Redirects all other requests to www.my.jobs,
    keeping the requested path
    """
    target = "http://www.my.jobs/%s" % path
    # 301 so crawlers treat the move as permanent.
    return redirect(target, code=301)
if __name__ == "__main__":
    # Run the Flask development server when executed directly.
    app.run()
|
{
"content_hash": "2ea14d53f96342131ce3b364f5c389af",
"timestamp": "",
"source": "github",
"line_count": 125,
"max_line_length": 77,
"avg_line_length": 29.48,
"alnum_prop": 0.633921302578019,
"repo_name": "DirectEmployers/MyJobs-pixel",
"id": "d7cabee2b35e675cc0f5143846da5b41e7ebb463",
"size": "3685",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "pixel.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "JavaScript",
"bytes": "103210"
},
{
"name": "Python",
"bytes": "11449"
}
],
"symlink_target": ""
}
|
import fnmatch
import importlib
import inspect
import json
import os
import pdb
import re
import sys
import unittest
import traceback
from datetime import datetime
from collections import OrderedDict
# This ensures that absolute imports of typ modules will work when
# running typ/runner.py as a script even if typ is not installed.
# We need this entry in addition to the one in __main__.py to ensure
# that typ/runner.py works when invoked via subprocess on windows in
# _spawn_main().
path_to_file = os.path.realpath(__file__)
if path_to_file.endswith('.pyc'):  # pragma: no cover
    # Map a compiled-module path back to its .py source file.
    path_to_file = path_to_file[:-1]
dir_above_typ = os.path.dirname(os.path.dirname(path_to_file))
dir_cov = os.path.join(os.path.dirname(dir_above_typ), 'coverage')
for path in (dir_above_typ, dir_cov):
    if path not in sys.path:  # pragma: no cover
        sys.path.append(path)
from typ import artifacts
from typ import json_results
from typ import result_sink
from typ.arg_parser import ArgumentParser
from typ.expectations_parser import TestExpectations, Expectation
from typ.host import Host
from typ.pool import make_pool_group
from typ.stats import Stats
from typ.printer import Printer
from typ.test_case import TestCase as TypTestCase
from typ.version import VERSION
Result = json_results.Result
ResultSet = json_results.ResultSet
ResultType = json_results.ResultType
FailureReason = json_results.FailureReason
# Matches the first line of stack entries in formatted Python tracebacks.
# The first capture group is the name of the file, the second is the line.
# The method name is not extracted.
# See: https://github.com/python/cpython/blob/3.10/Lib/traceback.py#L440
_TRACEBACK_FILE_RE = re.compile(r'^ File "[^"]*[/\\](.*)", line ([0-9]+), in ')
def main(argv=None, host=None, win_multiprocessing=None, **defaults):
    """Module-level entry point: build a Runner and delegate to it.

    Args:
        argv: argument list, or None to let the parser use sys.argv.
        host: optional Host abstraction; a real one is created if omitted.
        win_multiprocessing: optional WinMultiprocessing strategy override.
        **defaults: default argument values forwarded to Runner.main.

    Returns:
        The runner's integer exit code.
    """
    runner = Runner(host=host or Host())
    if win_multiprocessing is not None:
        runner.win_multiprocessing = win_multiprocessing
    return runner.main(argv, **defaults)
class TestInput(object):
    """A single test to run: its name plus scheduling metadata.

    :name: fully-qualified test name (with the configured prefix removed).
    :msg: message recorded with a skipped test's result.
    :timeout: optional per-test timeout.
    :expected: optional expected result types.
    :iteration: which run (0 = first pass, >0 = retry attempts).
    """
    def __init__(self, name, msg='', timeout=None, expected=None, iteration=0):
        self.name = name
        self.msg = msg
        self.timeout = timeout
        self.expected = expected
        # Iteration makes more sense as part of the test run, not the test
        # input, but since the pool used to run tests persists across
        # iterations, we need to store the iteration number in something that
        # gets updated each test run, such as TestInput.
        self.iteration = iteration
class TestSet(object):
    """Groups discovered tests into parallel, isolated, and skipped lists."""

    def __init__(self, test_name_prefix='', iteration=0):
        self.test_name_prefix = test_name_prefix
        self.parallel_tests = []
        self.isolated_tests = []
        self.tests_to_skip = []
        self.iteration = iteration

    def copy(self):
        """Return a copy with independent (shallow-copied) test lists.

        NOTE(review): the copy is created at iteration 0; the current
        iteration is not carried over — confirm that is intentional.
        """
        duplicate = TestSet(self.test_name_prefix)
        duplicate.tests_to_skip = list(self.tests_to_skip)
        duplicate.isolated_tests = list(self.isolated_tests)
        duplicate.parallel_tests = list(self.parallel_tests)
        return duplicate

    def _get_test_name(self, test_case):
        """Strip the configured prefix from the case's id (validated)."""
        _validate_test_starts_with_prefix(
            self.test_name_prefix, test_case.id())
        return test_case.id()[len(self.test_name_prefix):]

    def add_test_to_skip(self, test_case, reason=''):
        """Record *test_case* as skipped, with *reason* as its message."""
        entry = TestInput(self._get_test_name(test_case), reason,
                          iteration=self.iteration)
        self.tests_to_skip.append(entry)

    def add_test_to_run_isolated(self, test_case):
        """Queue *test_case* to run serially (not in the parallel pool)."""
        entry = TestInput(self._get_test_name(test_case),
                          iteration=self.iteration)
        self.isolated_tests.append(entry)

    def add_test_to_run_in_parallel(self, test_case):
        """Queue *test_case* for the parallel pool."""
        entry = TestInput(self._get_test_name(test_case),
                          iteration=self.iteration)
        self.parallel_tests.append(entry)
def _validate_test_starts_with_prefix(prefix, test_name):
assert test_name.startswith(prefix), (
'The test prefix passed at the command line does not match the prefix '
'of all the tests generated')
class WinMultiprocessing(object):
    """Strategies for multiprocessing on Windows (which has no fork()).

    Used by Runner._check_win_multiprocessing to decide whether the runner
    must re-spawn itself so child processes can re-import __main__.
    """
    # Skip the check entirely (illegal when actually running on win32).
    ignore = 'ignore'
    # Require that the __main__ module be importable by child processes.
    importable = 'importable'
    # Re-invoke the runner as a subprocess of this file (the default).
    spawn = 'spawn'

    values = [ignore, importable, spawn]
class _AddTestsError(Exception):
    """Raised when discovered tests cannot be added to a TestSet."""
    pass
class Runner(object):
def __init__(self, host=None):
    """Create a Runner with default configuration.

    Most attributes are populated later by parse_args()/_set_up_runner();
    hooks (classifier, context, setup_fn, teardown_fn) may be injected by
    embedders before run() is called.
    """
    self.args = None
    self.classifier = None
    self.cov = None
    self.context = None
    self.coverage_source = None
    self.host = host or Host()
    self.loader = unittest.loader.TestLoader()
    self.printer = None
    self.setup_fn = None
    self.stats = None
    self.teardown_fn = None
    self.top_level_dir = None
    self.top_level_dirs = []
    self.win_multiprocessing = WinMultiprocessing.spawn
    self.final_responses = []
    self.has_expectations = False
    self.expectations = None
    self.metadata = {}
    self.path_delimiter = json_results.DEFAULT_TEST_SEPARATOR
    self.artifact_output_dir = None

    # initialize self.args to the defaults.
    parser = ArgumentParser(self.host)
    self.parse_args(parser, [])
def main(self, argv=None, **defaults):
    """Parse arguments and run; returns the process exit code.

    Ctrl-C maps to the conventional exit code 130 (128 + SIGINT).
    """
    parser = ArgumentParser(self.host)
    self.parse_args(parser, argv, **defaults)
    if parser.exit_status is not None:
        return parser.exit_status

    try:
        exit_code, _, _ = self.run()
    except KeyboardInterrupt:
        self.print_("interrupted, exiting", stream=self.host.stderr)
        return 130
    return exit_code
def parse_args(self, parser, argv, **defaults):
    """Parse *argv* into self.args, applying **defaults** as parser defaults.

    Reports an error (without exiting, via bailout=False) and returns
    early at the first default whose name is not a known argument.
    """
    for attrname in defaults:
        if not hasattr(self.args, attrname):
            parser.error("Unknown default argument name '%s'" % attrname,
                         bailout=False)
            return
    parser.set_defaults(**defaults)
    self.args = parser.parse_args(args=argv)
    if parser.exit_status is not None:
        return
def print_(self, msg='', end='\n', stream=None):
    """Print *msg* through the host abstraction (keeps I/O testable)."""
    self.host.print_(msg, end, stream=stream)
def run(self, test_set=None):
    """Discover and run tests; returns (exit_code, full_results, trace).

    Orchestrates the full pipeline: optional Windows re-spawn, runner
    setup, discovery, execution (with --repeat), summarizing, writing and
    uploading results, trace generation, and coverage reporting.
    """
    ret = 0
    h = self.host

    if self.args.version:
        self.print_(VERSION)
        return ret, None, None

    if self.args.write_full_results_to:
        self.artifact_output_dir = os.path.join(
            os.path.dirname(
                self.args.write_full_results_to), 'artifacts')

    should_spawn = self._check_win_multiprocessing()
    if should_spawn:
        # Re-invoke ourselves in a subprocess so __main__ is importable.
        return self._spawn(test_set)

    ret = self._set_up_runner()
    if ret:
        return ret, None, None

    find_start = h.time()
    if self.cov:  # pragma: no cover
        self.cov.erase()
        self.cov.start()

    full_results = None
    result_set = ResultSet()

    if not test_set:
        ret, test_set = self.find_tests(self.args)
    find_end = h.time()

    if not ret:
        self.stats.total = (len(test_set.parallel_tests) +
                            len(test_set.isolated_tests) +
                            len(test_set.tests_to_skip)) * self.args.repeat
        all_tests = [ti.name for ti in
                     _sort_inputs(test_set.parallel_tests +
                                  test_set.isolated_tests +
                                  test_set.tests_to_skip)]

        # --metadata entries are "key=value" strings; split on the first '='.
        self.metadata = {tup[0]: tup[1]
                         for tup in
                         [md.split('=', 1) for md in self.args.metadata]}

        if self.args.test_name_prefix:
            self.metadata['test_name_prefix'] = self.args.test_name_prefix
        if self.args.tags:
            self.metadata['tags'] = self.args.tags
        if self.args.expectations_files:
            # Either just the basename, or a //-rooted repo-relative path.
            self.metadata['expectations_files'] = [
                os.path.basename(exp)
                if not self.args.repository_absolute_path
                else ('//' + os.path.relpath(
                    exp, self.args.repository_absolute_path).replace(
                        os.path.sep, '/'))
                for exp in self.args.expectations_files]

        if self.args.list_only:
            self.print_('\n'.join(all_tests))
        else:
            self.print_('Start running tests: %s' % str(datetime.now()))
            for _ in range(self.args.repeat):
                current_ret, full_results = self._run_tests(
                    result_set, test_set.copy(), all_tests)
                ret = ret or current_ret

    if self.cov:  # pragma: no cover
        self.cov.stop()
        self.cov.save()
    test_end = h.time()

    trace = self._trace_from_results(result_set)
    if full_results:
        self._summarize(full_results)
        self._write(self.args.write_full_results_to, full_results)
        upload_ret = self._upload(full_results)
        reporting_end = h.time()
        self._add_trace_event(trace, 'run', find_start, reporting_end)
        self._add_trace_event(trace, 'discovery', find_start, find_end)
        self._add_trace_event(trace, 'testing', find_end, test_end)
        self._add_trace_event(trace, 'reporting', test_end, reporting_end)
        self._write(self.args.write_trace_to, trace)
        cov_ret = self.report_coverage() if self.args.coverage else 0

        # Exit with the code of the first failing step, but do not skip
        # any steps with short-circuiting.
        ret = ret or upload_ret or cov_ret

    return ret, full_results, trace
def _check_win_multiprocessing(self):
    """Validate the win_multiprocessing setting; True means we must spawn.

    Raises ValueError for illegal settings (unknown value, or 'ignore'
    while actually running on Windows, or 'importable' when __main__
    cannot be re-imported).
    """
    wmp = self.win_multiprocessing

    ignore, importable, spawn = WinMultiprocessing.values

    if wmp not in WinMultiprocessing.values:
        raise ValueError('illegal value %s for win_multiprocessing' %
                         wmp)

    h = self.host
    if wmp == ignore and h.platform == 'win32':  # pragma: win32
        raise ValueError('Cannot use WinMultiprocessing.ignore for '
                         'win_multiprocessing when actually running '
                         'on Windows.')

    if wmp == ignore or self.args.jobs == 1:
        # Single-job runs never fork/spawn, so there is nothing to check.
        return False

    if wmp == importable:
        if self._main_is_importable():
            return False
        raise ValueError('The __main__ module (%s) '  # pragma: no cover
                         'may not be importable' %
                         sys.modules['__main__'].__file__)

    assert wmp == spawn
    return True
def _main_is_importable(self):  # pragma: untested
    """Heuristically check whether __main__ can be re-imported by children.

    Requires a real .py source file (not __main__.py) that lives under
    some sys.path entry.
    """
    path = sys.modules['__main__'].__file__
    if not path:
        return False
    if path.endswith('.pyc'):
        path = path[:-1]
    if not path.endswith('.py'):
        return False
    if path.endswith('__main__.py'):
        # main modules are not directly importable.
        return False

    path = self.host.realpath(path)
    for d in sys.path:
        if path.startswith(self.host.realpath(d)):
            return True
    return False  # pragma: no cover
def _spawn(self, test_set):
    """Re-run this runner in a subprocess and collect its results.

    Used on Windows (no fork) so the child gets a clean, importable
    __main__ (this file). Results and trace travel back through temp
    files when the caller did not request output files explicitly.
    """
    # TODO: Handle picklable hooks, rather than requiring them to be None.
    assert self.classifier is None
    assert self.context is None
    assert self.setup_fn is None
    assert self.teardown_fn is None
    assert test_set is None

    h = self.host

    if self.args.write_trace_to:  # pragma: untested
        should_delete_trace = False
    else:
        should_delete_trace = True
        fp = h.mktempfile(delete=False)
        fp.close()
        self.args.write_trace_to = fp.name

    if self.args.write_full_results_to:  # pragma: untested
        should_delete_results = False
    else:
        should_delete_results = True
        fp = h.mktempfile(delete=False)
        fp.close()
        self.args.write_full_results_to = fp.name

    # Rebuild an argv equivalent to our parsed args for the child.
    argv = ArgumentParser(h).argv_from_args(self.args)
    ret = h.call_inline([h.python_interpreter, path_to_file] + argv)

    trace = self._read_and_delete(self.args.write_trace_to,
                                  should_delete_trace)
    full_results = self._read_and_delete(self.args.write_full_results_to,
                                         should_delete_results)
    return ret, full_results, trace
def _set_up_runner(self):
    """Initialize stats, printer, paths, coverage, and expectations.

    Returns 0 on success, nonzero on a configuration error (which has
    already been reported to the user).
    """
    h = self.host
    args = self.args

    self.stats = Stats(args.status_format, h.time, args.jobs)
    self.printer = Printer(
        self.print_, args.overwrite, args.terminal_width)

    if self.args.top_level_dirs and self.args.top_level_dir:
        self.print_(
            'Cannot specify both --top-level-dir and --top-level-dirs',
            stream=h.stderr)
        return 1

    self.top_level_dirs = args.top_level_dirs
    if not self.top_level_dirs and args.top_level_dir:
        self.top_level_dirs = [args.top_level_dir]

    if not self.top_level_dirs:
        for test in [t for t in args.tests if h.exists(t)]:
            if h.isdir(test):
                top_dir = test
            else:
                top_dir = h.dirname(test)
            # Walk up past package dirs (those with __init__.py) to the
            # package root.
            while h.exists(top_dir, '__init__.py'):
                top_dir = h.dirname(top_dir)
            top_dir = h.realpath(top_dir)
            if not top_dir in self.top_level_dirs:
                self.top_level_dirs.append(top_dir)
    if not self.top_level_dirs:
        top_dir = h.getcwd()
        while h.exists(top_dir, '__init__.py'):
            top_dir = h.dirname(top_dir)
        top_dir = h.realpath(top_dir)
        self.top_level_dirs.append(top_dir)

    if not self.top_level_dir and self.top_level_dirs:
        self.top_level_dir = self.top_level_dirs[0]

    for path in self.top_level_dirs:
        h.add_to_path(path)

    for path in args.path:
        h.add_to_path(path)

    if args.coverage:  # pragma: no cover
        try:
            import coverage
        except ImportError:
            self.print_('Error: coverage is not installed.')
            return 1

        source = self.args.coverage_source
        if not source:
            source = self.top_level_dirs + self.args.path
        self.coverage_source = source
        self.cov = coverage.Coverage(source=self.coverage_source,
                                     data_suffix=True)
        self.cov.erase()

    if args.expectations_files:
        ret = self.parse_expectations()
        if ret:
            return ret
    elif args.tags:
        self.print_('Error: tags require expectations files.')
        return 1
    return 0
def parse_expectations(self):
    """Load and parse the (single) expectations file.

    Returns a nonzero error code on failure; implicitly returns None on
    success (callers only test truthiness of the return value).
    """
    args = self.args

    if len(args.expectations_files) != 1:
        # TODO(crbug.com/835690): Fix this.
        self.print_(
            'Only a single expectation file is currently supported',
            stream=self.host.stderr)
        return 1
    contents = self.host.read_text_file(args.expectations_files[0])

    expectations = TestExpectations(set(args.tags), args.ignored_tags)
    err, msg = expectations.parse_tagged_list(
        contents, args.expectations_files[0])
    if err:
        self.print_(msg, stream=self.host.stderr)
        return err

    self.has_expectations = True
    self.expectations = expectations
def find_tests(self, args):
    """Discover tests into a TestSet; returns (exit_code, test_set_or_None).

    With --all, skip decorators are temporarily neutralized so skipped
    tests are still collected; the originals are always restored. The
    discovered lists are sorted and sharded deterministically.
    """
    test_set = TestSet(self.args.test_name_prefix)

    orig_skip = unittest.skip
    orig_skip_if = unittest.skipIf
    if args.all:
        # Replace the skip decorators with no-ops so every test runs.
        unittest.skip = lambda reason: lambda x: x
        unittest.skipIf = lambda condition, reason: lambda x: x

    try:
        names = self._name_list_from_args(args)
        classifier = self.classifier or self.default_classifier

        for name in names:
            try:
                self._add_tests_to_set(test_set, args.suffixes,
                                       self.top_level_dirs, classifier,
                                       name)
            except (AttributeError, ImportError, SyntaxError) as e:
                ex_str = traceback.format_exc()
                self.print_('Failed to load "%s" in find_tests: %s' %
                            (name, e))
                self.print_(' %s' %
                            '\n '.join(ex_str.splitlines()))
                self.print_(ex_str)
                return 1, None
            except _AddTestsError as e:
                self.print_(str(e))
                return 1, None

        # TODO: Add support for discovering setupProcess/teardownProcess?

        shard_index = args.shard_index
        total_shards = args.total_shards
        assert total_shards >= 1
        assert shard_index >= 0 and shard_index < total_shards, (
            'shard_index (%d) must be >= 0 and < total_shards (%d)' %
            (shard_index, total_shards))
        # Deterministic sharding: sort, then take every Nth input.
        test_set.parallel_tests = _sort_inputs(
            test_set.parallel_tests)[shard_index::total_shards]
        test_set.isolated_tests = _sort_inputs(
            test_set.isolated_tests)[shard_index::total_shards]
        test_set.tests_to_skip = _sort_inputs(
            test_set.tests_to_skip)[shard_index::total_shards]
        return 0, test_set
    finally:
        unittest.skip = orig_skip
        unittest.skipIf = orig_skip_if
def _name_list_from_args(self, args):
if args.tests:
names = args.tests
elif args.file_list:
if args.file_list == '-':
s = self.host.stdin.read()
else:
s = self.host.read_text_file(args.file_list)
names = [line.strip() for line in s.splitlines()]
else:
names = self.top_level_dirs
return names
def _add_tests_to_set(self, test_set, suffixes, top_level_dirs, classifier,
                      name):
    """Resolve *name* (file, directory, or dotted name) and add its tests.

    *found* de-dupes across top-level dirs so the same module/dir is not
    loaded twice. Loader errors are surfaced as ImportError so callers
    can fail fast (Python 3 otherwise converts them to failing tests).
    """
    h = self.host
    loader = self.loader
    add_tests = _test_adder(test_set, classifier)

    found = set()
    for d in top_level_dirs:
        if h.isfile(name):
            rpath = h.relpath(name, d)
            if rpath.startswith('..'):
                continue
            if rpath.endswith('.py'):
                rpath = rpath[:-3]
            module = rpath.replace(h.sep, '.')
            if module not in found:
                found.add(module)
                add_tests(loader.loadTestsFromName(module))
        elif h.isdir(name):
            rpath = h.relpath(name, d)
            if rpath.startswith('..'):
                continue
            for suffix in suffixes:
                # NOTE(review): this checks bare `name` but adds
                # `name + '/' + suffix`, so only the first suffix is ever
                # discovered per directory — confirm whether intentional.
                if not name in found:
                    found.add(name + '/' + suffix)
                    add_tests(loader.discover(name, suffix, d))
        else:
            possible_dir = name.replace('.', h.sep)
            if h.isdir(d, possible_dir):
                for suffix in suffixes:
                    path = h.join(d, possible_dir)
                    if not path in found:
                        found.add(path + '/' + suffix)
                        suite = loader.discover(path, suffix, d)
                        add_tests(suite)
            elif not name in found:
                found.add(name)
                add_tests(loader.loadTestsFromName(
                    self.args.test_name_prefix + name))

    # pylint: disable=no-member
    if hasattr(loader, 'errors') and loader.errors:  # pragma: python3
        # In Python3's version of unittest, loader failures get converted
        # into failed test cases, rather than raising exceptions. However,
        # the errors also get recorded so you can err out immediately.
        if isinstance(loader.errors, list):
            raise ImportError('\n'.join(loader.errors))
        raise ImportError(loader.errors)
def _run_tests(self, result_set, test_set, all_tests):
    """Run one full pass over *test_set*, including retry iterations.

    Returns (exit_code, full_results). Retries run serially (1 job) and
    their results are appended to *result_set*.
    """
    h = self.host
    self.last_runs_retry_on_failure_tests = set()

    def get_tests_to_retry(results):
        # If the --retry-only-retry-on-failure-tests command line argument
        # is passed , then a set of test failures with the RetryOnFailure
        # expectation from the last run of tests will be returned. The
        # self.last_runs_retry_on_failure_tests will be set to an empty set
        # for the next run of tests. Otherwise all regressions from the
        # last run will be returned.
        if self.args.retry_only_retry_on_failure_tests:
            ret = self.last_runs_retry_on_failure_tests.copy()
            self.last_runs_retry_on_failure_tests = set()
            return ret
        else:
            return json_results.regressions(results)

    if len(test_set.parallel_tests):
        jobs = min(
            len(test_set.parallel_tests), self.args.jobs)
    else:
        jobs = 1

    child = _Child(self)

    pool_group = make_pool_group(h, jobs, self.args.stable_jobs,
                                 _run_one_test, child, _setup_process,
                                 _teardown_process,
                                 self.args.use_global_pool)
    pool_group.make_global_pool()

    self._run_one_set(self.stats, result_set, test_set, jobs,
                      pool_group)

    tests_to_retry = sorted(get_tests_to_retry(result_set))
    retry_limit = self.args.retry_limit
    try:
        # Start at 1 since we already did iteration 0 above.
        for iteration in range(1, self.args.retry_limit + 1):
            if not tests_to_retry:
                break
            if retry_limit == self.args.retry_limit:
                # First retry: stop overwriting lines and quiet down so
                # retry output remains readable.
                self.flush()
                self.args.overwrite = False
                self.printer.should_overwrite = False
                self.args.verbose = min(self.args.verbose, 1)

            self.print_('')
            self.print_('Retrying failed tests (attempt #%d of %d)...' %
                        (iteration, self.args.retry_limit))
            self.print_('')

            stats = Stats(self.args.status_format, h.time, 1)
            stats.total = len(tests_to_retry)
            test_set = TestSet(self.args.test_name_prefix)
            test_set.isolated_tests = [
                TestInput(name,
                          iteration=iteration) for name in tests_to_retry]
            tests_to_retry = test_set
            retry_set = ResultSet()
            self._run_one_set(stats, retry_set, tests_to_retry, 1,
                              pool_group)
            result_set.results.extend(retry_set.results)
            tests_to_retry = get_tests_to_retry(retry_set)
            retry_limit -= 1
        pool_group.close_global_pool()
    finally:
        self.final_responses.extend(pool_group.join_global_pool())

    if retry_limit != self.args.retry_limit:
        self.print_('')

    full_results = json_results.make_full_results(self.metadata,
                                                  int(h.time()),
                                                  all_tests, result_set,
                                                  self.path_delimiter)
    retcode = (json_results.exit_code_from_full_results(full_results)
               | result_sink.result_sink_retcode_from_result_set(result_set))
    return (retcode, full_results)
def _run_one_set(self, stats, result_set, test_set, jobs, pool_group):
    """Run one test set: record skips, then parallel, then isolated tests."""
    self._skip_tests(stats, result_set, test_set.tests_to_skip)

    pool = pool_group.make_parallel_pool()
    try:
        self._run_list(stats, result_set,
                       test_set.parallel_tests, jobs, pool)
        pool_group.close_parallel_pool()
    finally:
        # Always collect worker teardown responses, even on error.
        self.final_responses.extend(pool_group.join_parallel_pool())

    pool = pool_group.make_serial_pool()
    try:
        # Isolated tests run one at a time.
        self._run_list(stats, result_set,
                       test_set.isolated_tests, 1, pool)
        pool_group.close_serial_pool()
    finally:
        self.final_responses.extend(pool_group.join_serial_pool())
def _skip_tests(self, stats, result_set, tests_to_skip):
    """Record a Skip result for each skipped test without running it."""
    for test_input in tests_to_skip:
        last = self.host.time()
        stats.started += 1
        self._print_test_started(stats, test_input)
        now = self.host.time()
        # The skip "ran" for essentially zero time; its message becomes
        # the result's captured output.
        result = Result(test_input.name, actual=ResultType.Skip,
                        started=last, took=(now - last), worker=0,
                        expected=[ResultType.Skip],
                        out=test_input.msg)
        result_set.add(result)
        stats.finished += 1
        self._print_test_finished(stats, result)
def _run_list(self, stats, result_set, test_inputs, jobs, pool):
    """Feed *test_inputs* to *pool*, keeping at most *jobs* in flight.

    Raises RuntimeError when --typ-max-failures is reached, after
    shutting the pool down cleanly.
    """
    running_jobs = set()
    while test_inputs or running_jobs:
        # Top up the pool until it is saturated or inputs run out.
        while test_inputs and (len(running_jobs) < jobs):
            test_input = test_inputs.pop(0)
            stats.started += 1
            pool.send(test_input)
            running_jobs.add(test_input.name)
            self._print_test_started(stats, test_input)

        # Block for the next completed test.
        result, should_retry_on_failure = pool.get()
        if result.is_regression:
            stats.failed += 1
            if (self.args.typ_max_failures is not None
                    and stats.failed >= self.args.typ_max_failures):
                print('\nAborting, waiting for processes to close')
                pool.close()
                pool.join()
                raise RuntimeError(
                    'Encountered %d failures with max of %d set, aborting.' % (
                        stats.failed, self.args.typ_max_failures))

        if (self.args.retry_only_retry_on_failure_tests and
                result.actual == ResultType.Failure and
                should_retry_on_failure):
            self.last_runs_retry_on_failure_tests.add(result.name)
        running_jobs.remove(result.name)
        result_set.add(result)
        stats.finished += 1
        self._print_test_finished(stats, result)
def _print_test_started(self, stats, test_input):
    """Print the status line for a test that has just been queued."""
    if self.args.quiet:
        # Print nothing when --quiet was passed.
        return

    # If -vvv was passed, print when the test is queued to be run.
    # We don't actually know when the test picked up to run, because
    # that is handled by the child process (where we can't easily
    # print things). Otherwise, only print when the test is started
    # if we know we can overwrite the line, so that we do not
    # get multiple lines of output as noise (in -vvv, we actually want
    # the noise).
    test_start_msg = stats.format() + test_input.name
    if self.args.verbose > 2:
        self.update(test_start_msg + ' queued', elide=False)
    if self.args.overwrite:
        self.update(test_start_msg, elide=(not self.args.verbose))
def _print_test_finished(self, stats, result):
    """Print one finished test's status line plus any captured output.

    Regressions always print their stdout/stderr; otherwise output is
    shown only at verbosity > 1 (and nothing at all under --quiet).
    """
    stats.add_time()

    assert result.actual in [ResultType.Failure, ResultType.Skip,
                             ResultType.Pass]
    if result.actual == ResultType.Failure:
        result_str = ' failed'
    elif result.actual == ResultType.Skip:
        result_str = ' was skipped'
    elif result.actual == ResultType.Pass:
        result_str = ' passed'

    if result.unexpected:
        result_str += ' unexpectedly'
    elif result.actual == ResultType.Failure:
        result_str += ' as expected'
    if self.args.timing:
        timing_str = ' %.4fs' % result.took
    else:
        timing_str = ''
    suffix = '%s%s' % (result_str, timing_str)
    out = result.out
    err = result.err
    if result.is_regression:
        if out or err:
            suffix += ':\n'
        self.update(stats.format() + result.name + suffix, elide=False)
        for l in out.splitlines():
            self.print_(' %s' % l)
        for l in err.splitlines():
            self.print_(' %s' % l)
    elif not self.args.quiet:
        if self.args.verbose > 1 and (out or err):
            suffix += ':\n'
        self.update(stats.format() + result.name + suffix,
                    elide=(not self.args.verbose))
        if self.args.verbose > 1:
            for l in out.splitlines():
                self.print_(' %s' % l)
            for l in err.splitlines():
                self.print_(' %s' % l)
        if self.args.verbose:
            self.flush()
def update(self, msg, elide):
    """Write a (possibly overwritable) status line via the printer."""
    self.printer.update(msg, elide)

def flush(self):
    """Force the printer to finish/commit the current status line."""
    self.printer.flush()
def _summarize(self, full_results):
    """Print the pass/skip/fail summary and lists of failing tests."""
    num_passes = json_results.num_passes(full_results)
    num_failures = json_results.num_failures(full_results)
    num_skips = json_results.num_skips(full_results)
    num_regressions = json_results.num_regressions(full_results)

    # Under --quiet, only speak up when something failed.
    if self.args.quiet and num_failures == 0:
        return

    if self.args.timing:
        timing_clause = ' in %.1fs' % (self.host.time() -
                                       self.stats.started_time)
    else:
        timing_clause = ''
    self.update('%d test%s passed%s, %d skipped, %d failure%s.' %
                (num_passes,
                 '' if num_passes == 1 else 's',
                 timing_clause,
                 num_skips,
                 num_failures,
                 '' if num_failures == 1 else 's'), elide=False)
    self.print_()
    if num_failures or num_regressions:
        regressed_tests = json_results.regressed_tests_names(full_results)
        failed_tests = json_results.failed_tests_names(full_results)
        # Expected failures are failures that did not regress.
        expected_failed_tests = failed_tests - regressed_tests
        regressed_tests = sorted(list(regressed_tests))
        expected_failed_tests = sorted(list(expected_failed_tests))
        if expected_failed_tests:
            self.update('Tests that failed as expected:\n', elide=False)
            for t in expected_failed_tests:
                self.print_(' %s' % t)
        if regressed_tests:
            self.update('Tests that regressed (failed unexpectedly)\n', elide=False)
            for t in regressed_tests:
                self.print_(' %s' % t)
def _read_and_delete(self, path, delete):
h = self.host
obj = None
if h.exists(path):
contents = h.read_text_file(path)
if contents:
obj = json.loads(contents)
if delete:
h.remove(path)
return obj
def _write(self, path, obj):
if path:
self.host.write_text_file(path, json.dumps(obj, indent=2) + '\n')
def _upload(self, full_results):
    """POST full results to --test-results-server.

    Returns 0 on success or when no server is configured; 1 when the
    upload raised (the error is printed, not re-raised).
    """
    h = self.host
    if not self.args.test_results_server:
        return 0

    url, content_type, data = json_results.make_upload_request(
        self.args.test_results_server, self.args.builder_name,
        self.args.master_name, self.args.test_type,
        full_results)

    try:
        h.fetch(url, data, {'Content-Type': content_type})
        return 0
    except Exception as e:
        h.print_('Uploading the JSON results raised "%s"' % str(e))
        return 1
def report_coverage(self):  # pragma: no cover
    """Combine and print coverage data; 2 if coverage is below fail_under."""
    self.host.print_()
    import coverage
    cov = coverage.Coverage(data_suffix=True)
    cov.combine()
    percentage = cov.report(show_missing=self.args.coverage_show_missing,
                            omit=self.args.coverage_omit)
    if self.args.coverage_annotate:
        cov.annotate(omit=self.args.coverage_omit)
    # https://coverage.readthedocs.io/en/6.4.2/config.html#report-fail-under
    return 2 if percentage < cov.get_option('report:fail_under') else 0
def _add_trace_event(self, trace, name, start, end):
event = {
'name': name,
'ts': int((start - self.stats.started_time) * 1000000),
'dur': int((end - start) * 1000000),
'ph': 'X',
'pid': self.host.getpid(),
'tid': 0,
}
trace['traceEvents'].append(event)
def _trace_from_results(self, result_set):
trace = OrderedDict()
trace['traceEvents'] = []
trace['otherData'] = {}
if self.metadata:
trace['otherData'] = self.metadata
for result in result_set.results:
started = int((result.started - self.stats.started_time) * 1000000)
took = int(result.took * 1000000)
event = OrderedDict()
event['name'] = result.name
event['dur'] = took
event['ts'] = started
event['ph'] = 'X' # "Complete" events
event['pid'] = result.pid
event['tid'] = result.worker
args = OrderedDict()
args['expected'] = sorted(str(r) for r in result.expected)
args['actual'] = str(result.actual)
args['out'] = result.out
args['err'] = result.err
args['code'] = result.code
args['unexpected'] = result.unexpected
args['flaky'] = result.flaky
args['file'] = result.file_path
args['line'] = result.line_number
event['args'] = args
trace['traceEvents'].append(event)
return trace
def expectations_for(self, test_case):
    """Look up the expectation for test_case (no expectations file -> default)."""
    active = self.expectations if self.has_expectations else None
    return _expectations_for(test_case, active, self.args.test_name_prefix)
def default_classifier(self, test_set, test):
    """Route a test into the test set: skipped, isolated, or parallel."""
    if not self.matches_filter(test):
        return
    if self.should_skip(test):
        test_set.add_test_to_skip(test, 'skipped by request')
    elif self.should_isolate(test):
        test_set.add_test_to_run_isolated(test)
    else:
        test_set.add_test_to_run_in_parallel(test)
def matches_filter(self, test_case):
    """Return True if the prefix-stripped test name passes the CLI filters.

    --test-filter globs win over --partial-match-filter substrings; with
    neither configured, every test matches.
    """
    _validate_test_starts_with_prefix(
        self.args.test_name_prefix, test_case.id())
    name = test_case.id()[len(self.args.test_name_prefix):]
    if self.args.test_filter:
        globs = self.args.test_filter.split('::')
        return any(fnmatch.fnmatch(name, glob) for glob in globs)
    if self.args.partial_match_filter:
        return any(substr in name
                   for substr in self.args.partial_match_filter)
    return True
def should_isolate(self, test_case):
    """Return True if the test name matches any --isolate glob."""
    _validate_test_starts_with_prefix(
        self.args.test_name_prefix, test_case.id())
    name = test_case.id()[len(self.args.test_name_prefix):]
    return any(fnmatch.fnmatch(name, pattern)
               for pattern in self.args.isolate)
def should_skip(self, test_case):
    """Return True if the test should be skipped.

    --all overrides everything; otherwise a test is skipped when its
    expectations include Skip or its name matches a --skip glob.
    """
    _validate_test_starts_with_prefix(
        self.args.test_name_prefix, test_case.id())
    if self.args.all:
        return False
    name = test_case.id()[len(self.args.test_name_prefix):]
    if self.has_expectations:
        expected_results = self.expectations.expectations_for(name).results
    else:
        expected_results = {ResultType.Pass}
    if ResultType.Skip in expected_results:
        return True
    return any(fnmatch.fnmatch(name, pattern) for pattern in self.args.skip)
def _test_adder(test_set, classifier):
def add_tests(obj):
if isinstance(obj, unittest.suite.TestSuite):
for el in obj:
add_tests(el)
elif (obj.id().startswith('unittest.loader.LoadTestsFailure') or
obj.id().startswith('unittest.loader.ModuleImportFailure')):
# Access to protected member pylint: disable=W0212
module_name = obj._testMethodName
try:
method = getattr(obj, obj._testMethodName)
method()
except Exception as e:
if 'LoadTests' in obj.id():
raise _AddTestsError('%s.load_tests() failed: %s'
% (module_name, str(e)))
else:
raise _AddTestsError(str(e))
else:
assert isinstance(obj, unittest.TestCase)
classifier(test_set, obj)
return add_tests
class _Child(object):
def __init__(self, parent):
self.host = None
self.worker_num = None
self.all = parent.args.all
self.debugger = parent.args.debugger
self.coverage = parent.args.coverage and parent.args.jobs > 1
self.coverage_source = parent.coverage_source
self.dry_run = parent.args.dry_run
self.loader = parent.loader
self.passthrough = parent.args.passthrough
self.context = parent.context
self.setup_fn = parent.setup_fn
self.teardown_fn = parent.teardown_fn
self.context_after_setup = None
self.top_level_dir = parent.top_level_dir
self.top_level_dirs = parent.top_level_dirs
self.loaded_suites = {}
self.cov = None
self.has_expectations = parent.has_expectations
self.expectations = parent.expectations
self.test_name_prefix = parent.args.test_name_prefix
self.artifact_output_dir = parent.artifact_output_dir
self.result_sink_reporter = None
self.disable_resultsink = parent.args.disable_resultsink
self.jobs = parent.args.jobs
def expectations_for(self, test_case):
expectations = self.expectations if self.has_expectations else None
return _expectations_for(test_case, expectations, self.test_name_prefix)
def _setup_process(host, worker_num, child):
    """Per-worker initialization.

    Wires up the host and result-sink reporter, starts coverage when
    requested, and runs the user setup hook to produce the worker context.
    Returns the (mutated) child.
    """
    child.host = host
    child.worker_num = worker_num
    child.result_sink_reporter = result_sink.ResultSinkReporter(
        host, child.disable_resultsink)

    # pylint: disable=protected-access
    if child.coverage:  # pragma: no cover
        import coverage
        child.cov = coverage.Coverage(source=child.coverage_source,
                                      data_suffix=True)
        child.cov._warn_no_data = False
        child.cov.start()

    child.context_after_setup = (child.setup_fn(child, child.context)
                                 if child.setup_fn else child.context)
    return child
def _teardown_process(child):
res = None
exc = None
if child.teardown_fn:
try:
res = child.teardown_fn(child, child.context_after_setup)
except Exception as e:
exc = e
pass
if child.cov: # pragma: no cover
child.cov.stop()
child.cov.save()
return (child.worker_num, res, exc)
def _run_one_test(child, test_input):
    """Load and run a single test in this worker process.

    Returns a (Result, should_retry_on_failure) tuple. The function
    captures stdout/stderr for the whole load+run span, honors skip
    expectations (unless --all), and reports the result to ResultSink.
    """
    h = child.host
    pid = h.getpid()
    test_name = test_input.name

    started = h.time()

    # It is important to capture the output before loading the test
    # to ensure that
    # 1) the loader doesn't logs something we don't captured
    # 2) neither the loader nor the test case grab a reference to the
    #    uncaptured stdout or stderr that later is used when the test is run.
    # This comes up when using the FakeTestLoader and testing typ itself,
    # but could come up when testing non-typ code as well.
    h.capture_output(divert=not child.passthrough)
    if child.has_expectations:
        expectation = child.expectations.expectations_for(test_name)
        expected_results, should_retry_on_failure = (
            expectation.results, expectation.should_retry_on_failure)
    else:
        expected_results, should_retry_on_failure = {ResultType.Pass}, False
    ex_str = ''
    try:
        # Temporarily replace unittest's skip decorators so --all can force
        # skipped tests to run; restored in the finally below.
        orig_skip = unittest.skip
        orig_skip_if = unittest.skipIf
        if child.all:
            unittest.skip = lambda reason: lambda x: x
            unittest.skipIf = lambda condition, reason: lambda x: x
        elif ResultType.Skip in expected_results:
            # Expected to skip: don't even load the test.
            h.restore_output()
            return (Result(test_name, ResultType.Skip, started, 0,
                           child.worker_num, expected=expected_results,
                           unexpected=False, pid=pid), False)

        test_name_to_load = child.test_name_prefix + test_name
        try:
            # If we have errors around from before, clear them now so we don't
            # attempt to handle them later.
            if hasattr(child.loader, 'errors') and child.loader.errors:
                child.loader.errors.clear()
            suite = child.loader.loadTestsFromName(test_name_to_load)
            # From Python 3.5, AttributeError will not be thrown when calling
            # LoadTestsFromName. Instead, it adds error messages in the loader.
            # As a result, the original handling cannot kick in properly. We
            # now check the error message and throw exception as needed.
            if hasattr(child.loader, 'errors') and child.loader.errors:
                if isinstance(child.loader.errors, list):
                    raise AttributeError('\n'.join(child.loader.errors))
                raise AttributeError(child.loader.errors)
        except Exception as e:
            ex_str = ('loadTestsFromName("%s") failed: %s\n%s\n' %
                      (test_name_to_load, e, traceback.format_exc()))
            try:
                # Fall back to unittest's load_tests protocol.
                suite = _load_via_load_tests(child, test_name_to_load)
                ex_str += ('\nload_via_load_tests(\"%s\") returned %d tests\n' %
                           (test_name_to_load, len(list(suite))))
            except Exception as e:  # pragma: untested
                suite = []
                ex_str += ('\nload_via_load_tests("%s") failed: %s\n%s\n' %
                           (test_name_to_load, e, traceback.format_exc()))
    finally:
        unittest.skip = orig_skip
        unittest.skipIf = orig_skip_if

    tests = list(suite)
    if len(tests) != 1:
        # Loading must produce exactly one test case; anything else is a
        # loader failure and is reported as an unexpected Failure result.
        err = 'Failed to load "%s" in run_one_test' % test_name
        if ex_str:  # pragma: untested
            err += '\n  ' + '\n  '.join(ex_str.splitlines())

        h.restore_output()
        return (Result(test_name, ResultType.Failure, started, took=0,
                       worker=child.worker_num, unexpected=True, code=1,
                       err=err, pid=pid), False)

    art = artifacts.Artifacts(
        child.artifact_output_dir, h, test_input.iteration, test_name)

    test_case = tests[0]
    if isinstance(test_case, TypTestCase):
        test_case.child = child
        test_case.context = child.context_after_setup
        test_case.set_artifacts(art)

    test_result = unittest.TestResult()
    out = ''
    err = ''
    try:
        if child.dry_run:
            pass
        elif child.debugger:  # pragma: no cover
            _run_under_debugger(h, test_case, suite, test_result)
        else:
            suite.run(test_result)
    finally:
        out, err = h.restore_output()
        # Clear the artifact implementation so that later tests don't try to
        # use a stale instance.
        if isinstance(test_case, TypTestCase):
            test_case.set_artifacts(None)

    took = h.time() - started

    additional_tags = None
    test_location = inspect.getsourcefile(test_case.__class__)
    test_method = getattr(test_case, test_case._testMethodName)
    # Test methods are often wrapped by decorators such as @mock. Try to get to
    # the actual test method instead of the wrapper.
    if hasattr(test_method, '__wrapped__'):
        test_method = test_method.__wrapped__
    # Some tests are generated and don't have valid line numbers. Such test
    # methods also have a source location different from module location.
    if inspect.getsourcefile(test_method) == test_location:
        test_line = inspect.getsourcelines(test_method)[1]
    else:
        test_line = None

    # If the test signaled that it should be retried on failure, do so.
    if isinstance(test_case, TypTestCase):
        additional_tags = test_case.additionalTags
        # Handle the case where the test called self.skipTest, e.g. if it
        # determined that the test is not valid on the current configuration.
        if test_result.skipped and test_case.programmaticSkipIsExpected:
            result = Result(test_name, ResultType.Skip, started, took,
                            child.worker_num, expected={ResultType.Skip},
                            unexpected=False, pid=pid)
            result.result_sink_retcode =\
                child.result_sink_reporter.report_individual_test_result(
                    result, child.artifact_output_dir, child.expectations,
                    test_location, test_line, child.test_name_prefix,
                    additional_tags)
            return (result, False)
        should_retry_on_failure = (should_retry_on_failure
                                   or test_case.retryOnFailure)
    result = _result_from_test_result(test_result, test_name, started, took, out,
                                      err, child.worker_num, pid, test_case,
                                      expected_results, child.has_expectations,
                                      art.artifacts)
    result.result_sink_retcode =\
        child.result_sink_reporter.report_individual_test_result(
            result, child.artifact_output_dir, child.expectations,
            test_location, test_line, child.test_name_prefix,
            additional_tags)
    return (result, should_retry_on_failure)
def _run_under_debugger(host, test_case, suite,
                        test_result):  # pragma: no cover
    """Run the suite under pdb, breaking on the first line of the test body."""
    # Access to protected member pylint: disable=W0212
    method = getattr(test_case, test_case._testMethodName)
    source_file = inspect.getsourcefile(method)
    break_line = inspect.getsourcelines(method)[1] + 1
    debugger = pdb.Pdb(stdout=host.stdout.stream)
    debugger.set_break(source_file, break_line)
    debugger.runcall(suite.run, test_result)
def _result_from_test_result(test_result, test_name, started, took, out, err,
                             worker_num, pid, test_case, expected_results,
                             has_expectations, artifacts):
    """Convert a unittest.TestResult for one test into a typ Result.

    Branch order is significant: failures win over errors, which win over
    skips, expected failures, and unexpected successes; only then is the
    test a plain pass. `err` accumulates the first traceback of the
    winning category.
    """
    failure_reason = None
    if test_result.failures:
        # Assertion failures: the test ran and an assert was violated.
        actual = ResultType.Failure
        code = 1
        err = err + test_result.failures[0][1]
        unexpected = actual not in expected_results
        for i, failure in enumerate(test_result.failures):
            if failure_reason is None:
                failure_reason = _failure_reason_from_traceback(failure[1])
    elif test_result.errors:
        # Unhandled exceptions raised while running the test.
        actual = ResultType.Failure
        code = 1
        err = err + test_result.errors[0][1]
        unexpected = actual not in expected_results
        for i, error in enumerate(test_result.errors):
            if failure_reason is None:
                failure_reason = _failure_reason_from_traceback(error[1])
    elif test_result.skipped:
        actual = ResultType.Skip
        err = err + test_result.skipped[0][1]
        code = 0
        if has_expectations:
            unexpected = actual not in expected_results
        else:
            # With no expectations file, a runtime skip is always expected.
            unexpected = False
            expected_results = {ResultType.Skip}
    elif test_result.expectedFailures:
        actual = ResultType.Failure
        code = 1
        err = err + test_result.expectedFailures[0][1]
        unexpected = False
    elif test_result.unexpectedSuccesses:
        actual = ResultType.Pass
        code = 0
        unexpected = True
    else:
        actual = ResultType.Pass
        code = 0
        unexpected = actual not in expected_results
    flaky = False
    # Unwrap decorated test methods so the reported file/line point at the
    # real test function rather than the wrapper.
    test_func = getattr(test_case, test_case._testMethodName)
    test_func = getattr(test_func, 'real_test_func', test_func)
    file_path = inspect.getsourcefile(test_func)
    line_number = inspect.getsourcelines(test_func)[1]
    return Result(test_name, actual, started, took, worker_num,
                  expected_results, unexpected, flaky, code, out, err, pid,
                  file_path, line_number, artifacts, failure_reason)
def _failure_reason_from_traceback(traceback):
    """Attempts to extract a failure reason from formatted Traceback data.

    The formatted traceback data handled by this method is that populated on
    unittest.TestResult objects in the errors and/or failures attribute(s).
    We reverse this formatting process to obtain the underlying failure
    exception message or assertion failure, excluding stacktrace and other
    data.

    When reading this method, it is useful to read python unittest sources
    at the same time, as this reverses some of the formatting defined there.
    https://github.com/python/cpython/blob/3.10/Lib/unittest/result.py#L119
    https://github.com/python/cpython/blob/3.10/Lib/unittest/result.py#L173
    https://github.com/python/cpython/blob/3.10/Lib/traceback.py#L652

    This method may not succeed in extracting a failure reason. In this case,
    it returns None.
    """
    # NOTE: the parameter shadows the stdlib `traceback` module inside this
    # function; here it is a formatted traceback *string*, not the module.
    lines = traceback.splitlines()

    # Start line index of the interesting region (the line(s) that has
    # the assertion failure or exception emssage).
    start_index = 0
    # End index of the interesting region.
    end_index = len(lines)
    # The file name and line that raised the exception or assertion failure.
    # Formatted as "filename.py(123)".
    context_file_line = None

    # Simple state machine: `in_traceback` is True while we are inside a
    # "Traceback (most recent call last):" block; the interesting message
    # starts right after the last such block ends.
    in_traceback = False
    for i, line in enumerate(lines):
        # Tracebacks precede the interesting message. It is possible
        # for there to be multiple tracebacks blocks in case of chained
        # exceptions. E.g. "While handling a XYZError, the following
        # exception was also raised:". The interesting message is
        # after all such chained stacks.
        if line == 'Traceback (most recent call last):':
            in_traceback = True
            start_index = i + 1
            context_file_line = None
        elif line.startswith('  ') and in_traceback:
            # Continuation of traceback.
            start_index = i + 1

            # Keep track of the last file in the traceback.
            file_match = _TRACEBACK_FILE_RE.match(line)
            if file_match:
                context_file_line = '{}({})'.format(
                    file_match.group(1),
                    file_match.group(2))
        else:
            in_traceback = False

        # The "Stdout:" or "Stderr:" blocks (if present) are after the
        # interesting failure message.
        if line == 'Stdout:' or line == 'Stderr:':
            if i < end_index:
                end_index = i

    interesting_lines = lines[start_index:end_index]
    if len(interesting_lines) > 0 and context_file_line is not None:
        # Let the failure reason be look like:
        # "my_unittest.py(123): AssertionError: unexpectedly None".
        #
        # We include the file and line of the original exception
        # in failure reason, as basic assertion failures
        # (true != false, None is not None, etc.) can be too generic
        # to be clustered in a useful way without this.
        message = '{}: {}'.format(context_file_line,
                                  '\n'.join(interesting_lines).strip())
        return FailureReason(message)
    return None
def _load_via_load_tests(child, test_name):
# If we couldn't import a test directly, the test may be only loadable
# via unittest's load_tests protocol. See if we can find a load_tests
# entry point that will work for this test.
loader = child.loader
comps = test_name.split('.')
new_suite = unittest.TestSuite()
while comps:
name = '.'.join(comps)
module = None
suite = None
if name not in child.loaded_suites:
try:
module = importlib.import_module(name)
except ImportError:
pass
if module:
suite = loader.loadTestsFromModule(module)
child.loaded_suites[name] = suite
suite = child.loaded_suites[name]
if suite:
for test_case in suite:
assert isinstance(test_case, unittest.TestCase)
if test_case.id() == test_name: # pragma: untested
new_suite.addTest(test_case)
break
comps.pop()
return new_suite
def _sort_inputs(inps):
return sorted(inps, key=lambda inp: inp.name)
def _expectations_for(test_case, expectations, test_name_prefix):
test_name = test_case.id()[len(test_name_prefix):]
if expectations:
return expectations.expectations_for(test_name)
else:
return Expectation(test=test_name)
if __name__ == '__main__':  # pragma: no cover
    # Re-point __main__.__file__ so multiprocessing workers can re-import
    # this module by path on platforms without fork (e.g. Windows).
    sys.modules['__main__'].__file__ = path_to_file
    sys.exit(main(win_multiprocessing=WinMultiprocessing.importable))
|
{
"content_hash": "0e9c0732cc1563bd455ff8d63516d268",
"timestamp": "",
"source": "github",
"line_count": 1371,
"max_line_length": 88,
"avg_line_length": 39.31582786287382,
"alnum_prop": 0.5660828911728693,
"repo_name": "catapult-project/catapult",
"id": "dd5a07e969a870047910161c75f5f48c2ac7151c",
"size": "54498",
"binary": false,
"copies": "6",
"ref": "refs/heads/main",
"path": "third_party/typ/typ/runner.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "C",
"bytes": "1324"
},
{
"name": "C++",
"bytes": "46069"
},
{
"name": "CSS",
"bytes": "23376"
},
{
"name": "Dockerfile",
"bytes": "1541"
},
{
"name": "Go",
"bytes": "114396"
},
{
"name": "HTML",
"bytes": "12394298"
},
{
"name": "JavaScript",
"bytes": "1559584"
},
{
"name": "Makefile",
"bytes": "1774"
},
{
"name": "Python",
"bytes": "6778695"
},
{
"name": "Shell",
"bytes": "2288"
}
],
"symlink_target": ""
}
|
import json
from decimal import Decimal
from functools import partial
from unittest.mock import ANY
from freezegun import freeze_time
from prices import Money, fixed_discount
from .....core.prices import quantize_price
from .....discount import DiscountValueType, VoucherType
from .....graphql.core.utils import to_global_id_or_none
from .....order.models import Order
from .....tests.fixtures import recalculate_order
from .....webhook.event_types import WebhookEventSyncType
from .....webhook.models import Webhook
from ...tasks import create_delivery_for_subscription_sync_event
TAXES_SUBSCRIPTION_QUERY = """
subscription {
event {
__typename
... on CalculateTaxes {
taxBase {
pricesEnteredWithTax
currency
shippingPrice {
amount
}
address {
id
}
channel {
id
}
discounts {
amount {
amount
}
}
lines {
quantity
chargeTaxes
productName
variantName
productSku
unitPrice {
amount
}
totalPrice {
amount
}
sourceLine {
__typename
... on CheckoutLine {
id
}
... on OrderLine {
id
}
}
}
sourceObject {
__typename
... on Checkout {
id
}
... on Order {
id
}
}
}
}
}
}
"""
@freeze_time("2020-03-18 12:00:00")
def test_checkout_calculate_taxes(
    checkout_ready_to_complete,
    webhook_app,
    permission_handle_taxes,
):
    """The tax payload for a ready checkout includes address, lines and shipping."""
    # given
    webhook_app.permissions.add(permission_handle_taxes)
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook = Webhook.objects.create(
        name="Webhook",
        app=webhook_app,
        target_url="http://www.example.com/any",
        subscription_query=TAXES_SUBSCRIPTION_QUERY,
    )
    # NOTE(review): duplicate assignment -- event_type was already set above.
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook.events.create(event_type=event_type)

    # when
    deliveries = create_delivery_for_subscription_sync_event(
        event_type, checkout_ready_to_complete, webhook
    )

    # then
    assert json.loads(deliveries.payload.payload) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {
                "id": to_global_id_or_none(checkout_ready_to_complete.shipping_address)
            },
            "currency": "USD",
            "discounts": [],
            "channel": {"id": to_global_id_or_none(checkout_ready_to_complete.channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": "Test product",
                    "productSku": "123",
                    "quantity": 3,
                    "sourceLine": {
                        "id": to_global_id_or_none(
                            checkout_ready_to_complete.lines.first()
                        ),
                        "__typename": "CheckoutLine",
                    },
                    "totalPrice": {"amount": 30.0},
                    "unitPrice": {"amount": 10.0},
                    "variantName": "",
                }
            ],
            "pricesEnteredWithTax": True,
            "shippingPrice": {"amount": 10.0},
            "sourceObject": {
                "id": to_global_id_or_none(checkout_ready_to_complete),
                "__typename": "Checkout",
            },
        },
    }
@freeze_time("2020-03-18 12:00:00")
def test_checkout_calculate_taxes_with_free_shipping_voucher(
    checkout_with_voucher_free_shipping,
    webhook_app,
    permission_handle_taxes,
):
    """A free-shipping voucher zeroes the shipping price in the tax payload."""
    # given
    checkout = checkout_with_voucher_free_shipping
    webhook_app.permissions.add(permission_handle_taxes)
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook = Webhook.objects.create(
        name="Webhook",
        app=webhook_app,
        target_url="http://www.example.com/any",
        subscription_query=TAXES_SUBSCRIPTION_QUERY,
    )
    # NOTE(review): duplicate assignment -- event_type was already set above.
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook.events.create(event_type=event_type)

    # when
    deliveries = create_delivery_for_subscription_sync_event(
        event_type, checkout, webhook
    )

    # then
    # Line details are irrelevant here, hence unittest.mock.ANY for "lines".
    assert json.loads(deliveries.payload.payload) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {"id": to_global_id_or_none(checkout.shipping_address)},
            "currency": "USD",
            "discounts": [],
            "channel": {"id": to_global_id_or_none(checkout.channel)},
            "lines": ANY,
            "pricesEnteredWithTax": True,
            "shippingPrice": {"amount": 0.0},
            "sourceObject": {
                "id": to_global_id_or_none(checkout),
                "__typename": "Checkout",
            },
        },
    }
@freeze_time("2020-03-18 12:00:00")
def test_checkout_calculate_taxes_with_voucher(
    checkout_with_voucher,
    webhook_app,
    permission_handle_taxes,
):
    """An entire-order voucher shows up under "discounts" in the tax payload."""
    # given
    webhook_app.permissions.add(permission_handle_taxes)
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook = Webhook.objects.create(
        name="Webhook",
        app=webhook_app,
        target_url="http://www.example.com/any",
        subscription_query=TAXES_SUBSCRIPTION_QUERY,
    )
    # NOTE(review): duplicate assignment -- event_type was already set above.
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook.events.create(event_type=event_type)

    # when
    deliveries = create_delivery_for_subscription_sync_event(
        event_type, checkout_with_voucher, webhook
    )

    # then
    assert json.loads(deliveries.payload.payload) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": None,
            "currency": "USD",
            "discounts": [{"amount": {"amount": 20.0}}],
            "channel": {"id": to_global_id_or_none(checkout_with_voucher.channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": "Test product",
                    "productSku": "123",
                    "quantity": 3,
                    "sourceLine": {
                        "id": to_global_id_or_none(checkout_with_voucher.lines.first()),
                        "__typename": "CheckoutLine",
                    },
                    "totalPrice": {"amount": 30.0},
                    "unitPrice": {"amount": 10.0},
                    "variantName": "",
                }
            ],
            "pricesEnteredWithTax": True,
            "shippingPrice": {"amount": 0.0},
            "sourceObject": {
                "id": to_global_id_or_none(checkout_with_voucher),
                "__typename": "Checkout",
            },
        },
    }
@freeze_time("2020-03-18 12:00:00")
def test_checkout_calculate_taxes_with_shipping_voucher(
    checkout_with_voucher,
    voucher,
    webhook_app,
    permission_handle_taxes,
):
    """A shipping-type voucher is expected to appear under "discounts"."""
    # given
    # NOTE(review): voucher.type is changed in memory but never saved
    # (no voucher.save()), so the checkout -- which reads the voucher from
    # the database -- most likely still sees the original type. Confirm
    # whether this test actually exercises the shipping-voucher path; the
    # expected payload below is identical to the entire-order voucher test.
    voucher.type = VoucherType.SHIPPING
    webhook_app.permissions.add(permission_handle_taxes)
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook = Webhook.objects.create(
        name="Webhook",
        app=webhook_app,
        target_url="http://www.example.com/any",
        subscription_query=TAXES_SUBSCRIPTION_QUERY,
    )
    # NOTE(review): duplicate assignment -- event_type was already set above.
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook.events.create(event_type=event_type)

    # when
    deliveries = create_delivery_for_subscription_sync_event(
        event_type, checkout_with_voucher, webhook
    )

    # then
    assert json.loads(deliveries.payload.payload) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": None,
            "currency": "USD",
            "discounts": [{"amount": {"amount": 20.0}}],
            "channel": {"id": to_global_id_or_none(checkout_with_voucher.channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": "Test product",
                    "productSku": "123",
                    "quantity": 3,
                    "sourceLine": {
                        "id": to_global_id_or_none(checkout_with_voucher.lines.first()),
                        "__typename": "CheckoutLine",
                    },
                    "totalPrice": {"amount": 30.0},
                    "unitPrice": {"amount": 10.0},
                    "variantName": "",
                }
            ],
            "pricesEnteredWithTax": True,
            "shippingPrice": {"amount": 0.0},
            "sourceObject": {
                "id": to_global_id_or_none(checkout_with_voucher),
                "__typename": "Checkout",
            },
        },
    }
@freeze_time("2020-03-18 12:00:00")
def test_checkout_calculate_taxes_empty_checkout(
    checkout,
    webhook_app,
    permission_handle_taxes,
):
    """A checkout with no lines yields an empty-lines payload with zero shipping."""
    # given
    webhook_app.permissions.add(permission_handle_taxes)
    event_type = WebhookEventSyncType.CHECKOUT_CALCULATE_TAXES
    webhook = Webhook.objects.create(
        name="Webhook",
        app=webhook_app,
        target_url="http://www.example.com/any",
        subscription_query=TAXES_SUBSCRIPTION_QUERY,
    )
    webhook.events.create(event_type=event_type)

    # when
    deliveries = create_delivery_for_subscription_sync_event(
        event_type, checkout, webhook
    )

    # then
    assert json.loads(deliveries.payload.payload) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": None,
            "channel": {"id": to_global_id_or_none(checkout.channel)},
            "currency": "USD",
            "discounts": [],
            "lines": [],
            "pricesEnteredWithTax": True,
            "shippingPrice": {"amount": 0.0},
            "sourceObject": {
                "id": to_global_id_or_none(checkout),
                "__typename": "Checkout",
            },
        },
    }
@freeze_time("2020-03-18 12:00:00")
def test_order_calculate_taxes(
    order_line, webhook_app, permission_handle_taxes, shipping_zone
):
    """The tax payload uses the order's base shipping price, not the listing price."""
    # given
    order = order_line.order
    expected_shipping_price = Money("2.00", order.currency)
    order.base_shipping_price = expected_shipping_price
    order.save()
    # NOTE(review): the shipping method is only assigned in memory (no
    # save() after this); the delivery is generated from this in-memory
    # instance -- confirm that is intentional.
    shipping_method = shipping_zone.shipping_methods.first()
    order.shipping_method = shipping_method
    webhook_app.permissions.add(permission_handle_taxes)
    event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
    webhook = Webhook.objects.create(
        name="Webhook",
        app=webhook_app,
        target_url="http://www.example.com/any",
        subscription_query=TAXES_SUBSCRIPTION_QUERY,
    )
    webhook.events.create(event_type=event_type)

    # when
    deliveries = create_delivery_for_subscription_sync_event(event_type, order, webhook)

    # then
    shipping_price_amount = shipping_method.channel_listings.get(
        channel=order.channel
    ).price.amount
    shipping_price_amount = quantize_price(shipping_price_amount, order.currency)
    # Sanity-check that the base shipping price actually differs from the
    # channel-listing price, so the payload assertion below is meaningful.
    # (Comparing the Money object itself to a Decimal is always unequal and
    # would make this guard vacuous -- compare the amounts instead.)
    assert expected_shipping_price.amount != shipping_price_amount
    assert json.loads(deliveries.payload.payload) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {"id": to_global_id_or_none(order.shipping_address)},
            "currency": "USD",
            "discounts": [],
            "channel": {"id": to_global_id_or_none(order.channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": "Test product",
                    "productSku": "SKU_A",
                    "quantity": 3,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(order_line),
                    },
                    "totalPrice": {"amount": 36.9},
                    "unitPrice": {"amount": 12.3},
                    "variantName": "SKU_A",
                }
            ],
            "pricesEnteredWithTax": True,
            "shippingPrice": {"amount": expected_shipping_price.amount},
            "sourceObject": {
                "__typename": "Order",
                "id": to_global_id_or_none(order),
            },
        },
    }
@freeze_time("2020-03-18 12:00:00")
def test_order_calculate_taxes_with_discounts(
    order_line,
    webhook_app,
    permission_handle_taxes,
):
    """A fixed order discount is exposed under "discounts" in the tax payload."""
    # given
    order = order_line.order
    order.total = order_line.total_price + order.shipping_price
    order.undiscounted_total = order.total
    order.save()
    # Apply a fixed 20-unit discount and record it on the order.
    value = Decimal("20")
    discount = partial(fixed_discount, discount=Money(value, order.currency))
    order.total = discount(order.total)
    order.save()
    order.discounts.create(
        value_type=DiscountValueType.FIXED,
        value=value,
        reason="Discount reason",
        amount=(order.undiscounted_total - order.total).gross,  # type: ignore
    )
    recalculate_order(order)
    order.refresh_from_db()
    webhook_app.permissions.add(permission_handle_taxes)
    event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
    webhook = Webhook.objects.create(
        name="Webhook",
        app=webhook_app,
        target_url="http://www.example.com/any",
        subscription_query=TAXES_SUBSCRIPTION_QUERY,
    )
    # NOTE(review): duplicate assignment -- event_type was already set above.
    event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
    webhook.events.create(event_type=event_type)

    # when
    deliveries = create_delivery_for_subscription_sync_event(event_type, order, webhook)

    # then
    assert json.loads(deliveries.payload.payload) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": {"id": to_global_id_or_none(order.shipping_address)},
            "currency": "USD",
            "discounts": [{"amount": {"amount": 20.0}}],
            "channel": {"id": to_global_id_or_none(order.channel)},
            "lines": [
                {
                    "chargeTaxes": True,
                    "productName": "Test product",
                    "productSku": "SKU_A",
                    "quantity": 3,
                    "sourceLine": {
                        "__typename": "OrderLine",
                        "id": to_global_id_or_none(order_line),
                    },
                    "totalPrice": {"amount": 36.9},
                    "unitPrice": {"amount": 12.3},
                    "variantName": "SKU_A",
                }
            ],
            "pricesEnteredWithTax": True,
            "shippingPrice": {"amount": 0.0},
            "sourceObject": {"__typename": "Order", "id": to_global_id_or_none(order)},
        },
    }
@freeze_time("2020-03-18 12:00:00")
def test_order_calculate_taxes_empty_order(
    order, webhook_app, permission_handle_taxes, channel_USD
):
    """An order with no lines yields an empty-lines payload with zero shipping."""
    # given
    # NOTE(review): the `order` fixture is immediately shadowed by a fresh
    # empty order; the fixture parameter is kept only for signature
    # compatibility.
    order = Order.objects.create(channel=channel_USD, currency="USD")
    webhook_app.permissions.add(permission_handle_taxes)
    event_type = WebhookEventSyncType.ORDER_CALCULATE_TAXES
    webhook = Webhook.objects.create(
        name="Webhook",
        app=webhook_app,
        target_url="http://www.example.com/any",
        subscription_query=TAXES_SUBSCRIPTION_QUERY,
    )
    webhook.events.create(event_type=event_type)

    # when
    deliveries = create_delivery_for_subscription_sync_event(event_type, order, webhook)

    # then
    assert json.loads(deliveries.payload.payload) == {
        "__typename": "CalculateTaxes",
        "taxBase": {
            "address": None,
            "currency": "USD",
            "discounts": [],
            "lines": [],
            "pricesEnteredWithTax": True,
            "shippingPrice": {"amount": 0.0},
            "channel": {"id": to_global_id_or_none(order.channel)},
            "sourceObject": {
                "__typename": "Order",
                "id": to_global_id_or_none(order),
            },
        },
    }
|
{
"content_hash": "f1c9c8c0a012117f905d876743ce883e",
"timestamp": "",
"source": "github",
"line_count": 515,
"max_line_length": 88,
"avg_line_length": 31.506796116504855,
"alnum_prop": 0.5433255269320844,
"repo_name": "mociepka/saleor",
"id": "be4d63839c01f2992d19ae700e6afc76963b95e9",
"size": "16226",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "saleor/plugins/webhook/tests/subscription_webhooks/test_create_deliveries_for_taxes.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Dockerfile",
"bytes": "2228"
},
{
"name": "HTML",
"bytes": "249248"
},
{
"name": "Procfile",
"bytes": "290"
},
{
"name": "Python",
"bytes": "12686831"
},
{
"name": "Shell",
"bytes": "439"
}
],
"symlink_target": ""
}
|
"""This script shows an example of using the PyWavefront module."""
import sys
sys.path.append('..')  # allow importing pywavefront from the repository root
import ctypes

import pyglet
from pyglet.gl import *
import pywavefront

# Current model rotation around the Y axis, in degrees; advanced by update().
rotation = 0

# Load the mesh and create the window at import time (script-style demo).
meshes = pywavefront.Wavefront('uv_sphere.obj')
window = pyglet.window.Window()
# Helper type: an array of 4 C floats, as required by glLightfv.
lightfv = ctypes.c_float * 4
@window.event
def on_resize(width, height):
    """Set a 60-degree perspective projection for the new window size."""
    glMatrixMode(GL_PROJECTION)
    glLoadIdentity()
    # Guard against a zero-height (e.g. minimized) window: the aspect-ratio
    # division would otherwise raise ZeroDivisionError.
    gluPerspective(60., float(width) / max(height, 1), 1., 100.)
    glMatrixMode(GL_MODELVIEW)
    return True
@window.event
def on_draw():
    """Render the mesh with one directional light and the current rotation."""
    window.clear()
    glLoadIdentity()
    # w == 0.0 makes this a directional light: the vector is a direction.
    glLightfv(GL_LIGHT0, GL_POSITION, lightfv(-1.0, 1.0, 1.0, 0.0))
    glEnable(GL_LIGHT0)
    # Move the camera back, then apply the animated Y spin plus a fixed tilt.
    glTranslated(0, 0, -3)
    glRotatef(rotation, 0, 1, 0)
    glRotatef(-25, 1, 0, 0)
    glRotatef(45, 0, 0, 1)
    glEnable(GL_LIGHTING)
    meshes.draw()
def update(dt):
    """Advance the spin by 90 degrees per second, resetting after two turns."""
    global rotation
    advanced = rotation + 90 * dt
    rotation = 0 if advanced > 720 else advanced
# Drive the animation every frame and enter the pyglet main loop.
pyglet.clock.schedule(update)
pyglet.app.run()
|
{
"content_hash": "585a108d8106f397d94faaa2841095db",
"timestamp": "",
"source": "github",
"line_count": 50,
"max_line_length": 67,
"avg_line_length": 19.36,
"alnum_prop": 0.6735537190082644,
"repo_name": "kitchenknif/PyWavefront",
"id": "0a2ca8938340ea2e32f0be4923a841b96de61868",
"size": "990",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "example/pyglet_demo.py",
"mode": "33261",
"license": "bsd-3-clause",
"language": [
{
"name": "Python",
"bytes": "31803"
}
],
"symlink_target": ""
}
|
from core.himesis import Himesis, HimesisPreConditionPatternLHS
import cPickle as pickle
from uuid import UUID
class HMoveOneTraceBackLinkDiffRulesLHS(HimesisPreConditionPatternLHS):
    # NOTE(review): this class is generated tool output (AToM3 / Himesis).
    # The pickled payloads and triple-quoted constraint strings below are
    # data consumed by the pattern matcher at run time — do not edit them
    # by hand; regenerate the file instead.

    def __init__(self):
        """
        Creates the himesis graph representing the AToM3 model
        HMoveOneTraceBackLinkDiffRulesLHS: a 5-node pre-condition pattern
        whose nodes and edges are populated below.
        """
        # Flag this instance as compiled now
        self.is_compiled = True
        super(HMoveOneTraceBackLinkDiffRulesLHS, self).__init__(name='HMoveOneTraceBackLinkDiffRulesLHS', num_nodes=5, edges=[])
        # Add the edges
        self.add_edges([(0, 2), (0, 1), (1, 4), (2, 4)])
        # Set the graph attributes
        self["mm__"] = pickle.loads("""(lp1
S'MT_pre__GM2AUTOSAR_MM'
p2
aS'MoTifRule'
p3
a.""")
        # Graph-level constraint: the generated check is commented out and
        # the pattern currently always matches (mirrors constraint() below).
        self["MT_constraint__"] = """#if len([i for i in graph.neighbors(PreNode('9').index) if graph.vs[i]['mm__'] == 'apply_contains']) == 0:
# return True
#return False
return True
"""
        self["name"] = """"""
        self["GUID__"] = UUID('9d5e90a5-20d5-4780-a879-080e1256c6c4')
        # Set the node attributes
        # -- node 0: target metamodel element (label 9) --
        self.vs[0]["MT_subtypeMatching__"] = True
        self.vs[0]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[0]["MT_label__"] = """9"""
        self.vs[0]["MT_subtypes__"] = pickle.loads("""(lp1
S'MT_pre__EcuInstance'
p2
aS'MT_pre__System'
p3
aS'MT_pre__SystemMapping'
p4
aS'MT_pre__ComponentPrototype'
p5
aS'MT_pre__SwCompToEcuMapping_component'
p6
aS'MT_pre__CompositionType'
p7
aS'MT_pre__PPortPrototype'
p8
aS'MT_pre__SwcToEcuMapping'
p9
aS'MT_pre__SoftwareComposition'
p10
aS'MT_pre__RPortPrototype'
p11
aS'MT_pre__PortPrototype'
p12
aS'MT_pre__ComponentType'
p13
a.""")
        self.vs[0]["MT_dirty__"] = False
        self.vs[0]["mm__"] = """MT_pre__MetaModelElement_T"""
        self.vs[0]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[0]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[0]["GUID__"] = UUID('56fb1756-4968-4e48-8179-00e817afae67')
        # -- node 1: trace link (label 11) --
        self.vs[1]["MT_subtypeMatching__"] = False
        self.vs[1]["MT_label__"] = """11"""
        self.vs[1]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
        self.vs[1]["MT_dirty__"] = False
        self.vs[1]["mm__"] = """MT_pre__trace_link"""
        self.vs[1]["GUID__"] = UUID('39eee072-ed8b-46ab-9438-8f1d09fd3819')
        # -- node 2: backward link (label 10) --
        self.vs[2]["MT_subtypeMatching__"] = False
        self.vs[2]["MT_pre__type"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[2]["MT_label__"] = """10"""
        self.vs[2]["MT_subtypes__"] = pickle.loads("""(lp1
.""")
        self.vs[2]["MT_dirty__"] = False
        self.vs[2]["mm__"] = """MT_pre__backward_link"""
        self.vs[2]["GUID__"] = UUID('da323d85-f852-4293-8f4f-991e34ed84f9')
        # -- node 3: source metamodel element (label 7), pivot "element1" --
        self.vs[3]["MT_pivotOut__"] = """element1"""
        self.vs[3]["MT_subtypeMatching__"] = True
        self.vs[3]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[3]["MT_pivotIn__"] = """element1"""
        self.vs[3]["MT_label__"] = """7"""
        self.vs[3]["MT_subtypes__"] = pickle.loads("""(lp1
S'MT_pre__VirtualDevice'
p2
aS'MT_pre__Distributable'
p3
aS'MT_pre__Signal'
p4
aS'MT_pre__ExecFrame'
p5
aS'MT_pre__ECU'
p6
a.""")
        self.vs[3]["MT_dirty__"] = False
        self.vs[3]["mm__"] = """MT_pre__MetaModelElement_S"""
        self.vs[3]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[3]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[3]["GUID__"] = UUID('fad042ad-67d6-4c06-9d60-68b398dc0312')
        # -- node 4: source metamodel element (label 8), pivot "element2" --
        self.vs[4]["MT_pivotOut__"] = """element2"""
        self.vs[4]["MT_subtypeMatching__"] = True
        self.vs[4]["MT_pre__classtype"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[4]["MT_pivotIn__"] = """element2"""
        self.vs[4]["MT_label__"] = """8"""
        self.vs[4]["MT_subtypes__"] = pickle.loads("""(lp1
S'MT_pre__VirtualDevice'
p2
aS'MT_pre__Distributable'
p3
aS'MT_pre__Signal'
p4
aS'MT_pre__ExecFrame'
p5
aS'MT_pre__ECU'
p6
a.""")
        self.vs[4]["MT_dirty__"] = False
        self.vs[4]["mm__"] = """MT_pre__MetaModelElement_S"""
        self.vs[4]["MT_pre__cardinality"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[4]["MT_pre__name"] = """
#===============================================================================
# This code is executed when evaluating if a node shall be matched by this rule.
# You can access the value of the current node's attribute value by: attr_value.
# You can access any attribute x of this node by: this['x'].
# If the constraint relies on attribute values from other nodes,
# use the LHS/NAC constraint instead.
# The given constraint must evaluate to a boolean expression.
#===============================================================================
return True
"""
        self.vs[4]["GUID__"] = UUID('093b40e9-a978-461c-b56a-f91521eb5837')

    # The eval_* methods below are generated per-node attribute evaluators
    # (one per MT_pre__* attribute, suffixed with the node label).  They are
    # all unconstrained stubs that simply return True.

    def eval_classtype9(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_cardinality9(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_name9(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_classtype7(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_cardinality7(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_name7(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_classtype8(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_cardinality8(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_name8(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def eval_type10(self, attr_value, this):
        #===============================================================================
        # This code is executed when evaluating if a node shall be matched by this rule.
        # You can access the value of the current node's attribute value by: attr_value.
        # You can access any attribute x of this node by: this['x'].
        # If the constraint relies on attribute values from other nodes,
        # use the LHS/NAC constraint instead.
        # The given constraint must evaluate to a boolean expression.
        #===============================================================================
        return True

    def constraint(self, PreNode, graph):
        """
        Executable constraint code.
        @param PreNode: Function taking an integer as parameter
        and returns the node corresponding to that label.
        """
        # The generated neighbour check is disabled; the pattern matches
        # unconditionally (same as the MT_constraint__ graph attribute).
        #if len([i for i in graph.neighbors(PreNode('9').index) if graph.vs[i]['mm__'] == 'apply_contains']) == 0:
        # return True
        #return False
        return True
|
{
"content_hash": "20a0f7ba4179ee2463c9679233818a3f",
"timestamp": "",
"source": "github",
"line_count": 389,
"max_line_length": 143,
"avg_line_length": 44.18766066838046,
"alnum_prop": 0.5246960265285938,
"repo_name": "levilucio/SyVOLT",
"id": "ab172dd5b6c40fdbd07a0b6923e5ad4fa601d772",
"size": "17191",
"binary": false,
"copies": "2",
"ref": "refs/heads/master",
"path": "GM2AUTOSAR_MM/merge_inter_layer_rules/Himesis/HMoveOneTraceBackLinkDiffRulesLHS.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Assembly",
"bytes": "166159"
},
{
"name": "Python",
"bytes": "34207588"
},
{
"name": "Shell",
"bytes": "1118"
}
],
"symlink_target": ""
}
|
"""
Sensor for checking the air quality forecast around Norway.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/air_quality.norway_air/
"""
import logging
from datetime import timedelta
from functools import wraps

import voluptuous as vol

import homeassistant.helpers.config_validation as cv
from homeassistant.components.air_quality import (
    PLATFORM_SCHEMA, AirQualityEntity)
from homeassistant.const import (CONF_LATITUDE, CONF_LONGITUDE,
                                 CONF_NAME)
from homeassistant.helpers.aiohttp_client import async_get_clientsession
REQUIREMENTS = ['pyMetno==0.4.6']
_LOGGER = logging.getLogger(__name__)
# Attribution surfaced to users; the linked page below documents the data
# provider's license terms.
ATTRIBUTION = "Air quality from " \
              "https://luftkvalitet.miljostatus.no/, " \
              "delivered by the Norwegian Meteorological Institute."
# https://api.met.no/license_data.html
CONF_FORECAST = 'forecast'
DEFAULT_FORECAST = 0
DEFAULT_NAME = 'Air quality Norway'
# Optional configuration: a forecast offset (passed straight through to
# pyMetno's AirQualityData; presumably hours ahead — TODO confirm), explicit
# coordinates (falling back to the instance's home location) and a name.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
    vol.Optional(CONF_FORECAST, default=DEFAULT_FORECAST): vol.Coerce(int),
    vol.Optional(CONF_LATITUDE): cv.latitude,
    vol.Optional(CONF_LONGITUDE): cv.longitude,
    vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
})
# Home Assistant polls async_update() at most this often.
SCAN_INTERVAL = timedelta(minutes=5)
async def async_setup_platform(hass, config, async_add_entities,
                               discovery_info=None):
    """Set up the air_quality norway sensor."""
    name = config.get(CONF_NAME)
    forecast = config.get(CONF_FORECAST)
    # Fall back to the Home Assistant instance's configured home location.
    latitude = config.get(CONF_LATITUDE, hass.config.latitude)
    longitude = config.get(CONF_LONGITUDE, hass.config.longitude)
    # Guard clause: without coordinates there is nothing to query.
    if latitude is None or longitude is None:
        _LOGGER.error("Latitude or longitude not set in Home Assistant config")
        return
    coordinates = {'lat': str(latitude), 'lon': str(longitude)}
    session = async_get_clientsession(hass)
    async_add_entities([AirSensor(name, coordinates, forecast, session)], True)
def round_state(func):
    """Decorator rounding a state getter's float result to two decimals.

    Non-float results (strings, ints, None) pass through unchanged.
    """
    # functools.wraps preserves the wrapped getter's __name__/__doc__;
    # without it every decorated property reports as "_decorator".
    @wraps(func)
    def _decorator(self):
        res = func(self)
        if isinstance(res, float):
            return round(res, 2)
        return res
    return _decorator
class AirSensor(AirQualityEntity):
    """Air quality sensor for Norway backed by the Met.no data feed."""

    def __init__(self, name, coordinates, forecast, session):
        """Initialize the sensor."""
        import metno
        self._name = name
        self._api = metno.AirQualityData(coordinates, forecast, session)

    def _concentration(self, key):
        """Look up one reading from the most recently fetched API data."""
        return self._api.data.get(key)

    @property
    def attribution(self) -> str:
        """Return the attribution."""
        return ATTRIBUTION

    @property
    def device_state_attributes(self) -> dict:
        """Return other details about the sensor state."""
        return {
            'level': self._concentration('level'),
            'location': self._concentration('location'),
        }

    @property
    def name(self) -> str:
        """Return the name of the sensor."""
        return self._name

    @property
    @round_state
    def air_quality_index(self):
        """Return the Air Quality Index (AQI)."""
        return self._concentration('aqi')

    @property
    @round_state
    def nitrogen_dioxide(self):
        """Return the NO2 (nitrogen dioxide) level."""
        return self._concentration('no2_concentration')

    @property
    @round_state
    def ozone(self):
        """Return the O3 (ozone) level."""
        return self._concentration('o3_concentration')

    @property
    @round_state
    def particulate_matter_2_5(self):
        """Return the particulate matter 2.5 level."""
        return self._concentration('pm25_concentration')

    @property
    @round_state
    def particulate_matter_10(self):
        """Return the particulate matter 10 level."""
        return self._concentration('pm10_concentration')

    @property
    def unit_of_measurement(self):
        """Return the unit of measurement of this entity, if any."""
        return self._api.units.get('pm25_concentration')

    async def async_update(self) -> None:
        """Update the sensor."""
        await self._api.update()
|
{
"content_hash": "f752c15d28eeb50bbbf207904df5adf3",
"timestamp": "",
"source": "github",
"line_count": 140,
"max_line_length": 79,
"avg_line_length": 29.9,
"alnum_prop": 0.6316292403248925,
"repo_name": "nugget/home-assistant",
"id": "712f2734ea8c667a5c41042b0dd7183b00717182",
"size": "4186",
"binary": false,
"copies": "3",
"ref": "refs/heads/dev",
"path": "homeassistant/components/air_quality/norway_air.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "1175"
},
{
"name": "Dockerfile",
"bytes": "1081"
},
{
"name": "HCL",
"bytes": "826"
},
{
"name": "Python",
"bytes": "14492390"
},
{
"name": "Ruby",
"bytes": "745"
},
{
"name": "Shell",
"bytes": "17526"
}
],
"symlink_target": ""
}
|
"""This test checks for correct fork() behavior.
"""
import _imp as imp
import os
import signal
import sys
import time
from test.fork_wait import ForkWait
from test.support import (reap_children, get_attribute,
import_module, verbose)
# import_module() skips the whole test module when threading is unavailable.
threading = import_module('threading')
# Skip test if fork does not exist.
get_attribute(os, 'fork')
class ForkTest(ForkWait):
    def wait_impl(self, cpid):
        # Poll with WNOHANG for up to 10 seconds instead of blocking, then
        # assert the child exited cleanly (exit status 0).
        deadline = time.monotonic() + 10.0
        while time.monotonic() <= deadline:
            # waitpid() shouldn't hang, but some of the buildbots seem to hang
            # in the forking tests. This is an attempt to fix the problem.
            spid, status = os.waitpid(cpid, os.WNOHANG)
            if spid == cpid:
                break
            time.sleep(0.1)
        self.assertEqual(spid, cpid)
        self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))

    def test_threaded_import_lock_fork(self):
        """Check fork() in main thread works while a subthread is doing an import"""
        import_started = threading.Event()
        fake_module_name = "fake test module"
        partial_module = "partial"
        complete_module = "complete"
        def importer():
            # Hold the import lock while the module is only partially set up,
            # so the fork happens mid-import.
            imp.acquire_lock()
            sys.modules[fake_module_name] = partial_module
            import_started.set()
            time.sleep(0.01) # Give the other thread time to try and acquire.
            sys.modules[fake_module_name] = complete_module
            imp.release_lock()
        t = threading.Thread(target=importer)
        t.start()
        import_started.wait()
        pid = os.fork()
        try:
            # PyOS_BeforeFork should have waited for the import to complete
            # before forking, so the child can recreate the import lock
            # correctly, but also won't see a partially initialised module
            if not pid:
                m = __import__(fake_module_name)
                if m == complete_module:
                    os._exit(0)
                else:
                    if verbose > 1:
                        print("Child encountered partial module")
                    os._exit(1)
            else:
                t.join()
                # Exitcode 1 means the child got a partial module (bad.) No
                # exitcode (but a hang, which manifests as 'got pid 0')
                # means the child deadlocked (also bad.)
                self.wait_impl(pid)
        finally:
            try:
                # Kill unconditionally so a hung child cannot block the run;
                # an already-reaped child raises OSError, which is fine.
                os.kill(pid, signal.SIGKILL)
            except OSError:
                pass

    def test_nested_import_lock_fork(self):
        """Check fork() in main thread works while the main thread is doing an import"""
        # Issue 9573: this used to trigger RuntimeError in the child process
        def fork_with_import_lock(level):
            release = 0
            in_child = False
            try:
                try:
                    # Acquire the import lock 'level' times before forking to
                    # simulate forking from inside nested imports.
                    for i in range(level):
                        imp.acquire_lock()
                        release += 1
                    pid = os.fork()
                    in_child = not pid
                finally:
                    for i in range(release):
                        imp.release_lock()
            except RuntimeError:
                if in_child:
                    if verbose > 1:
                        print("RuntimeError in child")
                    os._exit(1)
                raise
            if in_child:
                os._exit(0)
            self.wait_impl(pid)
        # Check this works with various levels of nested
        # import in the main thread
        for level in range(5):
            fork_with_import_lock(level)
def tearDownModule():
    # Make sure no forked children from these tests outlive the test run.
    reap_children()
if __name__ == "__main__":
    # unittest is not imported at module scope in this file, so import it
    # here; otherwise running the file as a script raises NameError.
    import unittest
    unittest.main()
|
{
"content_hash": "2fc62769d3e5fecd7c8788ef2ffc34ae",
"timestamp": "",
"source": "github",
"line_count": 111,
"max_line_length": 88,
"avg_line_length": 34.32432432432432,
"alnum_prop": 0.5244094488188976,
"repo_name": "munyirik/python",
"id": "eeba306f452ce912b1ad5e29471b73462ac29ba3",
"size": "3810",
"binary": false,
"copies": "7",
"ref": "refs/heads/develop",
"path": "cpython/Lib/test/test_fork1.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [
{
"name": "Assembly",
"bytes": "470920"
},
{
"name": "Batchfile",
"bytes": "35551"
},
{
"name": "C",
"bytes": "17872871"
},
{
"name": "C#",
"bytes": "1231"
},
{
"name": "C++",
"bytes": "356072"
},
{
"name": "CSS",
"bytes": "2839"
},
{
"name": "Common Lisp",
"bytes": "24481"
},
{
"name": "DIGITAL Command Language",
"bytes": "26402"
},
{
"name": "Groff",
"bytes": "254942"
},
{
"name": "HTML",
"bytes": "130698"
},
{
"name": "JavaScript",
"bytes": "10616"
},
{
"name": "Makefile",
"bytes": "25026"
},
{
"name": "Objective-C",
"bytes": "33182"
},
{
"name": "PLSQL",
"bytes": "22886"
},
{
"name": "PostScript",
"bytes": "13803"
},
{
"name": "PowerShell",
"bytes": "1420"
},
{
"name": "Prolog",
"bytes": "557"
},
{
"name": "Python",
"bytes": "24911704"
},
{
"name": "R",
"bytes": "5378"
},
{
"name": "Shell",
"bytes": "437386"
},
{
"name": "TeX",
"bytes": "323102"
},
{
"name": "Visual Basic",
"bytes": "481"
}
],
"symlink_target": ""
}
|
from textwrap import dedent
import pytest
from structlog_pretty.processors import XMLPrettifier as uut
cases = [
('<elem/>', '<elem/>'),
('<elem />', '<elem/>'),
('<wrapper><elem/></wrapper>', dedent('''
<wrapper>
<elem/>
</wrapper>
''').strip()),
('<wrapper><elem/><elem/></wrapper>', dedent('''
<wrapper>
<elem/>
<elem/>
</wrapper>
''').strip()),
(
dedent('''
<wrapper>
<elem/>
<elem/>
</wrapper>
''').strip(),
dedent('''
<wrapper>
<elem/>
<elem/>
</wrapper>
''').strip(),
),
]
modes = ('slow', 'fast')
@pytest.mark.parametrize(['mode', 'param', 'expected'], [
    [mode] + list(case) for mode in modes for case in cases
])
def test_run(mode, param, expected, monkeypatch):
    # Force the chosen backend by patching the availability flag, then check
    # that a field listed in xml_fields is prettified in the event dict.
    monkeypatch.setattr('structlog_pretty.processors.fast_xml_available', mode == 'fast')
    processor = uut(xml_fields=['param'])
    event_dict = processor(None, None, {'param': param})
    assert event_dict == {'param': expected}
@pytest.mark.parametrize(['mode', 'param', 'expected'], [
    (mode, case[0], case[0]) for mode in modes for case in cases
])
def test_field_name_setting(mode, param, expected, monkeypatch):
    # Fields NOT listed in xml_fields must be left untouched, so the expected
    # value here is the raw input itself (expected == case[0]).
    monkeypatch.setattr('structlog_pretty.processors.fast_xml_available', mode == 'fast')
    processor = uut(xml_fields=['not_the_param'])
    event_dict = processor(None, None, {'param': param})
    assert event_dict == {'param': expected}
|
{
"content_hash": "cbe230b97457ad68133ae4329e2cb0c0",
"timestamp": "",
"source": "github",
"line_count": 55,
"max_line_length": 89,
"avg_line_length": 28.78181818181818,
"alnum_prop": 0.542008843967151,
"repo_name": "underyx/structlog-pretty",
"id": "f2edfa854600979b9d2524a3aea684b2876f9669",
"size": "1583",
"binary": false,
"copies": "1",
"ref": "refs/heads/main",
"path": "test/test_XMLPrettifier.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Python",
"bytes": "14557"
}
],
"symlink_target": ""
}
|
"""Unittests to make sure we generate and update the expected-*.txt files
properly after running layout tests."""
import os
import shutil
import tempfile
import unittest
import compare_failures
import path_utils
import test_failures
class CompareFailuresUnittest(unittest.TestCase):
def setUp(self):
    """Makes a temporary results directory and puts expected-failures.txt and
    expected-crashes.txt into it."""
    self._tempdir = tempfile.mkdtemp()
    # copy over expected-*.txt files
    # (GetTestDataDir is a helper defined elsewhere in this class.)
    testdatadir = self.GetTestDataDir()
    filenames = ("expected-passing.txt", "expected-failures.txt",
                 "expected-crashes.txt")
    for filename in filenames:
        # copyfile doesn't copy file permissions so we can delete the files later
        shutil.copyfile(os.path.join(testdatadir, filename),
                        os.path.join(self._tempdir, filename))
def tearDown(self):
    """Remove temp directory."""
    shutil.rmtree(self._tempdir)
    # Drop the reference so a stale path can never be reused across tests.
    self._tempdir = None
###########################################################################
# Tests
def testGenerateNewBaseline(self):
    """Test the generation of new expected-*.txt files when either they don't
    exist or the user explicitly asks to make new files."""
    failures = self.GetTestFailures()
    # Test to make sure we generate baseline files if the file doesn't exist.
    os.remove(os.path.join(self.GetTmpDir(), 'expected-passing.txt'))
    os.remove(os.path.join(self.GetTmpDir(), 'expected-failures.txt'))
    os.remove(os.path.join(self.GetTmpDir(), 'expected-crashes.txt'))
    # Test force generation of new baseline files with a new failure and one
    # less passing.
    pass_file = os.path.join(path_utils.LayoutTestsDir(), 'fast', 'pass1.html')
    failures[pass_file] = [test_failures.FailureTextMismatch(None)]
    cf = compare_failures.CompareFailures(self.GetTestFiles(), failures,
                                          set(), set(),
                                          self.GetTmpDir(), False)
    cf.UpdateFailuresOnDisk()
    # All three expected-*.txt files should be regenerated from scratch.
    self.CheckOutputWithExpectedFiles('expected-passing-new-baseline.txt',
                                      'expected-failures-added.txt',
                                      'expected-crashes.txt')
def testPassingToFailure(self):
    """When there's a new failure, we don't add it to the baseline."""
    failures = self.GetTestFailures()
    # Test case where we don't update new baseline. We have a new failure,
    # but it shouldn't be added to the expected-failures.txt file.
    pass_file = os.path.join(path_utils.LayoutTestsDir(), 'fast', 'pass1.html')
    failures[pass_file] = [test_failures.FailureTextMismatch(None)]
    self.CheckNoChanges(failures)
    # Same thing as before: pass -> crash
    failures[pass_file] = [test_failures.FailureCrash()]
    self.CheckNoChanges(failures)
def testFailureToCrash(self):
    """When there's a new crash, we don't add it to the baseline or remove it
    from the failure list."""
    failures = self.GetTestFailures()
    # Test case where we don't update new baseline. A failure moving to a
    # crash shouldn't be added to the expected-crashes.txt file.
    failure_file = os.path.join(path_utils.LayoutTestsDir(),
                                'fast', 'foo', 'fail1.html')
    failures[failure_file] = [test_failures.FailureCrash()]
    self.CheckNoChanges(failures)
def testFailureToPassing(self):
    """This is better than before, so we should update the failure list."""
    failures = self.GetTestFailures()
    # Remove one of the failing test cases from the failures dictionary. This
    # makes failure_file considered to be passing.
    failure_file = os.path.join(path_utils.LayoutTestsDir(),
                                'fast', 'bar', 'fail2.html')
    del failures[failure_file]
    cf = compare_failures.CompareFailures(self.GetTestFiles(), failures,
                                          set(), set(),
                                          self.GetTmpDir(), False)
    cf.UpdateFailuresOnDisk()
    # The test moves from the failures list to the passing list.
    self.CheckOutputWithExpectedFiles('expected-passing-new-passing2.txt',
                                      'expected-failures-new-passing.txt',
                                      'expected-crashes.txt')
def testCrashToPassing(self):
    """This is better than before, so we update the crashes file."""
    failures = self.GetTestFailures()
    crash_file = os.path.join(path_utils.LayoutTestsDir(),
                              'fast', 'bar', 'betz', 'crash3.html')
    # No failure entry at all means the test is now considered passing.
    del failures[crash_file]
    cf = compare_failures.CompareFailures(self.GetTestFiles(), failures,
                                          set(), set(),
                                          self.GetTmpDir(), False)
    cf.UpdateFailuresOnDisk()
    self.CheckOutputWithExpectedFiles('expected-passing-new-passing.txt',
                                      'expected-failures.txt',
                                      'expected-crashes-new-passing.txt')
def testCrashToFailure(self):
    """This is better than before, so we should update both lists."""
    failures = self.GetTestFailures()
    crash_file = os.path.join(path_utils.LayoutTestsDir(),
                              'fast', 'bar', 'betz', 'crash3.html')
    # Downgrade the crash to a plain text mismatch: it should leave the
    # crashes file and be recorded as a failure instead.
    failures[crash_file] = [test_failures.FailureTextMismatch(None)]
    cf = compare_failures.CompareFailures(self.GetTestFiles(), failures,
                                          set(), set(),
                                          self.GetTmpDir(), False)
    cf.UpdateFailuresOnDisk()
    self.CheckOutputWithExpectedFiles('expected-passing.txt',
                                      'expected-failures-new-crash.txt',
                                      'expected-crashes-new-passing.txt')
def testNewTestPass(self):
    """After a merge, we need to update new passing tests properly."""
    files = self.GetTestFiles()
    new_test_file = os.path.join(path_utils.LayoutTestsDir(), "new-test.html")
    files.add(new_test_file)
    failures = self.GetTestFailures()
    # New test file passing
    cf = compare_failures.CompareFailures(files, failures, set(), set(),
                                          self.GetTmpDir(), False)
    cf.UpdateFailuresOnDisk()
    # Only the passing list should gain the new test.
    self.CheckOutputWithExpectedFiles('expected-passing-new-test.txt',
                                      'expected-failures.txt',
                                      'expected-crashes.txt')
def testNewTestFail(self):
    """After a merge, we need to update new failing tests properly."""
    files = self.GetTestFiles()
    new_test_file = os.path.join(path_utils.LayoutTestsDir(), "new-test.html")
    files.add(new_test_file)
    failures = self.GetTestFailures()
    # New test file failing
    failures[new_test_file] = [test_failures.FailureTextMismatch(None)]
    cf = compare_failures.CompareFailures(files, failures, set(), set(),
                                          self.GetTmpDir(), False)
    cf.UpdateFailuresOnDisk()
    # Only the failures list should gain the new test.
    self.CheckOutputWithExpectedFiles('expected-passing.txt',
                                      'expected-failures-new-test.txt',
                                      'expected-crashes.txt')
def testNewTestCrash(self):
    """After a merge, we need to update new crashing tests properly."""
    files = self.GetTestFiles()
    new_test_file = os.path.join(path_utils.LayoutTestsDir(), "new-test.html")
    files.add(new_test_file)
    failures = self.GetTestFailures()
    # New test file crashing
    failures[new_test_file] = [test_failures.FailureCrash()]
    cf = compare_failures.CompareFailures(files, failures, set(), set(),
                                          self.GetTmpDir(), False)
    cf.UpdateFailuresOnDisk()
    # Only the crashes list should gain the new test.
    self.CheckOutputWithExpectedFiles('expected-passing.txt',
                                      'expected-failures.txt',
                                      'expected-crashes-new-test.txt')
def testHasNewFailures(self):
    """HasNewFailures() should flag pass->fail transitions only."""
    files = self.GetTestFiles()
    failures = self.GetTestFailures()
    # no changes, no new failures
    cf = compare_failures.CompareFailures(files, failures, set(), set(),
                                          self.GetTmpDir(), False)
    self.failUnless(not cf.HasNewFailures())
    # test goes from passing to failing
    pass_file = os.path.join(path_utils.LayoutTestsDir(), 'fast', 'pass1.html')
    failures[pass_file] = [test_failures.FailureTextMismatch(None)]
    cf = compare_failures.CompareFailures(files, failures, set(), set(),
                                          self.GetTmpDir(), False)
    self.failUnless(cf.HasNewFailures())
    # Failing to passing: removing a known failure is not a "new" failure.
    failures = self.GetTestFailures()
    failure_file = os.path.join(path_utils.LayoutTestsDir(),
                                'fast', 'bar', 'fail2.html')
    del failures[failure_file]
    cf = compare_failures.CompareFailures(files, failures, set(), set(),
                                          self.GetTmpDir(), False)
    self.failUnless(not cf.HasNewFailures())
    # A new test that fails, this doesn't count as a new failure.
    new_test_file = os.path.join(path_utils.LayoutTestsDir(), "new-test.html")
    files.add(new_test_file)
    failures = self.GetTestFailures()
    failures[new_test_file] = [test_failures.FailureCrash()]
    cf = compare_failures.CompareFailures(files, failures, set(), set(),
                                          self.GetTmpDir(), False)
    self.failUnless(not cf.HasNewFailures())
###########################################################################
# Helper methods
def CheckOutputEqualsExpectedFile(self, output, expected):
    """Compares a file in our output dir against a file from the testdata
    directory.

    Args:
      output: file name relative to the tmp output directory.
      expected: file name relative to the checked-in testdata directory.
    """
    output = os.path.join(self.GetTmpDir(), output)
    expected = os.path.join(self.GetTestDataDir(), expected)
    # Use context managers so the file handles are closed deterministically
    # instead of leaking until garbage collection.
    with open(output) as actual_file:
        actual_text = actual_file.read()
    with open(expected) as expected_file:
        self.failUnlessEqual(actual_text, expected_file.read())
def CheckOutputWithExpectedFiles(self, passing, failing, crashing):
    """Compare all three output files against three provided expected
    files."""
    comparisons = (('expected-passing.txt', passing),
                   ('expected-failures.txt', failing),
                   ('expected-crashes.txt', crashing))
    for output_name, expected_name in comparisons:
        self.CheckOutputEqualsExpectedFile(output_name, expected_name)
def CheckNoChanges(self, failures):
    """Verify that none of the expected-*.txt files have changed."""
    cf = compare_failures.CompareFailures(self.GetTestFiles(), failures,
                                          set(), set(),
                                          self.GetTmpDir(), False)
    cf.UpdateFailuresOnDisk()
    # Rewriting with an unchanged failure set must reproduce the same three
    # expected files byte for byte.
    self.CheckOutputWithExpectedFiles('expected-passing.txt',
                                      'expected-failures.txt',
                                      'expected-crashes.txt')
def GetTestDataDir(self):
    """Return the absolute path of the checked-in testdata directory."""
    return os.path.abspath('testdata')
def GetTmpDir(self):
    """Return the scratch directory holding this test's output files."""
    return self._tempdir
def GetTestFiles(self):
    """Get a set of files that includes the expected crashes and failures
    along with two passing tests."""
    layout_dir = path_utils.LayoutTestsDir()
    relative_paths = (
        'fast\\pass1.html',
        'fast\\foo\\pass2.html',
        'fast\\foo\\crash1.html',
        'fast\\bar\\crash2.html',
        'fast\\bar\\betz\\crash3.html',
        'fast\\foo\\fail1.html',
        'fast\\bar\\fail2.html',
        'fast\\bar\\betz\\fail3.html',
    )
    return set(os.path.join(layout_dir, rel) for rel in relative_paths)
def GetTestFailures(self):
    """Get a dictionary representing the crashes and failures in the
    expected-*.txt files."""
    failure_map = {}
    for filename in self.GetTestFiles():
        # File names encode their expected outcome ('crash' / 'fail').
        if 'crash' in filename:
            failure_map[filename] = [test_failures.FailureCrash()]
        elif 'fail' in filename:
            failure_map[filename] = [test_failures.FailureTextMismatch(None)]
    return failure_map
if __name__ == '__main__':
    unittest.main()
|
{
"content_hash": "47f3b3bdcd167909c130765197cd9c93",
"timestamp": "",
"source": "github",
"line_count": 276,
"max_line_length": 79,
"avg_line_length": 43.19927536231884,
"alnum_prop": 0.6081523106600688,
"repo_name": "amyvmiwei/chromium",
"id": "26e2adf21fb85f2698619f76d03f0113bab5d686",
"size": "12113",
"binary": false,
"copies": "2",
"ref": "refs/heads/trunk",
"path": "webkit/tools/layout_tests/layout_package/compare_failures_unittest.py",
"mode": "33188",
"license": "bsd-3-clause",
"language": [],
"symlink_target": ""
}
|
import os, re, subprocess
# Expanded home directory path.
# NOTE(review): appears unused within this module -- confirm before removing.
user_dir = os.path.expanduser('~')
def num_images(img_dir='django_cam_controller/static/img'):
    """Return the number of captured images in *img_dir*, as a string.

    Replaces the previous ``ls | wc -l`` subprocess pipeline: counting the
    directory entries directly needs no shell, leaks no pipe handles, and
    behaves the same on Python 2 and 3 (the old code compared bytes with a
    str on Python 3).

    :param img_dir: directory to count files in; defaults to the capture
        directory, matching the previously hard-coded path.
    """
    # ls (without -a) skips dotfiles, so do the same here.
    entries = [name for name in os.listdir(img_dir) if not name.startswith('.')]
    return str(len(entries))
def capture_image():
    """Trigger a capture on the attached camera and download the image.

    The image is written under django_cam_controller/static/img/ and also
    kept on the camera. Returns gphoto2's stdout output.
    """
    # Local renamed from ``capture_image`` so it no longer shadows this
    # function's own name.
    proc = subprocess.Popen(
        ['gphoto2', '--capture-image-and-download',
         '--filename', 'django_cam_controller/static/img/%:', '--keep'],
        stdout=subprocess.PIPE)
    capture_result = proc.stdout.read()
    os.system("gphoto2 --summary")
    return capture_result
def capture_interval(frames, sec):
    """Capture *frames* images at an interval of *sec* seconds.

    Returns a human-readable status message; when *sec* is falsy (0, None,
    '') nothing is captured and an error message is returned instead.
    """
    if sec:
        # Argument list instead of os.system() string interpolation: avoids
        # shell quoting/injection issues while still blocking until done.
        subprocess.call(['gphoto2', '--capture-image',
                         '-F', str(frames), '--interval', str(sec)])
        message = "Capturing at interval of %s for %s frames." % (sec, frames)
    else:
        message = "No interval set. Please set an interval (in seconds)."
    return message
def compile_video(framerate):
    """Resize all captured frames and compile them into a preview MP4.

    Runs from inside the image directory (relative glob patterns), then
    restores the working directory. The output lands in ../video/preview.mp4
    relative to the image directory.

    :param framerate: frames per second passed straight to ffmpeg.
    """
    os.chdir('django_cam_controller/static/img/')
    # Force every frame to 800x600 and fix EXIF orientation in place.
    os.system("mogrify -auto-orient -resize 800x600! *.jpg")
    # h264 baseline + faststart for broad browser playback.
    os.system("ffmpeg -y -pattern_type glob -framerate %s -i '*.jpg' -an -s hd720 -vcodec libx264 -pix_fmt yuv420p -preset slow -profile:v baseline -movflags faststart ../video/preview.mp4" % framerate)
    # NOTE(review): assumes the process started from the project root; if an
    # os.system call above fails the chdir is still undone here.
    os.chdir("../../../")
    message = "Movie compiled"
    return message
def new_sequence(img_dir="django_cam_controller/static/img"):
    """Delete all captured images and recreate an empty image directory.

    :param img_dir: directory to wipe; defaults to the capture directory,
        matching the previously hard-coded path.
    """
    import shutil  # local import keeps this module's top-level imports unchanged
    # shutil instead of ``os.system("rm -rf ...")``: portable and immune to
    # shell quoting/injection problems.
    shutil.rmtree(img_dir, ignore_errors=True)
    os.makedirs(img_dir)
    message = "Ready to capture new sequence"
    return message
|
{
"content_hash": "cf4fc97a6baf5a0b5a877b39006c46a9",
"timestamp": "",
"source": "github",
"line_count": 52,
"max_line_length": 202,
"avg_line_length": 31.576923076923077,
"alnum_prop": 0.6358099878197321,
"repo_name": "davis1410/django_cam_controller",
"id": "55989d9f0cdf3d22b249439a3ae60427fbd2924d",
"size": "1642",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "scripts/django_cam_controller.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "HTML",
"bytes": "1806"
},
{
"name": "Python",
"bytes": "3850"
}
],
"symlink_target": ""
}
|
"""Class to hold all switch accessories."""
from __future__ import annotations
import logging
from typing import NamedTuple
from pyhap.const import (
CATEGORY_FAUCET,
CATEGORY_OUTLET,
CATEGORY_SHOWER_HEAD,
CATEGORY_SPRINKLER,
CATEGORY_SWITCH,
)
from homeassistant.components import button, input_button
from homeassistant.components.input_select import ATTR_OPTIONS, SERVICE_SELECT_OPTION
from homeassistant.components.switch import DOMAIN
from homeassistant.components.vacuum import (
DOMAIN as VACUUM_DOMAIN,
SERVICE_RETURN_TO_BASE,
SERVICE_START,
STATE_CLEANING,
VacuumEntityFeature,
)
from homeassistant.const import (
ATTR_ENTITY_ID,
ATTR_SUPPORTED_FEATURES,
CONF_TYPE,
SERVICE_TURN_OFF,
SERVICE_TURN_ON,
STATE_ON,
)
from homeassistant.core import callback, split_entity_id
from homeassistant.helpers.event import async_call_later
from .accessories import TYPES, HomeAccessory
from .const import (
CHAR_ACTIVE,
CHAR_IN_USE,
CHAR_NAME,
CHAR_ON,
CHAR_OUTLET_IN_USE,
CHAR_VALVE_TYPE,
SERV_OUTLET,
SERV_SWITCH,
SERV_VALVE,
TYPE_FAUCET,
TYPE_SHOWER,
TYPE_SPRINKLER,
TYPE_VALVE,
)
from .util import cleanup_name_for_homekit
_LOGGER = logging.getLogger(__name__)
class ValveInfo(NamedTuple):
    """Category and type information for valve."""

    # HomeKit accessory category (e.g. CATEGORY_FAUCET, CATEGORY_SPRINKLER).
    category: int
    # HAP ValveType characteristic value for this kind of valve.
    valve_type: int
# Maps the homekit config TYPE_* strings to their HomeKit accessory category
# and HAP valve-type characteristic value.
VALVE_TYPE: dict[str, ValveInfo] = {
    TYPE_FAUCET: ValveInfo(CATEGORY_FAUCET, 3),
    TYPE_SHOWER: ValveInfo(CATEGORY_SHOWER_HEAD, 2),
    TYPE_SPRINKLER: ValveInfo(CATEGORY_SPRINKLER, 1),
    TYPE_VALVE: ValveInfo(CATEGORY_FAUCET, 0),
}
# Domains whose entities are stateless "fire and forget" triggers; they have
# no meaningful off state, so their HomeKit switch is reset automatically.
ACTIVATE_ONLY_SWITCH_DOMAINS = {"button", "input_button", "scene", "script"}

# Seconds to wait before flipping an activate-only switch back to off.
ACTIVATE_ONLY_RESET_SECONDS = 10
@TYPES.register("Outlet")
class Outlet(HomeAccessory):
    """Generate an Outlet accessory."""

    def __init__(self, *args):
        """Initialize an Outlet accessory object."""
        super().__init__(*args, category=CATEGORY_OUTLET)
        state = self.hass.states.get(self.entity_id)

        outlet_service = self.add_preload_service(SERV_OUTLET)
        self.char_on = outlet_service.configure_char(
            CHAR_ON, value=False, setter_callback=self.set_state
        )
        self.char_outlet_in_use = outlet_service.configure_char(
            CHAR_OUTLET_IN_USE, value=True
        )
        # Sync the state on creation so the first HomeKit GET does not
        # trigger an event storm after startup.
        self.async_update_state(state)

    def set_state(self, value):
        """Move switch state to value if call came from HomeKit."""
        _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value)
        service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF
        self.async_call_service(DOMAIN, service, {ATTR_ENTITY_ID: self.entity_id})

    @callback
    def async_update_state(self, new_state):
        """Update switch state after state changed."""
        is_on = new_state.state == STATE_ON
        _LOGGER.debug("%s: Set current state to %s", self.entity_id, is_on)
        self.char_on.set_value(is_on)
@TYPES.register("Switch")
class Switch(HomeAccessory):
    """Generate a Switch accessory."""

    def __init__(self, *args):
        """Initialize a Switch accessory object."""
        super().__init__(*args, category=CATEGORY_SWITCH)
        self._domain, self._object_id = split_entity_id(self.entity_id)
        state = self.hass.states.get(self.entity_id)
        # Reuse the state fetched above instead of querying the state
        # machine a second time (the original fetched it twice).
        self.activate_only = self.is_activate(state)
        serv_switch = self.add_preload_service(SERV_SWITCH)
        self.char_on = serv_switch.configure_char(
            CHAR_ON, value=False, setter_callback=self.set_state
        )
        # Set the state so it is in sync on initial
        # GET to avoid an event storm after homekit startup
        self.async_update_state(state)

    def is_activate(self, state):
        """Check if entity is activate only (a stateless trigger domain)."""
        return self._domain in ACTIVATE_ONLY_SWITCH_DOMAINS

    def reset_switch(self, *args):
        """Reset switch to emulate activate click."""
        _LOGGER.debug("%s: Reset switch to off", self.entity_id)
        self.char_on.set_value(False)

    def set_state(self, value):
        """Move switch state to value if call came from HomeKit."""
        _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value)
        if self.activate_only and not value:
            # Stateless entities cannot be turned off.
            _LOGGER.debug("%s: Ignoring turn_off call", self.entity_id)
            return
        params = {ATTR_ENTITY_ID: self.entity_id}
        if self._domain == "script":
            # Scripts are invoked as their own service with no entity_id.
            service = self._object_id
            params = {}
        elif self._domain == button.DOMAIN:
            service = button.SERVICE_PRESS
        elif self._domain == input_button.DOMAIN:
            service = input_button.SERVICE_PRESS
        else:
            service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF
        self.async_call_service(self._domain, service, params)

        if self.activate_only:
            # Flip the HomeKit switch back after a short delay to emulate a
            # momentary button press.
            async_call_later(self.hass, ACTIVATE_ONLY_RESET_SECONDS, self.reset_switch)

    @callback
    def async_update_state(self, new_state):
        """Update switch state after state changed."""
        self.activate_only = self.is_activate(new_state)
        if self.activate_only:
            _LOGGER.debug(
                "%s: Ignore state change, entity is activate only", self.entity_id
            )
            return

        current_state = new_state.state == STATE_ON
        _LOGGER.debug("%s: Set current state to %s", self.entity_id, current_state)
        self.char_on.set_value(current_state)
@TYPES.register("Vacuum")
class Vacuum(Switch):
    """Generate a Switch accessory."""

    def set_state(self, value):
        """Move switch state to value if call came from HomeKit."""
        _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value)
        state = self.hass.states.get(self.entity_id)
        features = state.attributes.get(ATTR_SUPPORTED_FEATURES, 0)

        # Prefer the dedicated start/return services when supported,
        # otherwise fall back to plain turn_on/turn_off.
        if value:
            supports_start = features & VacuumEntityFeature.START
            service = SERVICE_START if supports_start else SERVICE_TURN_ON
        else:
            supports_return = features & VacuumEntityFeature.RETURN_HOME
            service = SERVICE_RETURN_TO_BASE if supports_return else SERVICE_TURN_OFF

        self.async_call_service(
            VACUUM_DOMAIN, service, {ATTR_ENTITY_ID: self.entity_id}
        )

    @callback
    def async_update_state(self, new_state):
        """Update switch state after state changed."""
        is_running = new_state.state in (STATE_CLEANING, STATE_ON)
        _LOGGER.debug("%s: Set current state to %s", self.entity_id, is_running)
        self.char_on.set_value(is_running)
@TYPES.register("Valve")
class Valve(HomeAccessory):
    """Generate a Valve accessory."""

    def __init__(self, *args):
        """Initialize a Valve accessory object."""
        super().__init__(*args)
        state = self.hass.states.get(self.entity_id)
        valve_info = VALVE_TYPE[self.config[CONF_TYPE]]
        self.category = valve_info.category

        valve_service = self.add_preload_service(SERV_VALVE)
        self.char_active = valve_service.configure_char(
            CHAR_ACTIVE, value=False, setter_callback=self.set_state
        )
        self.char_in_use = valve_service.configure_char(CHAR_IN_USE, value=False)
        self.char_valve_type = valve_service.configure_char(
            CHAR_VALVE_TYPE, value=valve_info.valve_type
        )
        # Sync the state on creation so the first HomeKit GET does not
        # trigger an event storm after startup.
        self.async_update_state(state)

    def set_state(self, value):
        """Move value state to value if call came from HomeKit."""
        _LOGGER.debug("%s: Set switch state to %s", self.entity_id, value)
        self.char_in_use.set_value(value)
        service = SERVICE_TURN_ON if value else SERVICE_TURN_OFF
        self.async_call_service(DOMAIN, service, {ATTR_ENTITY_ID: self.entity_id})

    @callback
    def async_update_state(self, new_state):
        """Update switch state after state changed."""
        current_state = 1 if new_state.state == STATE_ON else 0
        _LOGGER.debug("%s: Set active state to %s", self.entity_id, current_state)
        self.char_active.set_value(current_state)
        _LOGGER.debug("%s: Set in_use state to %s", self.entity_id, current_state)
        self.char_in_use.set_value(current_state)
@TYPES.register("SelectSwitch")
class SelectSwitch(HomeAccessory):
    """Generate a Switch accessory that contains multiple switches."""

    def __init__(self, *args):
        """Initialize a Switch accessory object."""
        super().__init__(*args, category=CATEGORY_SWITCH)
        self.domain = split_entity_id(self.entity_id)[0]
        state = self.hass.states.get(self.entity_id)
        # Maps each select option to its HomeKit "On" characteristic.
        self.select_chars = {}
        options = state.attributes[ATTR_OPTIONS]
        for option in options:
            serv_option = self.add_preload_service(
                SERV_OUTLET, [CHAR_NAME, CHAR_IN_USE]
            )
            serv_option.configure_char(
                CHAR_NAME, value=cleanup_name_for_homekit(option)
            )
            serv_option.configure_char(CHAR_IN_USE, value=False)
            self.select_chars[option] = serv_option.configure_char(
                CHAR_ON,
                value=False,
                # ``option=option`` binds the loop variable as a default so
                # each lambda keeps its own option (late-binding pitfall).
                setter_callback=lambda value, option=option: self.select_option(option),
            )
        self.set_primary_service(self.select_chars[options[0]])
        # Set the state so it is in sync on initial
        # GET to avoid an event storm after homekit startup
        self.async_update_state(state)

    def select_option(self, option):
        """Set option from HomeKit."""
        _LOGGER.debug("%s: Set option to %s", self.entity_id, option)
        params = {ATTR_ENTITY_ID: self.entity_id, "option": option}
        self.async_call_service(self.domain, SERVICE_SELECT_OPTION, params)

    @callback
    def async_update_state(self, new_state):
        """Update switch state after state changed."""
        current_option = cleanup_name_for_homekit(new_state.state)
        # Exactly one option characteristic is on: the current option.
        for option, char in self.select_chars.items():
            char.set_value(option == current_option)
|
{
"content_hash": "0e25b82a30cdca96eee422a6f19106c1",
"timestamp": "",
"source": "github",
"line_count": 289,
"max_line_length": 88,
"avg_line_length": 36.11072664359862,
"alnum_prop": 0.6383671904944423,
"repo_name": "toddeye/home-assistant",
"id": "1598df015c510f733e70747407f6c98bf7b6d1ce",
"size": "10436",
"binary": false,
"copies": "2",
"ref": "refs/heads/dev",
"path": "homeassistant/components/homekit/type_switches.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Dockerfile",
"bytes": "3005"
},
{
"name": "PLSQL",
"bytes": "840"
},
{
"name": "Python",
"bytes": "47414832"
},
{
"name": "Shell",
"bytes": "6252"
}
],
"symlink_target": ""
}
|
# Copyright (c) 2001, Stanford University
# All rights reserved.
#
# See the file LICENSE.txt for information on redistributing this software.
import sys
import apiutil
# Emit the standard copyright header into the generated output.
apiutil.CopyrightC()

# Generated-file preamble: include guard plus the STATE_APIENTRY
# calling-convention macro (stdcall on Windows, empty elsewhere).
print """
/* DO NOT EDIT - THIS FILE AUTOMATICALLY GENERATED BY feedback_funcs.py SCRIPT */
#ifndef CR_STATE_FEEDBACK_FUNCS_H
#define CR_STATE_FEEDBACK_FUNCS_H
#include "cr_error.h"
#if defined(WINDOWS)
#define STATE_APIENTRY __stdcall
#else
#define STATE_APIENTRY
#endif
#define STATE_UNUSED(x) ((void)x)"""

# sys.argv[1] is the directory containing APIspec.txt.
keys = apiutil.GetDispatchedFunctions(sys.argv[1]+"/APIspec.txt")

# Declare a crStateFeedback* prototype for every function marked with the
# "feedback" special handling.
for func_name in apiutil.AllSpecials( "feedback" ):
    return_type = apiutil.ReturnType(func_name)
    params = apiutil.Parameters(func_name)
    print '%s STATE_APIENTRY crStateFeedback%s( %s );' % (return_type, func_name, apiutil.MakeDeclarationString(params))

# Likewise a crStateSelect* prototype for the "select" specials.
for func_name in apiutil.AllSpecials( "select" ):
    return_type = apiutil.ReturnType(func_name)
    params = apiutil.Parameters(func_name)
    print '%s STATE_APIENTRY crStateSelect%s( %s );' % (return_type, func_name, apiutil.MakeDeclarationString(params))

# Close the include guard.
print '\n#endif /* CR_STATE_FEEDBACK_FUNCS_H */'
|
{
"content_hash": "528d0de1fd92a656b0f32e46c1b0e488",
"timestamp": "",
"source": "github",
"line_count": 39,
"max_line_length": 117,
"avg_line_length": 29,
"alnum_prop": 0.7462422634836428,
"repo_name": "egraba/vbox_openbsd",
"id": "29e90520e56545945e53ce76b554d90db212f3e3",
"size": "1131",
"binary": false,
"copies": "22",
"ref": "refs/heads/master",
"path": "VirtualBox-5.0.0/src/VBox/Additions/common/crOpenGL/feedback/feedback_funcs.py",
"mode": "33261",
"license": "mit",
"language": [
{
"name": "Ada",
"bytes": "88714"
},
{
"name": "Assembly",
"bytes": "4303680"
},
{
"name": "AutoIt",
"bytes": "2187"
},
{
"name": "Batchfile",
"bytes": "95534"
},
{
"name": "C",
"bytes": "192632221"
},
{
"name": "C#",
"bytes": "64255"
},
{
"name": "C++",
"bytes": "83842667"
},
{
"name": "CLIPS",
"bytes": "5291"
},
{
"name": "CMake",
"bytes": "6041"
},
{
"name": "CSS",
"bytes": "26756"
},
{
"name": "D",
"bytes": "41844"
},
{
"name": "DIGITAL Command Language",
"bytes": "56579"
},
{
"name": "DTrace",
"bytes": "1466646"
},
{
"name": "GAP",
"bytes": "350327"
},
{
"name": "Groff",
"bytes": "298540"
},
{
"name": "HTML",
"bytes": "467691"
},
{
"name": "IDL",
"bytes": "106734"
},
{
"name": "Java",
"bytes": "261605"
},
{
"name": "JavaScript",
"bytes": "80927"
},
{
"name": "Lex",
"bytes": "25122"
},
{
"name": "Logos",
"bytes": "4941"
},
{
"name": "Makefile",
"bytes": "426902"
},
{
"name": "Module Management System",
"bytes": "2707"
},
{
"name": "NSIS",
"bytes": "177212"
},
{
"name": "Objective-C",
"bytes": "5619792"
},
{
"name": "Objective-C++",
"bytes": "81554"
},
{
"name": "PHP",
"bytes": "58585"
},
{
"name": "Pascal",
"bytes": "69941"
},
{
"name": "Perl",
"bytes": "240063"
},
{
"name": "PowerShell",
"bytes": "10664"
},
{
"name": "Python",
"bytes": "9094160"
},
{
"name": "QMake",
"bytes": "3055"
},
{
"name": "R",
"bytes": "21094"
},
{
"name": "SAS",
"bytes": "1847"
},
{
"name": "Shell",
"bytes": "1460572"
},
{
"name": "SourcePawn",
"bytes": "4139"
},
{
"name": "TypeScript",
"bytes": "142342"
},
{
"name": "Visual Basic",
"bytes": "7161"
},
{
"name": "XSLT",
"bytes": "1034475"
},
{
"name": "Yacc",
"bytes": "22312"
}
],
"symlink_target": ""
}
|
import errno
import socket
# errno values that signal "would block" / "interrupted" -- transient
# conditions on a non-blocking socket rather than real errors.
_ASYNC_BLOCKING_ERRNOS = {errno.EAGAIN,
                          errno.EWOULDBLOCK,
                          errno.EINTR}
# Windows reports the same condition with its own WSA* code.
if hasattr(errno, 'WSAEWOULDBLOCK'):
    _ASYNC_BLOCKING_ERRNOS.add(errno.WSAEWOULDBLOCK)
# Explicit public API: only the socketpair factory is exported.
__all__ = [
    'socketpair'
]
try:
    # Prefer the stdlib implementation when the platform provides one.
    socketpair = socket.socketpair
except AttributeError:
    # Origin: https://gist.github.com/4325783, by Geert Jansen. Public domain.
    def socketpair(family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0):
        """Emulate socket.socketpair() with a connected loopback TCP pair.

        Returns a tuple of two connected sockets. Only AF_INET/AF_INET6,
        SOCK_STREAM and protocol 0 are supported.
        """
        if family == socket.AF_INET:
            host = '127.0.0.1'
        elif family == socket.AF_INET6:
            host = '::1'
        else:
            raise ValueError("Only AF_INET and AF_INET6 socket address families "
                             "are supported")
        if type != socket.SOCK_STREAM:
            raise ValueError("Only SOCK_STREAM socket type is supported")
        if proto != 0:
            raise ValueError("Only protocol zero is supported")

        # We create a connected TCP socket. Note the trick with
        # setblocking(False) that prevents us from having to create a thread.
        lsock = socket.socket(family, type, proto)
        try:
            lsock.bind((host, 0))
            lsock.listen(1)
            # On IPv6, ignore flow_info and scope_id
            addr, port = lsock.getsockname()[:2]
            csock = socket.socket(family, type, proto)
            try:
                csock.setblocking(False)
                try:
                    csock.connect((addr, port))
                except (socket.error, OSError) as e:
                    # A non-blocking connect() normally fails *immediately*
                    # with EINPROGRESS (POSIX) while the handshake completes
                    # in the background; treat it like the would-block codes
                    # instead of re-raising (this was a latent bug).
                    if (e.errno not in _ASYNC_BLOCKING_ERRNOS
                            and e.errno != errno.EINPROGRESS):
                        raise
                csock.setblocking(True)
                ssock, _ = lsock.accept()
            except:
                csock.close()
                raise
        finally:
            lsock.close()
        return ssock, csock
|
{
"content_hash": "0ba25104fd02d9cd12fd891e0c768a70",
"timestamp": "",
"source": "github",
"line_count": 57,
"max_line_length": 81,
"avg_line_length": 34.12280701754386,
"alnum_prop": 0.5280205655526993,
"repo_name": "SethMichaelLarson/picklepipe",
"id": "bc32aaed94d90fa17b336988c9c314c2a78aa99c",
"size": "1945",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "picklepipe/socketpair.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "Batchfile",
"bytes": "3366"
},
{
"name": "PowerShell",
"bytes": "7194"
},
{
"name": "Python",
"bytes": "30952"
},
{
"name": "Shell",
"bytes": "1305"
}
],
"symlink_target": ""
}
|
"""
Utility functions for PDF library.
"""
__author__ = "Mathieu Fenniak"
__author_email__ = "biziqe@mathieu.fenniak.net"
import sys
# Try importing pyCrypto so RC4_encrypt can use the C implementation
# [https://pypi.python.org/pypi/pycrypto]; fall back to pure Python.
try:
    from Crypto.Cipher import ARC4
    PYCRYPTO_AVAILABLE = True
except ImportError:
    PYCRYPTO_AVAILABLE = False
# Python 2/3 compatibility shims: resolve the builtins module and derive
# version-neutral aliases for the names that changed in Python 3.
try:
    import __builtin__ as builtins
except ImportError:  # Py3
    import builtins


xrange_fn = getattr(builtins, "xrange", range)
_basestring = getattr(builtins, "basestring", str)

bytes_type = type(bytes())  # Works the same in Python 2.X and 3.X
string_type = getattr(builtins, "unicode", str)
# ``long`` only exists on Python 2; the conditional keeps the name from
# being evaluated on Python 3.
int_types = (int, long) if sys.version_info[0] < 3 else (int,)
# Make basic type tests more consistent
def isString(s):
    """Test if arg is a string. Compatible with Python 2 and 3."""
    # _basestring is basestring on Py2, str on Py3 (see compat shims above).
    return isinstance(s, _basestring)
def isInt(n):
    """Test if arg is an int. Compatible with Python 2 and 3."""
    # int_types also covers Py2 ``long``.
    return isinstance(n, int_types)
def isBytes(b):
    """Test if arg is a bytes instance. Compatible with Python 2 and 3."""
    # bytes_type is str on Py2 and bytes on Py3.
    return isinstance(b, bytes_type)
# custom implementation of warnings.formatwarning
def formatWarning(message, category, filename, lineno, line=None):
    """Format a warning as 'Category: message [file:line]\\n'.

    Only the basename of *filename* is shown. Using ``[-1]`` instead of
    ``[1]`` also handles bare filenames with no path separator, which used
    to raise IndexError; the local is renamed from ``file`` so it no longer
    shadows the Python 2 builtin.
    """
    fname = filename.replace("/", "\\").rsplit("\\", 1)[-1]
    return "%s: %s [%s:%s]\n" % (category.__name__, message, fname, lineno)
def readUntilWhitespace(stream, maxchars=None):
    """
    Read and return non-whitespace bytes from *stream*.

    Stops at the first whitespace byte, at end-of-stream, or once
    *maxchars* bytes have been collected.
    """
    collected = b_("")
    while True:
        ch = stream.read(1)
        if not ch or ch.isspace():
            break
        collected += ch
        if len(collected) == maxchars:
            break
    return collected
def readNonWhitespace(stream):
    """Skip over whitespace in *stream* and return the first
    non-whitespace byte (b'' at end of stream)."""
    ch = WHITESPACES[0]
    while ch in WHITESPACES:
        ch = stream.read(1)
    return ch
def skipOverWhitespace(stream):
    """
    Consume whitespace like readNonWhitespace, but return True when more
    than one byte was read (i.e. at least one whitespace byte was skipped).
    """
    ch = WHITESPACES[0]
    count = 0
    while ch in WHITESPACES:
        ch = stream.read(1)
        count += 1
    return count > 1
def skipOverComment(stream):
    """
    If the stream is positioned at a '%' comment, consume it through the end
    of the line; otherwise leave the stream position unchanged.
    """
    tok = stream.read(1)
    stream.seek(-1, 1)
    if tok == b_('%'):
        while tok not in (b_('\n'), b_('\r')):
            tok = stream.read(1)
            if not tok:
                # EOF inside an unterminated comment: read(1) would return
                # b'' forever, so bail out instead of looping indefinitely.
                break
def readUntilRegex(stream, regex, ignore_eof=False):
    """
    Reads until the regular expression pattern matched (ignore the match)
    Raise PdfStreamError on premature end-of-file.
    :param bool ignore_eof: If true, ignore end-of-line and return immediately
    """
    name = b_('')
    while True:
        # Read in 16-byte chunks rather than byte-at-a-time for speed.
        # NOTE(review): a multi-byte pattern spanning a chunk boundary would
        # not be detected; callers appear to use single-character classes --
        # confirm before reusing with longer patterns.
        tok = stream.read(16)
        if not tok:
            # stream has truncated prematurely
            if ignore_eof == True:
                return name
            else:
                raise PdfStreamError("Stream has ended unexpectedly")
        m = regex.search(tok)
        if m is not None:
            # Keep everything before the match and rewind the stream so the
            # matched bytes can be re-read by the caller.
            name += tok[:m.start()]
            stream.seek(m.start()-len(tok), 1)
            break
        name += tok
    return name
class ConvertFunctionsToVirtualList(object):
    """
    Read-only, list-like view backed by two callables: one returning the
    length and one returning the element at a given index. Supports
    negative indexes; slicing returns another virtual list over the
    selected indices.
    """

    def __init__(self, lengthFunction, getFunction):
        self.lengthFunction = lengthFunction
        self.getFunction = getFunction

    def __len__(self):
        return self.lengthFunction()

    def __getitem__(self, index):
        if isinstance(index, slice):
            # Resolve the slice against the current length, then wrap a new
            # virtual list that indirects through the selected indices.
            indices = xrange_fn(*index.indices(len(self)))
            cls = type(self)
            return cls(indices.__len__, lambda idx: self[indices[idx]])
        if not isInt(index):
            raise TypeError("sequence indices must be integers")
        size = len(self)
        if index < 0:
            # support negative indexes
            index = size + index
        if index < 0 or index >= size:
            raise IndexError("sequence index out of range")
        return self.getFunction(index)
def RC4_encrypt(key, plaintext):
    """RC4-encrypt (or decrypt -- RC4 is symmetric) *plaintext* with *key*.

    NOTE: RC4 is cryptographically weak; it is used here only because the
    PDF standard encryption it implements requires it.
    """
    # if pyCrypto is installed use lib
    if PYCRYPTO_AVAILABLE:
        cipher = ARC4.new(key)
        return cipher.encrypt(plaintext)
    # otherwise use pure python
    # Key-scheduling algorithm (KSA): permute S under the key.
    S = [i for i in range(256)]
    j = 0
    for i in range(256):
        j = (j + S[i] + ord_(key[i % len(key)])) % 256
        S[i], S[j] = S[j], S[i]
    # Pseudo-random generation algorithm (PRGA): XOR keystream with input.
    i, j = 0, 0
    retval = b_("")
    for x in range(len(plaintext)):
        i = (i + 1) % 256
        j = (j + S[i]) % 256
        S[i], S[j] = S[j], S[i]
        t = S[(S[i] + S[j]) % 256]
        retval += b_(chr(ord_(plaintext[x]) ^ t))
    return retval
def matrixMultiply(a, b):
    """Return the matrix product a @ b as nested lists of floats."""
    columns = list(zip(*b))
    return [
        [sum(float(x) * float(y) for x, y in zip(row, col)) for col in columns]
        for row in a
    ]
def markLocation(stream):
    """Creates a file showing current location in context.

    Debug helper: dumps RADIUS bytes on each side of the current position
    into 'PyPDF2_pdfLocation.txt' with a 'HERE' marker between them, then
    restores the stream position. Opens the dump in binary mode because PDF
    streams yield bytes, which cannot be written to a text-mode file on
    Python 3 (the old 'w' mode raised TypeError there); the ``with`` block
    also guarantees the handle is closed.
    """
    # Mainly for debugging
    RADIUS = 5000
    stream.seek(-RADIUS, 1)
    with open('PyPDF2_pdfLocation.txt', 'wb') as outputDoc:
        outputDoc.write(stream.read(RADIUS))
        outputDoc.write(b'HERE')
        outputDoc.write(stream.read(RADIUS))
    stream.seek(-RADIUS, 1)
class PyPdfError(Exception):
    """Base class for every exception this package raises."""
    pass


class PdfReadError(PyPdfError):
    """Raised when a PDF document cannot be parsed."""
    pass


class PageSizeNotDefinedError(PyPdfError):
    """Raised when a page size is required but has not been defined."""
    pass


class PdfReadWarning(UserWarning):
    """Warning category used for recoverable parsing problems."""
    pass


class PdfStreamError(PdfReadError):
    """Raised when a PDF content stream ends unexpectedly."""
    pass
if sys.version_info[0] < 3:
    def b_(s):
        """Coerce *s* to a byte string (a no-op on Python 2)."""
        return s
else:
    # Cache for encoded strings, so hot one-character tokens are not
    # re-encoded on every call.
    B_CACHE = {}

    def b_(s):
        """Coerce *s* to bytes (Latin-1 encoded on Python 3)."""
        bc = B_CACHE
        if s in bc:
            return bc[s]
        if type(s) == bytes:
            return s
        else:
            r = s.encode('latin-1')
            if len(s) < 2:
                # Only single-character strings are cached, presumably to
                # keep the cache bounded -- TODO confirm.
                bc[s] = r
            return r
def u_(s):
    """Return *s* as a unicode/text string (unicode-escape decoded on
    Python 2; unchanged on Python 3)."""
    if sys.version_info[0] < 3:
        return unicode(s, 'unicode_escape')
    return s
def str_(b):
    """Coerce *b* to a native str: bytes are decoded as Latin-1 on
    Python 3; everything else passes through unchanged."""
    if sys.version_info[0] < 3:
        return b
    if type(b) == bytes:
        return b.decode('latin-1')
    return b
def ord_(b):
    """Return the integer value of *b*: applies ord() to one-character
    strings, passes already-integer byte values through (Python 3)."""
    # Keep the exact ``type(b) == str`` test (not isinstance) to preserve
    # the original behavior for str subclasses.
    if sys.version_info[0] >= 3 and type(b) != str:
        return b
    return ord(b)
def chr_(c):
    """Return *c* as a single-character native string (chr() on Python 3,
    identity on Python 2)."""
    return c if sys.version_info[0] < 3 else chr(c)
def barray(b):
    """Return *b* as a mutable byte buffer (bytearray on Python 3,
    identity on Python 2)."""
    return b if sys.version_info[0] < 3 else bytearray(b)
def hexencode(b):
    """Return the hexadecimal encoding of the byte string *b*."""
    if sys.version_info[0] < 3:
        return b.encode('hex')
    import codecs
    encode = codecs.getencoder('hex_codec')
    return encode(b)[0]
def hexStr(num):
    """Return hex(num) with any trailing 'L' (Python 2 longs) stripped."""
    text = hex(num)
    return text.replace('L', '')
# Byte values treated as whitespace by the stream-reading helpers above
# (space, LF, CR, tab, NUL).
WHITESPACES = [b_(x) for x in [' ', '\n', '\r', '\t', '\x00']]
|
{
"content_hash": "755bd9db6aa0df311f22e17cc160a48e",
"timestamp": "",
"source": "github",
"line_count": 279,
"max_line_length": 88,
"avg_line_length": 24.118279569892472,
"alnum_prop": 0.578986476445237,
"repo_name": "Tristan79/ComicStreamer",
"id": "be0639cc715c74ec117cc1ff83f7d20306a074df",
"size": "8211",
"binary": false,
"copies": "1",
"ref": "refs/heads/master",
"path": "PyPDF2/utils.py",
"mode": "33188",
"license": "apache-2.0",
"language": [
{
"name": "Batchfile",
"bytes": "33"
},
{
"name": "C",
"bytes": "3802"
},
{
"name": "CSS",
"bytes": "23527"
},
{
"name": "HTML",
"bytes": "89291"
},
{
"name": "JavaScript",
"bytes": "263314"
},
{
"name": "Makefile",
"bytes": "4947"
},
{
"name": "NSIS",
"bytes": "6498"
},
{
"name": "Python",
"bytes": "631930"
}
],
"symlink_target": ""
}
|
import sys
# Python 2-only hack: reload() restores sys.setdefaultencoding (deleted by
# site.py) so implicit str/unicode conversions use UTF-8.
# NOTE(review): fails on Python 3 (no builtin reload); kept for behavior.
reload(sys)
sys.setdefaultencoding('UTF8')
import json
# If any of the keys below are equal to a certain value
# then we can delete it because it's the default value,
# making the docker image JSON deterministic.
SAFEDELS = {
    "Size": 0,
    "config": {
        "ExposedPorts": None,
        "MacAddress": "",
        "NetworkDisabled": False,
        "PortSpecs": None,
        "VolumeDriver": ""
    }
}
# container_config shares the same set of defaults as config.
SAFEDELS["container_config"] = SAFEDELS["config"]
def makedet(j, safedels):
    """Recursively delete keys from *j* whose values equal the defaults
    given in *safedels*, mutating *j* in place.

    A key is removed when its value matches the known default; when the
    default is itself a dict, the corresponding sub-dict is pruned the same
    way (and kept, even if emptied).
    """
    for key, default in safedels.items():
        if key not in j:
            continue
        # isinstance instead of ``type(v) == dict``: idiomatic and also
        # accepts dict subclasses.
        if isinstance(default, dict):
            makedet(j[key], default)
        elif j[key] == default:
            del j[key]
def main():
    """Read docker image JSON from stdin, strip default values, and write
    the deterministic result to stdout."""
    document = json.load(sys.stdin)
    makedet(document, SAFEDELS)
    json.dump(document, sys.stdout, sort_keys=True)


if __name__ == '__main__':
    main()
|
{
"content_hash": "9d178f970157b78875f1599b67697aae",
"timestamp": "",
"source": "github",
"line_count": 35,
"max_line_length": 55,
"avg_line_length": 22.285714285714285,
"alnum_prop": 0.5602564102564103,
"repo_name": "triton/triton",
"id": "439c213138782078dc583b64732b5208b460a566",
"size": "906",
"binary": false,
"copies": "5",
"ref": "refs/heads/master",
"path": "pkgs/build-support/docker/detjson.py",
"mode": "33188",
"license": "mit",
"language": [
{
"name": "C",
"bytes": "19566"
},
{
"name": "C++",
"bytes": "654"
},
{
"name": "CMake",
"bytes": "1035"
},
{
"name": "CSS",
"bytes": "1837"
},
{
"name": "Dockerfile",
"bytes": "553"
},
{
"name": "Emacs Lisp",
"bytes": "673"
},
{
"name": "Go",
"bytes": "373"
},
{
"name": "JavaScript",
"bytes": "936"
},
{
"name": "Nix",
"bytes": "6740090"
},
{
"name": "Perl",
"bytes": "166382"
},
{
"name": "Python",
"bytes": "36406"
},
{
"name": "Ruby",
"bytes": "6523"
},
{
"name": "Shell",
"bytes": "469375"
},
{
"name": "XSLT",
"bytes": "6371"
},
{
"name": "sed",
"bytes": "794"
}
],
"symlink_target": ""
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.