repo_name
stringlengths 5
100
| path
stringlengths 4
375
| copies
stringclasses 991
values | size
stringlengths 4
7
| content
stringlengths 666
1M
| license
stringclasses 15
values |
|---|---|---|---|---|---|
peiyuwang/pants
|
src/python/pants/help/build_dictionary_info_extracter.py
|
12
|
10702
|
# coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import inspect
import re
import textwrap
from collections import OrderedDict, namedtuple
from pants.base.exceptions import TaskError
from pants.build_graph.target import Target
from pants.util.memo import memoized_method
class FunctionArg(namedtuple('_FunctionArg', ['name', 'description', 'has_default', 'default'])):
  """A single argument accepted by a BUILD-file function.

  Fields: the arg's name, its help description, whether it has a default
  value, and that default (None when has_default is False).
  """
class BuildSymbolInfo(namedtuple('_BuildSymbolInfo',
                                 ['symbol', 'description', 'details_lines', 'args'])):
  """Help information about one symbol usable in a BUILD file.

  symbol: The symbol's name.
  description: A one-line summary.
  details_lines: Further detail lines, possibly an empty list.
  args: A list of FunctionArg instances describing the symbol's arguments.
  """

  def details(self):
    """Collapse the detail lines into one newline-separated string."""
    return '\n'.join(self.details_lines)
class BuildDictionaryInfoExtracter(object):
  """Extracts help information about the symbols that may be used in BUILD files."""

  ADD_DESCR = '<Add description>'

  # Target.__init__ receives several plumbing args from TargetAddressable that a
  # BUILD file author never writes, so we special-case its true BUILD-file-facing
  # arguments here instead of introspecting its signature.
  basic_target_args = [
    FunctionArg('dependencies', '', True, []),
    FunctionArg('description', '', True, None),
    FunctionArg('name', '', False, None),
    FunctionArg('no_cache', '', True, False),
    FunctionArg('tags', '', True, None),
  ]

  @classmethod
  def get_description_from_docstring(cls, obj):
    """Returns a pair (description, details) from the obj's docstring.

    description is a single line.
    details is a list of subsequent lines, possibly empty, with any :param:-style
    stanzas stripped out (those are rendered separately as argument help).
    """
    doc = obj.__doc__ or ''
    p = doc.find('\n')
    if p == -1:
      return doc, []
    else:
      description = doc[:p]
      details = textwrap.dedent(doc[p+1:]).splitlines()
      # Remove leading and trailing empty lines.
      while details and not details[0].strip():
        details = details[1:]
      while details and not details[-1].strip():
        details.pop()

      # Drop :param ...: stanzas: stop recording at a line containing ":param"
      # and resume at the next blank line.
      recording = True
      details_without_params = []
      for detail_line in details:
        if ":param" in detail_line:
          recording = False
        if not detail_line.strip():
          recording = True
        if recording:
          details_without_params.append(detail_line)
      return description, details_without_params

  @classmethod
  @memoized_method
  def _get_stanza_first_line_re(cls):
    """Returns a regex that can be used to find the first line of a stanza in a docstring.

    The returned regex can be used to find the first line where there is not a data type
    in the arg name (e.g., :param a:), where there is a data type in the arg name
    (e.g., :param str a:), where there is a single word between the colons (e.g., :returns:),
    and where a newline immediately follows the second colon in the stanza.
    """
    # Raw string so \w and \s reach the regex engine as regex escapes (a plain
    # string here raises invalid-escape warnings/errors on modern Python).
    return re.compile(r':(\w+)\s*(\w+\s+)?(\w*):\s*(.*)')

  @classmethod
  @memoized_method
  def _get_default_value_re(cls):
    """Returns a regex matching a '(Default: ...)' annotation in an arg description."""
    return re.compile(r' \([Dd]efault: (.*)\)')

  @classmethod
  def get_arg_descriptions_from_docstring(cls, obj):
    """Returns an ordered map of arg name -> arg description found in :param: stanzas."""
    ret = OrderedDict()
    name = ''
    doc = obj.__doc__ or ''
    lines = [s.strip() for s in doc.split('\n')]
    stanza_first_line_re = cls._get_stanza_first_line_re()
    for line in lines:
      m = stanza_first_line_re.match(line)
      if m and m.group(1) == 'param':
        # If first line of a parameter description, set name and description.
        name, description = m.group(3, 4)
        ret[name] = description
      elif m and m.group(1) != 'param':
        # If first line of a description of an item other than a parameter, clear name.
        name = ''
      elif name and line:
        # If subsequent line of a parameter description, add to existing description (if any) for
        # that parameter.
        ret[name] += (' ' + line) if ret[name] else line
      # Ignore subsequent lines of descriptions of items other than parameters.
    return ret

  @classmethod
  def get_args_for_target_type(cls, target_type):
    """Returns a list of FunctionArgs for the BUILD-file-facing args of target_type."""
    return list(cls._get_args_for_target_type(target_type))

  @classmethod
  def _get_args_for_target_type(cls, target_type):
    """Yields FunctionArgs for target_type, sorted by arg name."""
    args = {}  # name: info.
    # Target.__init__ has several args that are passed to it by TargetAddressable and not by
    # the BUILD file author, so we can't naively inspect it. Instead we special-case its
    # true BUILD-file-facing arguments here.
    for arg in cls.basic_target_args:
      args[arg.name] = arg  # Don't yield yet; subclass might supply a better description.

    # Non-BUILD-file-facing Target.__init__ args that some Target subclasses capture in their
    # own __init__ for various reasons.
    ignore_args = {'address', 'payload'}

    # Now look at the MRO, in reverse (so we see the more 'common' args first).
    # If we see info for an arg, it's more specific than whatever description we have so far,
    # so clobber its entry in the args dict.
    methods_seen = set()  # Ensure we only look at each __init__ method once.
    for _type in reversed([t for t in target_type.mro() if issubclass(t, Target)]):
      if (inspect.ismethod(_type.__init__) and
          _type.__init__ not in methods_seen and
          _type.__init__ != Target.__init__):
        for arg in cls._get_function_args(_type.__init__):
          args[arg.name] = arg
        methods_seen.add(_type.__init__)

    for arg_name in sorted(args.keys()):
      if arg_name not in ignore_args:
        yield args[arg_name]

  @classmethod
  def get_function_args(cls, func):
    """Returns pairs (arg, default) for each argument of func, in declaration order.

    Ignores *args, **kwargs. Ignores self for methods.
    """
    return list(cls._get_function_args(func))

  @classmethod
  def _get_function_args(cls, func):
    """Yields a FunctionArg for each of func's args, with docstring-derived descriptions."""
    arg_descriptions = cls.get_arg_descriptions_from_docstring(func)
    argspec = inspect.getargspec(func)
    arg_names = argspec.args
    if inspect.ismethod(func) or func.__name__ == '__new__':
      arg_names = arg_names[1:]  # Skip the implicit self/cls parameter.
    num_defaulted_args = len(argspec.defaults) if argspec.defaults is not None else 0
    first_defaulted_arg = len(arg_names) - num_defaulted_args
    for i in range(0, first_defaulted_arg):
      yield FunctionArg(arg_names[i], arg_descriptions.pop(arg_names[i], ''), False, None)
    for i in range(first_defaulted_arg, len(arg_names)):
      yield FunctionArg(arg_names[i], arg_descriptions.pop(arg_names[i], ''), True,
                        argspec.defaults[i - first_defaulted_arg])
    if argspec.varargs:
      # NOTE(review): the pop default here is None, unlike '' elsewhere —
      # confirm callers tolerate a None description before normalizing.
      yield FunctionArg('*{}'.format(argspec.varargs), arg_descriptions.pop(argspec.varargs, None),
                        False, None)
    if argspec.keywords:
      # Any remaining arg_descriptions are for kwargs.
      for arg_name, descr in arg_descriptions.items():
        # Get the default value out of the description, if present.
        mo = cls._get_default_value_re().search(descr)
        default_value = mo.group(1) if mo else None
        descr_sans_default = '{}{}'.format(descr[:mo.start()], descr[mo.end():]) if mo else descr
        yield FunctionArg(arg_name, descr_sans_default, True, default_value)

  def __init__(self, buildfile_aliases):
    """:param buildfile_aliases: The BuildFileAliases to extract BUILD-file help from."""
    self._buildfile_aliases = buildfile_aliases

  def get_target_args(self, alias):
    """Returns a list of FunctionArgs for the specified target_type.

    :raises TaskError: if alias does not name a known target type.
    """
    # NOTE(review): if alias is unknown and .get() returns None, list(None)
    # raises TypeError before the TaskError below fires — presumably aliases
    # are validated upstream; confirm before relying on the TaskError.
    target_types = list(self._buildfile_aliases.target_types_by_alias.get(alias))
    if not target_types:
      raise TaskError('No such target type: {}'.format(alias))
    return self.get_args_for_target_type(target_types[0])

  def get_object_args(self, alias):
    """Returns a list of FunctionArgs for the object registered under alias.

    :raises TaskError: if alias does not name a known object type.
    """
    obj_type = self._buildfile_aliases.objects.get(alias)
    if not obj_type:
      raise TaskError('No such object type: {}'.format(alias))
    if inspect.isfunction(obj_type) or inspect.ismethod(obj_type):
      return self.get_function_args(obj_type)
    elif inspect.isclass(obj_type) and inspect.ismethod(obj_type.__init__):
      return self.get_function_args(obj_type.__init__)
    elif inspect.isclass(obj_type):
      return self.get_function_args(obj_type.__new__)
    elif hasattr(obj_type, '__call__'):
      return self.get_function_args(obj_type.__call__)
    else:
      return []

  def get_object_factory_args(self, alias):
    """Returns a list of FunctionArgs for the context-aware object factory under alias.

    :raises TaskError: if alias does not name a known factory.
    """
    obj_factory = self._buildfile_aliases.context_aware_object_factories.get(alias)
    if not obj_factory:
      raise TaskError('No such context aware object factory: {}'.format(alias))
    return self.get_function_args(obj_factory.__call__)

  def get_target_type_info(self):
    """Returns a sorted list of BuildSymbolInfo for all known target types."""
    return sorted(self._get_target_type_info())

  def _get_target_type_info(self):
    """Yields a BuildSymbolInfo per target type and per target macro alias."""
    for alias, target_type in self._buildfile_aliases.target_types.items():
      description, details = self.get_description_from_docstring(target_type)
      description = description or self.ADD_DESCR
      yield BuildSymbolInfo(alias, description, details, self.get_target_args(alias))
    for alias, target_macro_factory in self._buildfile_aliases.target_macro_factories.items():
      # Take the description from the first target type we encounter that has one.
      target_args = self.get_target_args(alias)
      for target_type in target_macro_factory.target_types:
        description, details = self.get_description_from_docstring(target_type)
        if description:
          yield BuildSymbolInfo(alias, description, details, target_args)
          break
      else:
        # No constituent target type had a description.
        yield BuildSymbolInfo(alias, self.ADD_DESCR, [], target_args)

  def get_object_info(self):
    """Returns a sorted list of BuildSymbolInfo for all registered BUILD-file objects."""
    return sorted(self._get_object_info())

  def _get_object_info(self):
    for alias, obj in self._buildfile_aliases.objects.items():
      description, details = self.get_description_from_docstring(obj)
      description = description or self.ADD_DESCR
      yield BuildSymbolInfo(alias, description, details, self.get_object_args(alias))

  def get_object_factory_info(self):
    """Returns a sorted list of BuildSymbolInfo for all context-aware object factories."""
    return sorted(self._get_object_factory_info())

  def _get_object_factory_info(self):
    for alias, factory_type in self._buildfile_aliases.context_aware_object_factories.items():
      description, details = self.get_description_from_docstring(factory_type)
      description = description or self.ADD_DESCR
      yield BuildSymbolInfo(alias, description, details, self.get_object_factory_args(alias))
|
apache-2.0
|
manisandro/QGIS
|
python/plugins/processing/core/ProcessingResults.py
|
17
|
1679
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
ProcessingResults.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QObject, pyqtSignal
class ProcessingResults(QObject):

    # Emitted after a new result has been appended via addResult().
    resultAdded = pyqtSignal()

    # NOTE(review): class-level list, so all instances share one results list.
    # This appears intentional given the single module-level ``resultsList``
    # instance below — confirm before creating additional instances.
    results = []

    def addResult(self, icon, name, timestamp, result):
        """Record a new Result and notify listeners via the resultAdded signal."""
        self.results.append(Result(icon, name, timestamp, result))
        self.resultAdded.emit()

    def getResults(self):
        """Return the (shared) list of all recorded Result objects."""
        return self.results
class Result:
    """A single processing result: its icon, display name, timestamp and the
    file that holds the produced output."""

    def __init__(self, icon, name, timestamp, filename):
        # Store the metadata verbatim; attribute names mirror the parameters.
        self.icon, self.name = icon, name
        self.timestamp, self.filename = timestamp, filename
# Module-level singleton through which the processing framework registers and
# retrieves results.
resultsList = ProcessingResults()
|
gpl-2.0
|
HaebinShin/tensorflow
|
tensorflow/contrib/linear_optimizer/python/kernel_tests/sdca_ops_test.py
|
1
|
34924
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for SdcaModel."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import uuid
import tensorflow as tf
from tensorflow.contrib.linear_optimizer.python.ops.sdca_ops import _sdca_ops
from tensorflow.contrib.linear_optimizer.python.ops.sdca_ops import _ShardedMutableHashTable
from tensorflow.contrib.linear_optimizer.python.ops.sdca_ops import SdcaModel
from tensorflow.python.framework.test_util import TensorFlowTestCase
from tensorflow.python.platform import googletest
# Number of minimize() steps each test runs before checking convergence.
_MAX_ITERATIONS = 100
# Shard counts to exercise for the sharded mutable hash table
# (None presumably selects the model's default sharding — confirm in SdcaModel).
_SHARD_NUMBERS = [None, 1, 3, 10]
def make_example_proto(feature_dict, target, value=1.0):
  """Build a tf.train.Example with a float 'target' and sparse features.

  Each key in feature_dict contributes a '<key>_indices' int64 feature holding
  the given indices and a '<key>_values' float feature holding `value`
  repeated once per index.
  """
  example = tf.train.Example()
  example.features.feature['target'].float_list.value.append(target)
  for feature_name, indices in feature_dict.items():
    example.features.feature[feature_name + '_indices'].int64_list.value.extend(
        indices)
    example.features.feature[feature_name + '_values'].float_list.value.extend(
        [value] * len(indices))
  return example
def make_example_dict(example_protos, example_weights):
  """Parse serialized Examples into the examples dict expected by SdcaModel."""
  feature_spec = {
      'target': tf.FixedLenFeature(shape=[1],
                                   dtype=tf.float32,
                                   default_value=0),
      'age_indices': tf.VarLenFeature(dtype=tf.int64),
      'age_values': tf.VarLenFeature(dtype=tf.float32),
      'gender_indices': tf.VarLenFeature(dtype=tf.int64),
      'gender_values': tf.VarLenFeature(dtype=tf.float32)
  }
  parsed = tf.parse_example(
      [proto.SerializeToString() for proto in example_protos], feature_spec)

  def merge(indices, values):
    # Combine the parallel indices/values tensors into a single SparseTensor.
    return tf.sparse_merge(indices, values, indices.shape[1])

  return dict(
      sparse_features=[
          merge(parsed['age_indices'], parsed['age_values']),
          merge(parsed['gender_indices'], parsed['gender_values'])
      ],
      dense_features=[],
      example_weights=example_weights,
      example_labels=tf.reshape(parsed['target'], [-1]),
      example_ids=['%d' % i for i in range(0, len(example_protos))])
def make_dense_examples_dict(dense_feature_values, weights, labels):
  """Build an SdcaModel examples dict containing only dense features."""
  dense_tensors = [tf.convert_to_tensor(values, dtype=tf.float32)
                   for values in dense_feature_values]
  return dict(sparse_features=[],
              dense_features=dense_tensors,
              example_weights=weights,
              example_labels=labels,
              example_ids=['%d' % i for i in range(0, len(labels))])
def make_variable_dict(max_age, max_gender):
  """Create zero-initialized sparse weight variables for age and gender."""
  # TODO(sibyl-toe9oF2e): Figure out how to derive max_age & max_gender from
  # examples_dict.
  age = tf.Variable(tf.zeros([max_age + 1], dtype=tf.float32))
  gender = tf.Variable(tf.zeros([max_gender + 1], dtype=tf.float32))
  return dict(sparse_features_weights=[age, gender],
              dense_features_weights=[])
def make_dense_variable_dict(num_dense_features):
  """Create one zero-initialized scalar weight variable per dense feature."""
  weights = [tf.Variable(tf.zeros([1], dtype=tf.float32))
             for _ in range(0, num_dense_features)]
  return dict(sparse_features_weights=[],
              dense_features_weights=weights)
def get_binary_predictions_for_logistic(predictions, cutoff=0.5):
  """Threshold logistic predictions at `cutoff`, yielding int32 0/1 labels."""
  threshold = tf.ones_like(predictions) * cutoff
  return tf.cast(tf.greater_equal(predictions, threshold), dtype=tf.int32)
def get_binary_predictions_for_hinge(predictions):
  """Threshold hinge predictions at zero, yielding int32 0/1 labels."""
  zeros = tf.zeros_like(predictions)
  return tf.cast(tf.greater_equal(predictions, zeros), dtype=tf.int32)
# Setup the single randomly-named container shared across all tests. This is
# testing proper isolation across optimizers instantiated in each of the tests
# below, even though they live in the same container.
CONTAINER = uuid.uuid4().hex
# TODO(sibyl-Mooth6ku): Add tests that exercise L1 and Shrinking.
# TODO(sibyl-vie3Poto): Refactor tests to avoid repetition of boilerplate code.
class SdcaOptimizerTest(TensorFlowTestCase):
  """Base SDCA optimizer test class for any loss type."""

  def _single_threaded_test_session(self):
    """Return a CPU test session restricted to a single thread for determinism."""
    session_config = tf.ConfigProto(inter_op_parallelism_threads=1,
                                    intra_op_parallelism_threads=1)
    return self.test_session(use_gpu=False, config=session_config)
class SdcaWithLogisticLossTest(SdcaOptimizerTest):
  """SDCA optimizer test class for logistic loss."""

  def testSimple(self):
    """Trains on two separable examples and checks losses and predictions."""
    # Setup test data
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, 0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 1),
    ]
    example_weights = [1.0, 1.0]
    for num_shards in _SHARD_NUMBERS:
      with self._single_threaded_test_session():
        examples = make_example_dict(example_protos, example_weights)
        variables = make_variable_dict(1, 1)
        options = dict(symmetric_l2_regularization=1,
                       symmetric_l1_regularization=0,
                       loss_type='logistic_loss')
        lr = SdcaModel(CONTAINER, examples, variables, options,
                       num_table_shards=num_shards)
        tf.initialize_all_variables().run()
        unregularized_loss = lr.unregularized_loss(examples)
        loss = lr.regularized_loss(examples)
        predictions = lr.predictions(examples)
        # At zero-initialized weights both losses equal log(2) ~= 0.693147.
        self.assertAllClose(0.693147, unregularized_loss.eval())
        self.assertAllClose(0.693147, loss.eval())
        train_op = lr.minimize()
        for _ in range(_MAX_ITERATIONS):
          train_op.run()
        # The high tolerance in unregularized_loss comparisons is due to the
        # fact that it's possible to trade off unregularized_loss vs.
        # regularization and still have a sum that is quite close to the
        # optimal regularized_loss value. SDCA's duality gap only ensures that
        # the regularized_loss is within 0.01 of optimal.
        # 0.525457 is the optimal regularized_loss.
        # 0.411608 is the unregularized_loss at that optimum.
        self.assertAllClose(0.411608, unregularized_loss.eval(), atol=0.05)
        self.assertAllClose(0.525457, loss.eval(), atol=0.01)
        predicted_labels = get_binary_predictions_for_logistic(predictions)
        self.assertAllEqual([0, 1], predicted_labels.eval())
        self.assertAllClose(0.01,
                            lr.approximate_duality_gap().eval(),
                            rtol=1e-2,
                            atol=1e-2)

  def testSimpleNoL2(self):
    """As testSimple but with L2 = 0 in the reported regularized loss."""
    # Same as test above (so comments from above apply) but without an L2.
    # The algorithm should behave as if we have an L2 of 1 in optimization but
    # 0 in regularized_loss.
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, 0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 1),
    ]
    example_weights = [1.0, 1.0]
    for num_shards in _SHARD_NUMBERS:
      with self._single_threaded_test_session():
        examples = make_example_dict(example_protos, example_weights)
        variables = make_variable_dict(1, 1)
        options = dict(symmetric_l2_regularization=0,
                       symmetric_l1_regularization=0,
                       loss_type='logistic_loss')
        lr = SdcaModel(CONTAINER, examples, variables, options,
                       num_table_shards=num_shards)
        tf.initialize_all_variables().run()
        unregularized_loss = lr.unregularized_loss(examples)
        loss = lr.regularized_loss(examples)
        predictions = lr.predictions(examples)
        self.assertAllClose(0.693147, unregularized_loss.eval())
        self.assertAllClose(0.693147, loss.eval())
        train_op = lr.minimize()
        for _ in range(_MAX_ITERATIONS):
          train_op.run()
        # There is neither L1 nor L2 loss, so regularized and unregularized
        # losses should be exactly the same.
        self.assertAllClose(0.40244, unregularized_loss.eval(), atol=0.01)
        self.assertAllClose(0.40244, loss.eval(), atol=0.01)
        predicted_labels = get_binary_predictions_for_logistic(predictions)
        self.assertAllEqual([0, 1], predicted_labels.eval())
        self.assertAllClose(0.01,
                            lr.approximate_duality_gap().eval(),
                            rtol=1e-2,
                            atol=1e-2)

  def testSomeUnweightedExamples(self):
    """Zero-weight examples must not affect training results."""
    # Setup test data with 4 examples, but should produce the same
    # results as testSimple.
    example_protos = [
        # Will be used.
        make_example_proto(
            {'age': [0],
             'gender': [0]}, 0),
        # Will be ignored.
        make_example_proto(
            {'age': [1],
             'gender': [0]}, 0),
        # Will be used.
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 1),
        # Will be ignored.
        make_example_proto(
            {'age': [1],
             'gender': [0]}, 1),
    ]
    example_weights = [1.0, 0.0, 1.0, 0.0]
    for num_shards in _SHARD_NUMBERS:
      with self._single_threaded_test_session():
        # Only use examples 0 and 2
        examples = make_example_dict(example_protos, example_weights)
        variables = make_variable_dict(1, 1)
        options = dict(symmetric_l2_regularization=1,
                       symmetric_l1_regularization=0,
                       loss_type='logistic_loss')
        lr = SdcaModel(CONTAINER, examples, variables, options,
                       num_table_shards=num_shards)
        tf.initialize_all_variables().run()
        unregularized_loss = lr.unregularized_loss(examples)
        loss = lr.regularized_loss(examples)
        predictions = lr.predictions(examples)
        train_op = lr.minimize()
        for _ in range(_MAX_ITERATIONS):
          train_op.run()
        # Same optimum as testSimple since the weighted examples are identical.
        self.assertAllClose(0.411608, unregularized_loss.eval(), atol=0.05)
        self.assertAllClose(0.525457, loss.eval(), atol=0.01)
        predicted_labels = get_binary_predictions_for_logistic(predictions)
        self.assertAllClose([0, 1, 1, 1], predicted_labels.eval())
        self.assertAllClose(0.01,
                            lr.approximate_duality_gap().eval(),
                            rtol=1e-2,
                            atol=1e-2)

  def testFractionalExampleLabel(self):
    """A non-binary label must make minimize() fail with an op error."""
    # Setup test data with 1 positive, and 1 mostly-negative example.
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, 0.1),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 1),
    ]
    example_weights = [1.0, 1.0]
    for num_shards in _SHARD_NUMBERS:
      with self._single_threaded_test_session():
        examples = make_example_dict(example_protos, example_weights)
        variables = make_variable_dict(1, 1)
        options = dict(symmetric_l2_regularization=1,
                       symmetric_l1_regularization=0,
                       loss_type='logistic_loss')
        lr = SdcaModel(CONTAINER, examples, variables, options,
                       num_table_shards=num_shards)
        tf.initialize_all_variables().run()
        with self.assertRaisesOpError(
            'Only labels of 0.0 or 1.0 are supported right now.'):
          lr.minimize().run()

  def testImbalanced(self):
    """Trains on a 1-positive / 3-negative class distribution."""
    # Setup test data with 1 positive, and 3 negative examples.
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, 0),
        make_example_proto(
            {'age': [2],
             'gender': [0]}, 0),
        make_example_proto(
            {'age': [3],
             'gender': [0]}, 0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 1),
    ]
    example_weights = [1.0, 1.0, 1.0, 1.0]
    for num_shards in _SHARD_NUMBERS:
      with self._single_threaded_test_session():
        examples = make_example_dict(example_protos, example_weights)
        variables = make_variable_dict(3, 1)
        options = dict(symmetric_l2_regularization=1,
                       symmetric_l1_regularization=0,
                       loss_type='logistic_loss')
        lr = SdcaModel(CONTAINER, examples, variables, options,
                       num_table_shards=num_shards)
        tf.initialize_all_variables().run()
        unregularized_loss = lr.unregularized_loss(examples)
        loss = lr.regularized_loss(examples)
        predictions = lr.predictions(examples)
        train_op = lr.minimize()
        for _ in range(_MAX_ITERATIONS):
          train_op.run()
        self.assertAllClose(0.226487 + 0.102902,
                            unregularized_loss.eval(),
                            atol=0.08)
        self.assertAllClose(0.328394 + 0.131364, loss.eval(), atol=0.01)
        predicted_labels = get_binary_predictions_for_logistic(predictions)
        self.assertAllEqual([0, 0, 0, 1], predicted_labels.eval())
        self.assertAllClose(0.01,
                            lr.approximate_duality_gap().eval(),
                            rtol=1e-2,
                            atol=1e-2)

  def testImbalancedWithExampleWeights(self):
    """Trains with per-example weights emphasizing the negative example."""
    # Setup test data with 1 positive, and 1 negative example.
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, 0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 1),
    ]
    example_weights = [3.0, 1.0]
    for num_shards in _SHARD_NUMBERS:
      with self._single_threaded_test_session():
        examples = make_example_dict(example_protos, example_weights)
        variables = make_variable_dict(1, 1)
        options = dict(symmetric_l2_regularization=1,
                       symmetric_l1_regularization=0,
                       loss_type='logistic_loss')
        lr = SdcaModel(CONTAINER, examples, variables, options,
                       num_table_shards=num_shards)
        tf.initialize_all_variables().run()
        unregularized_loss = lr.unregularized_loss(examples)
        loss = lr.regularized_loss(examples)
        predictions = lr.predictions(examples)
        train_op = lr.minimize()
        for _ in range(_MAX_ITERATIONS):
          train_op.run()
        self.assertAllClose(0.284860, unregularized_loss.eval(), atol=0.08)
        self.assertAllClose(0.408044, loss.eval(), atol=0.012)
        predicted_labels = get_binary_predictions_for_logistic(predictions)
        self.assertAllEqual([0, 1], predicted_labels.eval())
        self.assertAllClose(0.01,
                            lr.approximate_duality_gap().eval(),
                            rtol=1e-2,
                            atol=1e-2)

  def testInstancesOfOneClassOnly(self):
    """Trains when the only weighted examples belong to a single class."""
    # Setup test data with 1 positive (ignored), and 1 negative example.
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, 0),
        make_example_proto(
            {'age': [1],
             'gender': [0]}, 1),  # Shares gender with the instance above.
    ]
    example_weights = [1.0, 0.0]  # Second example "omitted" from training.
    for num_shards in _SHARD_NUMBERS:
      with self._single_threaded_test_session():
        examples = make_example_dict(example_protos, example_weights)
        variables = make_variable_dict(1, 1)
        options = dict(symmetric_l2_regularization=1,
                       symmetric_l1_regularization=0,
                       loss_type='logistic_loss')
        lr = SdcaModel(CONTAINER, examples, variables, options,
                       num_table_shards=num_shards)
        tf.initialize_all_variables().run()
        unregularized_loss = lr.unregularized_loss(examples)
        loss = lr.regularized_loss(examples)
        predictions = lr.predictions(examples)
        train_op = lr.minimize()
        for _ in range(_MAX_ITERATIONS):
          train_op.run()
        self.assertAllClose(0.411608, unregularized_loss.eval(), atol=0.05)
        self.assertAllClose(0.525457, loss.eval(), atol=0.01)
        predicted_labels = get_binary_predictions_for_logistic(predictions)
        self.assertAllEqual([0, 0], predicted_labels.eval())
        self.assertAllClose(0.01,
                            lr.approximate_duality_gap().eval(),
                            rtol=1e-2,
                            atol=1e-2)
# TODO(katsiaspis): add a test for the case when examples at the end of an
# epoch are repeated, since example id may be duplicated.
class SdcaWithLinearLossTest(SdcaOptimizerTest):
  """SDCA optimizer test class for linear (squared) loss."""

  def testSimple(self):
    """Trains two sparse examples and verifies predictions and duality gap."""
    # Setup test data
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, -10.0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 14.0),
    ]
    example_weights = [1.0, 1.0]
    with self._single_threaded_test_session():
      examples = make_example_dict(example_protos, example_weights)
      variables = make_variable_dict(1, 1)
      options = dict(symmetric_l2_regularization=1,
                     symmetric_l1_regularization=0,
                     loss_type='squared_loss')
      lr = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      predictions = lr.predictions(examples)
      train_op = lr.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()
      # Predictions should be 2/3 of label due to minimizing regularized loss:
      #   (label - 2 * weight)^2 / 2 + L2 * 2 * weight^2
      self.assertAllClose([-20.0 / 3.0, 28.0 / 3.0],
                          predictions.eval(),
                          rtol=0.005)
      # Approximate gap should be very close to 0.0. (In fact, because the gap
      # is only approximate, it is likely that upon convergence the duality gap
      # can have a tiny negative value).
      self.assertAllClose(0.00,
                          lr.approximate_duality_gap().eval(),
                          atol=1e-2)

  def testL2Regularization(self):
    """Verifies that a large L2 shrinks predictions toward zero as derived."""
    # Setup test data
    example_protos = [
        # 2 identical examples
        make_example_proto(
            {'age': [0],
             'gender': [0]}, -10.0),
        make_example_proto(
            {'age': [0],
             'gender': [0]}, -10.0),
        # 2 more identical examples
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 14.0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 14.0),
    ]
    example_weights = [1.0, 1.0, 1.0, 1.0]
    with self._single_threaded_test_session():
      examples = make_example_dict(example_protos, example_weights)
      variables = make_variable_dict(1, 1)
      options = dict(symmetric_l2_regularization=16,
                     symmetric_l1_regularization=0,
                     loss_type='squared_loss')
      lr = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      predictions = lr.predictions(examples)
      train_op = lr.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()
      # Predictions should be 1/5 of label due to minimizing regularized loss:
      #   (label - 2 * weight)^2 + L2 * 16 * weight^2
      optimal1 = -10.0 / 5.0
      optimal2 = 14.0 / 5.0
      self.assertAllClose(
          [optimal1, optimal1, optimal2, optimal2],
          predictions.eval(),
          rtol=0.01)

  def testL1Regularization(self):
    """Verifies the sparsifying effect of L1 on predictions and loss."""
    # Setup test data
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, -10.0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 14.0),
    ]
    example_weights = [1.0, 1.0]
    with self._single_threaded_test_session():
      examples = make_example_dict(example_protos, example_weights)
      variables = make_variable_dict(1, 1)
      options = dict(symmetric_l2_regularization=1.0,
                     symmetric_l1_regularization=4.0,
                     loss_type='squared_loss')
      lr = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      prediction = lr.predictions(examples)
      loss = lr.regularized_loss(examples)
      train_op = lr.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()
      # Predictions should be -4.0, 48/5 due to minimizing regularized loss:
      #   (label - 2 * weight)^2 / 2 + L2 * 2 * weight^2 + L1 * 4 * weight
      self.assertAllClose([-4.0, 20.0 / 3.0], prediction.eval(), rtol=0.08)
      # Loss should be the sum of the regularized loss value from above per
      # example after plugging in the optimal weights.
      self.assertAllClose(308.0 / 6.0, loss.eval(), atol=0.01)

  def testFeatureValues(self):
    """Verifies that non-unit feature values and example weights combine correctly."""
    # Setup test data
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, -10.0, -2.0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 14.0, 2.0),
    ]
    example_weights = [5.0, 3.0]
    with self._single_threaded_test_session():
      examples = make_example_dict(example_protos, example_weights)
      variables = make_variable_dict(1, 1)
      options = dict(symmetric_l2_regularization=1,
                     symmetric_l1_regularization=0,
                     loss_type='squared_loss')
      lr = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      predictions = lr.predictions(examples)
      train_op = lr.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()
      # There are 4 (sparse) variable weights to be learned. 2 for age and 2 for
      # gender. Let w_1, w_2 be age weights, w_3, w_4 be gender weights, y_1,
      # y_2 be the labels for examples 1 and 2 respectively and s_1, s_2 the
      # corresponding *example* weights. With the given feature values, the loss
      # function is given by:
      # s_1/2(y_1 + 2w_1 + 2w_3)^2 + s_2/2(y_2 - 2w_2 - 2w_4)^2
      # + \lambda/2 (w_1^2 + w_2^2 + w_3^2 + w_4^2). Solving for the optimal, it
      # can be verified that:
      # w_1* = w_3* = -2.0 s_1 y_1/(\lambda + 8 s_1) and
      # w_2* = w_4* = 2 \cdot s_2 y_2/(\lambda + 8 s_2). Equivalently, due to
      # regularization and example weights, the predictions are within:
      # 8 \cdot s_i /(\lambda + 8 \cdot s_i) of the labels.
      self.assertAllClose([-10 * 40.0 / 41.0, 14.0 * 24 / 25.0],
                          predictions.eval(),
                          atol=0.01)

  def testDenseFeaturesWithDefaultWeights(self):
    """Trains dense features with unit example weights; checks the closed form."""
    with self._single_threaded_test_session():
      examples = make_dense_examples_dict(
          dense_feature_values=[[1.0, 0.0], [0.0, 1.0]],
          weights=[1.0, 1.0],
          labels=[10.0, -5.0])
      variables = make_dense_variable_dict(2)
      options = dict(symmetric_l2_regularization=1.0,
                     symmetric_l1_regularization=0,
                     loss_type='squared_loss')
      lr = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      predictions = lr.predictions(examples)
      train_op = lr.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()
      # The loss function for these particular features is given by:
      # 1/2(label_1-w_1)^2 + 1/2(label_2-w_2)^2 + \lambda/2 (w_1^2 + w_2^2). So,
      # differentiating wrt to w_1, w_2 yields the following optimal values:
      # w_1* = label_1/(\lambda + 1)= 10/2, w_2* =label_2/(\lambda + 1)= -5/2.
      # In this case the (unnormalized regularized) loss will be:
      # 1/2(10-5)^2 + 1/2(5-5/2)^2 + 1/2(5^2 + (5/2)^2) = 125.0/4. The actual
      # loss should be further normalized by the sum of example weights.
      self.assertAllClose([5.0, -2.5],
                          predictions.eval(),
                          rtol=0.01)
      loss = lr.regularized_loss(examples)
      self.assertAllClose(125.0 / 8.0, loss.eval(), atol=0.01)

  def testDenseFeaturesWithArbitraryWeights(self):
    """Trains dense features with non-unit example weights; checks the closed form."""
    with self._single_threaded_test_session():
      examples = make_dense_examples_dict(
          dense_feature_values=[[1.0, 0.0], [0.0, 1.0]],
          weights=[20.0, 10.0],
          labels=[10.0, -5.0])
      variables = make_dense_variable_dict(2)
      options = dict(symmetric_l2_regularization=5.0,
                     symmetric_l1_regularization=0,
                     loss_type='squared_loss')
      lr = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      predictions = lr.predictions(examples)
      train_op = lr.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()
      # The loss function for these particular features is given by:
      # 1/2 s_1 (label_1-w_1)^2 + 1/2 s_2(label_2-w_2)^2 +
      # \lambda/2 (w_1^2 + w_2^2) where s_1, s_2 are the *example weights. It
      # turns out that the optimal (variable) weights are given by:
      # w_1* = label_1 \cdot s_1/(\lambda + s_1)= 8.0 and
      # w_2* =label_2 \cdot s_2/(\lambda + s_2)= -10/3.
      # In this case the (unnormalized regularized) loss will be:
      # s_1/2(8-10)^2 + s_2/2(5-10/3)^2 + 5.0/2(8^2 + (10/3)^2) = 2175.0/9. The
      # actual loss should be further normalized by the sum of example weights.
      self.assertAllClose([8.0, -10.0 / 3],
                          predictions.eval(),
                          rtol=0.01)
      loss = lr.regularized_loss(examples)
      self.assertAllClose(2175.0 / 270.0, loss.eval(), atol=0.01)
class SdcaWithHingeLossTest(SdcaOptimizerTest):
  """SDCA optimizer test class for hinge loss."""

  def testSimple(self):
    """Sparse features: loss before training, then perfect separation after."""
    # Setup test data
    example_protos = [
        make_example_proto(
            {'age': [0],
             'gender': [0]}, 0),
        make_example_proto(
            {'age': [1],
             'gender': [1]}, 1),
    ]
    example_weights = [1.0, 1.0]
    with self.test_session(use_gpu=False):
      examples = make_example_dict(example_protos, example_weights)
      variables = make_variable_dict(1, 1)
      options = dict(symmetric_l2_regularization=1.0,
                     symmetric_l1_regularization=0,
                     loss_type='hinge_loss')
      model = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()

      # Before minimization, the weights default to zero. There is no loss due
      # to regularization, only unregularized loss which is 0.5 * (1+1) = 1.0.
      predictions = model.predictions(examples)
      self.assertAllClose([0.0, 0.0], predictions.eval())
      unregularized_loss = model.unregularized_loss(examples)
      regularized_loss = model.regularized_loss(examples)
      self.assertAllClose(1.0, unregularized_loss.eval())
      self.assertAllClose(1.0, regularized_loss.eval())

      # After minimization, the model separates perfectly the data points. There
      # are 4 sparse weights: 2 for age (say w1, w2) and 2 for gender (say w3
      # and w4). Solving the system w1 + w3 = 1.0, w2 + w4 = -1.0 and minimizing
      # wrt to \|\vec{w}\|_2, gives w1=w3=1/2 and w2=w4=-1/2. This gives 0.0
      # unregularized loss and 0.25 L2 loss.
      train_op = model.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()
      binary_predictions = get_binary_predictions_for_hinge(predictions)
      self.assertAllEqual([-1.0, 1.0], predictions.eval())
      self.assertAllEqual([0, 1], binary_predictions.eval())
      self.assertAllClose(0.0, unregularized_loss.eval())
      self.assertAllClose(0.25, regularized_loss.eval(), atol=0.05)

  def testDenseFeaturesPerfectlySeparable(self):
    """Dense features with functional margin >= 1: ~zero hinge loss."""
    with self._single_threaded_test_session():
      examples = make_dense_examples_dict(
          dense_feature_values=[[1.0, 1.0], [1.0, -1.0]],
          weights=[1.0, 1.0],
          labels=[1.0, 0.0])
      variables = make_dense_variable_dict(2)
      options = dict(symmetric_l2_regularization=1.0,
                     symmetric_l1_regularization=0,
                     loss_type='hinge_loss')
      model = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      predictions = model.predictions(examples)
      binary_predictions = get_binary_predictions_for_hinge(predictions)
      train_op = model.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()
      self.assertAllClose([1.0, -1.0], predictions.eval(), atol=0.05)
      self.assertAllEqual([1, 0], binary_predictions.eval())

      # (1.0, 1.0) and (1.0, -1.0) are perfectly separable by x-axis (that is,
      # the SVM's functional margin >=1), so the unregularized loss is ~0.0.
      # There is only loss due to l2-regularization. For these datapoints, it
      # turns out that w_1~=0.0 and w_2~=1.0 which means that l2 loss is ~0.25.
      unregularized_loss = model.unregularized_loss(examples)
      regularized_loss = model.regularized_loss(examples)
      self.assertAllClose(0.0, unregularized_loss.eval(), atol=0.02)
      self.assertAllClose(0.25, regularized_loss.eval(), atol=0.02)

  def testDenseFeaturesSeparableWithinMargins(self):
    """Dense features separable but inside the margin: nonzero hinge loss."""
    with self._single_threaded_test_session():
      examples = make_dense_examples_dict(
          dense_feature_values=[[1.0, 1.0], [0.5, -0.5]],
          weights=[1.0, 1.0],
          labels=[1.0, 0.0])
      variables = make_dense_variable_dict(2)
      options = dict(symmetric_l2_regularization=1.0,
                     symmetric_l1_regularization=0,
                     loss_type='hinge_loss')
      model = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      predictions = model.predictions(examples)
      binary_predictions = get_binary_predictions_for_hinge(predictions)
      train_op = model.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()

      # (1.0, 0.5) and (1.0, -0.5) are separable by x-axis but the datapoints
      # are within the margins so there is unregularized loss (1/2 per example).
      # For these datapoints, optimal weights are w_1~=0.0 and w_2~=1.0 which
      # gives an L2 loss of ~0.25.
      self.assertAllClose([0.5, -0.5], predictions.eval(), rtol=0.05)
      self.assertAllEqual([1, 0], binary_predictions.eval())
      unregularized_loss = model.unregularized_loss(examples)
      regularized_loss = model.regularized_loss(examples)
      self.assertAllClose(0.5, unregularized_loss.eval(), atol=0.02)
      self.assertAllClose(0.75, regularized_loss.eval(), atol=0.02)

  def testDenseFeaturesWeightedExamples(self):
    """Dense features with unequal example weights shift the decision boundary."""
    with self._single_threaded_test_session():
      examples = make_dense_examples_dict(
          dense_feature_values=[[1.0, 1.0], [0.5, -0.5]],
          weights=[3.0, 1.0],
          labels=[1.0, 0.0])
      variables = make_dense_variable_dict(2)
      options = dict(symmetric_l2_regularization=1.0,
                     symmetric_l1_regularization=0,
                     loss_type='hinge_loss')
      model = SdcaModel(CONTAINER, examples, variables, options)
      tf.initialize_all_variables().run()
      predictions = model.predictions(examples)
      binary_predictions = get_binary_predictions_for_hinge(predictions)
      train_op = model.minimize()
      for _ in range(_MAX_ITERATIONS):
        train_op.run()

      # Point (1.0, 0.5) has higher weight than (1.0, -0.5) so the model will
      # try to increase the margin from (1.0, 0.5). Due to regularization,
      # (1.0, -0.5) will be within the margin. For these points and example
      # weights, the optimal weights are w_1~=0.4 and w_2~=1.2 which give an L2
      # loss of 0.5 * 0.25 * 0.25 * 1.6 = 0.2. The binary predictions will be
      # correct, but the boundary will be much closer to the 2nd point than the
      # first one.
      self.assertAllClose([1.0, -0.2], predictions.eval(), atol=0.05)
      self.assertAllEqual([1, 0], binary_predictions.eval())
      unregularized_loss = model.unregularized_loss(examples)
      regularized_loss = model.regularized_loss(examples)
      self.assertAllClose(0.2, unregularized_loss.eval(), atol=0.02)
      self.assertAllClose(0.4, regularized_loss.eval(), atol=0.02)
class SdcaFprintTest(TensorFlowTestCase):
  """Tests for the SdcaFprint op.

  The op is checked against hard-coded fingerprints so that the same values
  are produced on every platform. The expected constants may legitimately
  change if the SdcaFprint implementation changes (this is *not* a frozen
  test); what must never vary is the output across platforms for a given
  implementation.
  """

  def testFprint(self):
    """Fingerprints of known strings match the expected constants."""
    expected_fingerprints = [b'a085f09013029e45-3980b2afd2126c04',
                             b'bc5a254df959f26c-512e479a50910f9f',
                             b'79999cd817a03f12-085f182230e03022']
    with self.test_session():
      input_strings = tf.constant(['abc', 'very looooooong string', 'def'])
      fingerprints = _sdca_ops.sdca_fprint(input_strings)
      self.assertAllEqual(expected_fingerprints, fingerprints.eval())
class ShardedMutableHashTableTest(TensorFlowTestCase):
  """Tests for the _ShardedMutableHashTable class."""

  def testShardedMutableHashTable(self):
    """Insert/lookup/size behave the same for any shard count."""
    for shard_count in [1, 3, 10]:
      with self.test_session():
        insert_keys = tf.constant(['brain', 'salad', 'surgery'])
        insert_values = tf.constant([0, 1, 2], tf.int64)
        default_val = -1
        table = _ShardedMutableHashTable(tf.string,
                                         tf.int64,
                                         default_val,
                                         num_shards=shard_count)
        # Table starts empty; size reflects insertions across all shards.
        self.assertAllEqual(0, table.size().eval())
        table.insert(insert_keys, insert_values).run()
        self.assertAllEqual(3, table.size().eval())

        # Missing keys ('tank') fall back to the default value.
        lookup_keys = tf.constant(['brain', 'salad', 'tank'])
        lookup_result = table.lookup(lookup_keys)
        self.assertAllEqual([3], lookup_result.get_shape())
        self.assertAllEqual([0, 1, -1], lookup_result.eval())

        # Sum of stored values is shard-independent: 0 + 1 + 2 = 3.
        self.assertAllEqual(3, table.values_reduce_sum().eval())
# Run the test suite only when executed as a script, not on import.
if __name__ == '__main__':
  googletest.main()
|
apache-2.0
|
TNT-Samuel/Coding-Projects
|
DNS Server/Source/Tools/scripts/linktree.py
|
67
|
2440
|
#! /usr/bin/env python3
# linktree
#
# Make a copy of a directory tree with symbolic links to all files in the
# original tree.
# All symbolic links go to a special symbolic link at the top, so you
# can easily fix things if the original source tree moves.
# See also "mkreal".
#
# usage: linktree oldtree newtree [linkto]
import sys, os
LINK = '.LINK' # Name of special symlink at the top.
debug = 0
def main():
    """Entry point: validate arguments, create the top-level link, and mirror.

    Returns a shell-style exit status: 0 on success, 1 on filesystem
    errors, 2 on bad usage.
    """
    args = sys.argv
    if not 3 <= len(args) <= 4:
        print('usage:', args[0], 'oldtree newtree [linkto]')
        return 2
    oldtree, newtree = args[1], args[2]
    if len(args) > 3:
        # Explicit link target given: it may already exist.
        link = args[3]
        link_may_fail = 1
    else:
        link = LINK
        link_may_fail = 0
    if not os.path.isdir(oldtree):
        print(oldtree + ': not a directory')
        return 1
    try:
        os.mkdir(newtree, 0o777)
    except OSError as msg:
        print(newtree + ': cannot mkdir:', msg)
        return 1
    # Create the special top-level symlink that all file links go through.
    linkname = os.path.join(newtree, link)
    try:
        os.symlink(os.path.join(os.pardir, oldtree), linkname)
    except OSError as msg:
        if not link_may_fail:
            print(linkname + ': cannot symlink:', msg)
            return 1
        print(linkname + ': warning: cannot symlink:', msg)
    linknames(oldtree, newtree, link)
    return 0
def linknames(old, new, link):
    """Recursively mirror directory *old* into *new* using symlinks.

    Each regular entry in *old* becomes a symlink in *new* pointing at
    ``link/name`` (i.e. through the special top-level link); real
    subdirectories are recreated as directories and recursed into with
    *link* adjusted by one ``..`` level. Errors are reported as warnings
    and the affected entry is skipped.
    """
    if debug:
        print('linknames', (old, new, link))
    try:
        names = os.listdir(old)
    except OSError as msg:
        print(old + ': warning: cannot listdir:', msg)
        return
    for name in names:
        if name in (os.curdir, os.pardir):
            continue
        oldname = os.path.join(old, name)
        linkname = os.path.join(link, name)
        newname = os.path.join(new, name)
        if debug > 1:
            print(oldname, newname, linkname)
        if os.path.isdir(oldname) and not os.path.islink(oldname):
            try:
                os.mkdir(newname, 0o777)
                ok = 1
            # BUG FIX: was a bare `except:` that printed `msg`, which is
            # unbound here in Python 3 (the earlier `except OSError as msg`
            # binding is deleted when its clause exits), so any mkdir
            # failure raised NameError instead of printing the warning.
            except OSError as msg:
                print(newname + ': warning: cannot mkdir:', msg)
                ok = 0
            if ok:
                # One level deeper: route the relative link through '..'.
                linkname = os.path.join(os.pardir, linkname)
                linknames(oldname, newname, linkname)
        else:
            os.symlink(linkname, newname)
# Run only when executed as a script; propagate main()'s exit status.
if __name__ == '__main__':
    sys.exit(main())
|
gpl-3.0
|
labordoc/labordoc-next
|
modules/websearch/web/admin/websearchadmin.py
|
25
|
45852
|
## This file is part of Invenio.
## Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio WebSearch Administrator Interface."""
__revision__ = "$Id$"
__lastupdated__ = """$Date$"""
import invenio.websearchadminlib as wsc
from invenio.bibrankadminlib import check_user
from invenio.webpage import page, adderrorbox, error_page
from invenio.config import CFG_SITE_URL, CFG_SITE_SECURE_URL, CFG_SITE_LANG, CFG_SITE_NAME
from invenio.webuser import getUid, page_not_authorized
from invenio.messages import gettext_set_language
from invenio.urlutils import wash_url_argument
def switchfmtscore(req, colID, type, id_1, id_2, ln=CFG_SITE_LANG):
    """Admin handler: swap the score of two output formats of a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'; auth[1] carries the reason.
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_switchfmtscore(colID=colID, ln=ln, type=type,
                                      id_1=id_1, id_2=id_2)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def switchfldscore(req, colID, id_1, id_2, fmeth, ln=CFG_SITE_LANG):
    """Admin handler: swap the score of two fields of a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_switchfldscore(colID=colID, ln=ln, id_1=id_1,
                                      id_2=id_2, fmeth=fmeth)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def switchfldvaluescore(req, colID, id_1, id_fldvalue_1, id_fldvalue_2, ln=CFG_SITE_LANG):
    """Admin handler: swap the score of two field values of a collection field."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_switchfldvaluescore(colID=colID, ln=ln, id_1=id_1,
                                           id_fldvalue_1=id_fldvalue_1,
                                           id_fldvalue_2=id_fldvalue_2)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def runwebcoll(req, colID, ln=CFG_SITE_LANG, confirm=0):
    """Admin handler: show/confirm the webcoll status check for a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_checkwebcollstatus(colID=colID, ln=ln, confirm=confirm)
    return page(title="WebSearch Admin",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def switchpbxscore(req, colID, id_1, id_2, sel_ln, ln=CFG_SITE_LANG):
    """Admin handler: swap the score of two portalboxes of a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_switchpbxscore(colID=colID, ln=ln, id_1=id_1,
                                      id_2=id_2, sel_ln=sel_ln)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def modifydbquery(req, colID, ln=CFG_SITE_LANG, dbquery='', confirm=-1):
    """Admin handler: edit the database query defining a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_modifydbquery(colID=colID, ln=ln,
                                     dbquery=dbquery, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def showtree(req, colID, ln=CFG_SITE_LANG):
    """Admin handler: display the collection tree rooted at *colID*."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_showtree(colID=colID, ln=ln)
    return page(title="Collection tree",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def modifytranslations(req, colID, ln=CFG_SITE_LANG, sel_type='', trans=None, confirm=-1):
    """Admin handler: edit the translations of a collection.

    FIX: the default for *trans* was a mutable ``[]`` shared across all
    calls of the handler; replaced with the ``None`` sentinel (behavior for
    callers is unchanged).
    """
    if trans is None:
        trans = []
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except Exception:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_modifytranslations(colID=colID, ln=ln,
                                          sel_type=sel_type, trans=trans,
                                          confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def addcollectiontotree(req, colID, ln=CFG_SITE_LANG, add_dad='', add_son='', rtype='', mtype='', callback='yes', confirm=-1):
    """Admin handler: attach a collection as a child in the collection tree.

    FIX: the body was called with ``ln=CFG_SITE_LANG``, silently discarding
    the request's *ln* argument (every sibling handler forwards ``ln=ln``);
    it is now forwarded.
    """
    # NOTE(review): parameter `mtype` is accepted but never forwarded to
    # wsc.perform_addcollectiontotree -- confirm whether that is intended.
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except Exception:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_addcollectiontotree(colID=colID,
                                           ln=ln,
                                           add_dad=add_dad,
                                           add_son=add_son,
                                           rtype=rtype,
                                           callback=callback,
                                           confirm=confirm)
    return page(title="WebSearch Admin",
                body=body,
                uid=uid,
                language=ln,
                navtrail=navtrail_previous_links,
                req=req,
                lastupdated=__lastupdated__)
def addcollection(req, colID, ln=CFG_SITE_LANG, colNAME='', dbquery='', callback="yes", confirm=-1):
    """Admin handler: create a new collection.

    FIX: the body was called with ``ln=CFG_SITE_LANG``, silently discarding
    the request's *ln* argument (every sibling handler forwards ``ln=ln``);
    it is now forwarded.
    """
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except Exception:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_addcollection(colID=colID,
                                     ln=ln,
                                     colNAME=colNAME,
                                     dbquery=dbquery,
                                     callback=callback,
                                     confirm=confirm)
    return page(title="WebSearch Admin",
                body=body,
                uid=uid,
                language=ln,
                navtrail=navtrail_previous_links,
                req=req,
                lastupdated=__lastupdated__)
def modifyrankmethods(req, colID, ln=CFG_SITE_LANG, func='', rnkID='', confirm=0):
    """Admin handler: attach/detach rank methods for a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_modifyrankmethods(colID=colID, ln=ln, func=func,
                                         rnkID=rnkID, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def deletecollection(req, colID, ln=CFG_SITE_LANG, confirm=-1):
    """Admin handler: delete a collection (with confirmation step)."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_deletecollection(colID=colID, ln=ln, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def editcollection(req, colID=1, ln=CFG_SITE_LANG, mtype=''):
    """Admin handler: main edit page for a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_editcollection(colID=colID, ln=ln, mtype=mtype)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def checkexternalcollections(req, colID, ln=CFG_SITE_LANG, icl=None, update="", confirm=0, callback='yes'):
    """Admin handler: check/update external collection settings."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_checkexternalcollections(colID=colID, ln=ln, icl=icl,
                                                update=update, confirm=confirm,
                                                callback=callback)
    return page(title="WebSearch Admin",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def showoutputformats(req, colID, ln=CFG_SITE_LANG, callback='yes', confirm=0):
    """Admin handler: list the output formats attached to a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_showoutputformats(colID=colID, ln=ln,
                                         callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def addexistingoutputformat(req, colID, ln=CFG_SITE_LANG, fmtID=-1, callback='yes', confirm=-1):
    """Admin handler: attach an existing output format to a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_addexistingoutputformat(colID=colID, ln=ln,
                                               fmtID=fmtID, callback=callback,
                                               confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def deleteoutputformat(req, colID, ln=CFG_SITE_LANG, fmtID=-1, callback='yes', confirm=-1):
    """Admin handler: delete an output format (with confirmation step)."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_deleteoutputformat(colID=colID, ln=ln,
                                          fmtID=fmtID, callback=callback,
                                          confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def removeoutputformat(req, colID, ln=CFG_SITE_LANG, fmtID='', callback='yes', confirm=0):
    """Admin handler: detach an output format from a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_removeoutputformat(colID=colID, ln=ln,
                                          fmtID=fmtID, callback=callback,
                                          confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def update_external_collections(req, colID, ln=CFG_SITE_LANG, state=None, recurse=None):
    """Admin handler: update the external collections of a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_update_external_collections(colID, ln, state, recurse)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def update_detailed_record_options(req, colID, ln=CFG_SITE_LANG, tabs=None, recurse=0):
    """Update the preferences for the tabs to show/hide in the detailed record page.

    FIX: the default for *tabs* was a mutable ``[]`` shared across all calls
    of the handler; replaced with the ``None`` sentinel (behavior for
    callers is unchanged).
    """
    if tabs is None:
        tabs = []
    _tabs = wash_url_argument(tabs, 'list')
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except Exception:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_update_detailed_record_options(colID, ln, _tabs, recurse)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def removefieldvalue(req, colID, ln=CFG_SITE_LANG, fldID='', fldvID='', fmeth='', callback='yes', confirm=0):
    """Admin handler: detach a field value from a collection field."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_removefieldvalue(colID=colID, ln=ln, fldID=fldID,
                                        fldvID=fldvID, fmeth=fmeth,
                                        callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def removefield(req, colID, ln=CFG_SITE_LANG, fldID='', fldvID='', fmeth='', callback='yes', confirm=0):
    """Admin handler: detach a field from a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_removefield(colID=colID, ln=ln, fldID=fldID,
                                   fldvID=fldvID, fmeth=fmeth,
                                   callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def modifyfield(req, colID, fldID, fldvID='', ln=CFG_SITE_LANG, callback='yes', confirm=0):
    """Admin handler: modify a field attached to a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_modifyfield(colID=colID, fldID=fldID, fldvID=fldvID,
                                   ln=ln, callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def showsearchoptions(req, colID, ln=CFG_SITE_LANG, callback='yes', confirm=0):
    """Admin handler: list the search options configured for a collection."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_showsearchoptions(colID=colID, ln=ln,
                                         callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def addexistingfield(req, colID, ln=CFG_SITE_LANG, fldID=-1, fldvID=-1, fmeth='', callback='yes', confirm=-1):
    """Admin handler: attach an existing field to a collection.

    CONSISTENCY FIX: the authorization-failure branch rendered a hand-rolled
    "Authorization failure" page, unlike the other handlers in this module
    which delegate to page_not_authorized(); it now uses the shared helper.
    """
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except Exception:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_addexistingfield(colID=colID, ln=ln, fldID=fldID,
                                        fldvID=fldvID, fmeth=fmeth,
                                        callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def rearrangefield(req, colID, ln=CFG_SITE_LANG, fmeth='', callback='yes', confirm=-1):
    """Admin handler: re-order the fields attached to a collection.

    CONSISTENCY FIX: the authorization-failure branch rendered a hand-rolled
    "Authorization failure" page, unlike the other handlers in this module
    which delegate to page_not_authorized(); it now uses the shared helper.
    """
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except Exception:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_rearrangefield(colID=colID, ln=ln, fmeth=fmeth,
                                      callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def addexistingfieldvalue(req, colID, fldID, ln=CFG_SITE_LANG, callback='yes', confirm=-1):
    """Admin handler: attach an existing field value to a collection field.

    CONSISTENCY FIX: the authorization-failure branch rendered a hand-rolled
    "Authorization failure" page, unlike the other handlers in this module
    which delegate to page_not_authorized(); it now uses the shared helper.
    """
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except Exception:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_addexistingfieldvalue(colID=colID, ln=ln, fldID=fldID,
                                             callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def rearrangefieldvalue(req, colID, fldID, ln=CFG_SITE_LANG, callback='yes', confirm=-1):
    """Admin handler: re-order the field values of a collection field.

    CONSISTENCY FIX: the authorization-failure branch rendered a hand-rolled
    "Authorization failure" page, unlike the other handlers in this module
    which delegate to page_not_authorized(); it now uses the shared helper.
    """
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except Exception:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_rearrangefieldvalue(colID=colID, ln=ln, fldID=fldID,
                                           callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def addnewfieldvalue(req, colID, fldID, ln=CFG_SITE_LANG, name='', value='', callback="yes", confirm=-1):
    """Admin handler: create a new field value for a collection field."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        return page_not_authorized(req=req, text=auth[1],
                                   navtrail=navtrail_previous_links)
    body = wsc.perform_addnewfieldvalue(colID=colID, fldID=fldID, ln=ln,
                                        name=name, value=value,
                                        callback=callback, confirm=confirm)
    return page(title="Edit Collection",
                body=body,
                uid=uid,
                language=ln,
                navtrail=navtrail_previous_links,
                req=req,
                lastupdated=__lastupdated__)
def modifyfieldvalue(req, colID, fldID, fldvID, ln=CFG_SITE_LANG, name='', value='', callback="yes", confirm=-1):
    """Render the admin page for modifying field value `fldvID` of field `fldID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_modifyfieldvalue(colID=colID, fldID=fldID, fldvID=fldvID,
                                             ln=ln, name=name, value=value,
                                             callback=callback, confirm=confirm)
    return page(title="Edit Collection", body=page_body, uid=uid, language=ln,
                navtrail=navtrail_previous_links, req=req,
                lastupdated=__lastupdated__)
def showsearchfields(req, colID, ln=CFG_SITE_LANG, callback='yes', confirm=0):
    """Render the admin page listing the search fields of collection `colID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_showsearchfields(colID=colID, ln=ln,
                                             callback=callback, confirm=confirm)
    return page(title="Edit Collection", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def showsortoptions(req, colID, ln=CFG_SITE_LANG, callback='yes', confirm=0):
    """Render the admin page listing the sort options of collection `colID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_showsortoptions(colID=colID, ln=ln,
                                            callback=callback, confirm=confirm)
    return page(title="Edit Collection", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def modifyportalbox(req, colID, ln=CFG_SITE_LANG, pbxID=-1, score='', position='', sel_ln='', title='', body='', callback='yes', confirm=-1):
    """Render the admin page for modifying portalbox `pbxID` of collection `colID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_modifyportalbox(colID=colID, ln=ln, pbxID=pbxID,
                                            score=score, position=position,
                                            sel_ln=sel_ln, title=title, body=body,
                                            callback=callback, confirm=confirm)
    return page(title="Edit Collection", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def removeportalbox(req, colID, ln=CFG_SITE_LANG, pbxID='', sel_ln='', callback='yes', confirm=0):
    """Render the admin page for detaching portalbox `pbxID` from collection `colID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_removeportalbox(colID=colID, ln=ln, pbxID=pbxID,
                                            sel_ln=sel_ln, callback=callback,
                                            confirm=confirm)
    return page(title="Edit Collection", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def addexistingportalbox(req, colID, ln=CFG_SITE_LANG, pbxID=-1, score=0, position='', sel_ln='', callback='yes', confirm=0):
    """Render the admin page for attaching an existing portalbox to
    collection `colID`, or an authorization-failure page.

    Consistency fix: the unauthorized branch previously built a hand-rolled
    "Authorization failure" page; every sibling handler in this module
    delegates to page_not_authorized(), so this one now does too.
    """
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch': standard failure page.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    return page(title="Edit Collection",
                body=wsc.perform_addexistingportalbox(colID=colID,
                                                      ln=ln,
                                                      pbxID=pbxID,
                                                      score=score,
                                                      position=position,
                                                      sel_ln=sel_ln,
                                                      callback=callback,
                                                      confirm=confirm),
                uid=uid,
                language=ln,
                req=req,
                navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def deleteportalbox(req, colID, ln=CFG_SITE_LANG, pbxID=-1, callback='yes', confirm=-1):
    """Render the admin page for deleting portalbox `pbxID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_deleteportalbox(colID=colID, ln=ln, pbxID=pbxID,
                                            callback=callback, confirm=confirm)
    return page(title="Edit Collection", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def showportalboxes(req, colID, ln=CFG_SITE_LANG, callback='yes', confirm=0):
    """Render the admin page listing the portalboxes of collection `colID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_showportalboxes(colID=colID, ln=ln,
                                            callback=callback, confirm=confirm)
    return page(title="Edit Collection", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def addportalbox(req, colID, ln=CFG_SITE_LANG, title='', body='', callback='yes', confirm=-1):
    """Render the admin page for creating a new portalbox for collection `colID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_addportalbox(colID=colID, ln=ln, title=title,
                                         body=body, callback=callback,
                                         confirm=confirm)
    return page(title="Edit Collection", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def modifycollectiontree(req, colID, ln=CFG_SITE_LANG, move_up='', move_down='', move_from='', move_to='', delete='', rtype='', callback='yes', confirm=0):
    """Render the admin page for rearranging the collection tree around `colID`."""
    navtrail_previous_links = wsc.getnavtrail() + """> <a class="navtrail" href="%s/admin/websearch/websearchadmin.py/">WebSearch Admin</a> """ % (CFG_SITE_URL)
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_modifycollectiontree(colID=colID, ln=ln,
                                                 move_up=move_up,
                                                 move_down=move_down,
                                                 move_from=move_from,
                                                 move_to=move_to,
                                                 delete=delete, rtype=rtype,
                                                 callback=callback,
                                                 confirm=confirm)
    return page(title="WebSearch Admin", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
def index(req, colID=1, ln=CFG_SITE_LANG, mtype='', content='', confirm=0):
    """Render the WebSearch admin start page."""
    # Note: unlike the other handlers, the index has no extra navtrail suffix.
    navtrail_previous_links = wsc.getnavtrail()
    try:
        uid = getUid(req)
    except:
        return error_page('Error', req)
    auth = check_user(req, 'cfgwebsearch')
    if auth[0]:
        # Not authorized for 'cfgwebsearch'.
        return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
    page_body = wsc.perform_index(colID=colID, ln=ln, mtype=mtype,
                                  content=content, confirm=confirm)
    return page(title="WebSearch Admin", body=page_body, uid=uid, language=ln,
                req=req, navtrail=navtrail_previous_links,
                lastupdated=__lastupdated__)
|
gpl-2.0
|
mrquim/repository.mrquim
|
script.module.youtube.dl/lib/youtube_dl/extractor/addanime.py
|
48
|
3315
|
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_HTTPError,
compat_str,
compat_urllib_parse_urlencode,
compat_urllib_parse_urlparse,
)
from ..utils import (
ExtractorError,
qualities,
)
class AddAnimeIE(InfoExtractor):
    """Extractor for add-anime.net video pages.

    On a 503 response the site serves a JavaScript challenge page embedding a
    small arithmetic task; its answer plus the host-name length must be posted
    back before the real page is served (handled in _real_extract).
    """
    _VALID_URL = r'https?://(?:\w+\.)?add-anime\.net/(?:watch_video\.php\?(?:.*?)v=|video/)(?P<id>[\w_]+)'
    _TESTS = [{
        'url': 'http://www.add-anime.net/watch_video.php?v=24MR3YO5SAS9',
        'md5': '72954ea10bc979ab5e2eb288b21425a0',
        'info_dict': {
            'id': '24MR3YO5SAS9',
            'ext': 'mp4',
            'description': 'One Piece 606',
            'title': 'One Piece 606',
        },
        'skip': 'Video is gone',
    }, {
        'url': 'http://add-anime.net/video/MDUGWYKNGBD8/One-Piece-687',
        'only_matching': True,
    }]

    def _real_extract(self, url):
        video_id = self._match_id(url)

        try:
            webpage = self._download_webpage(url, video_id)
        except ExtractorError as ee:
            if not isinstance(ee.cause, compat_HTTPError) or \
               ee.cause.code != 503:
                raise

            # Solve the redirect challenge: read the arithmetic task from the
            # 503 body, add the host-name length, and confirm via GET.
            redir_webpage = ee.cause.read().decode('utf-8')
            action = self._search_regex(
                r'<form id="challenge-form" action="([^"]+)"',
                redir_webpage, 'Redirect form')
            vc = self._search_regex(
                r'<input type="hidden" name="jschl_vc" value="([^"]+)"/>',
                redir_webpage, 'redirect vc value')
            av = re.search(
                r'a\.value = ([0-9]+)[+]([0-9]+)[*]([0-9]+);',
                redir_webpage)
            if av is None:
                raise ExtractorError('Cannot find redirect math task')
            av_res = int(av.group(1)) + int(av.group(2)) * int(av.group(3))

            parsed_url = compat_urllib_parse_urlparse(url)
            av_val = av_res + len(parsed_url.netloc)
            confirm_url = (
                parsed_url.scheme + '://' + parsed_url.netloc +
                action + '?' +
                compat_urllib_parse_urlencode({
                    'jschl_vc': vc, 'jschl_answer': compat_str(av_val)}))
            self._download_webpage(
                confirm_url, video_id,
                note='Confirming after redirect')
            webpage = self._download_webpage(url, video_id)

        FORMATS = ('normal', 'hq')
        quality = qualities(FORMATS)
        formats = []
        for format_id in FORMATS:
            rex = r"var %s_video_file = '(.*?)';" % re.escape(format_id)
            # Fix: the field label read 'video file URLx' (typo), which leaked
            # into the "unable to extract" warning message.
            video_url = self._search_regex(rex, webpage, 'video file URL',
                                           fatal=False)
            if not video_url:
                continue
            formats.append({
                'format_id': format_id,
                'url': video_url,
                'quality': quality(format_id),
            })
        self._sort_formats(formats)
        video_title = self._og_search_title(webpage)
        video_description = self._og_search_description(webpage)
        return {
            '_type': 'video',
            'id': video_id,
            'formats': formats,
            'title': video_title,
            'description': video_description
        }
|
gpl-2.0
|
crowdhackathon-transport/optimizers
|
crowdstance-api/venv/lib/python2.7/site-packages/werkzeug/script.py
|
116
|
11365
|
# -*- coding: utf-8 -*-
r'''
werkzeug.script
~~~~~~~~~~~~~~~
.. admonition:: Deprecated Functionality
``werkzeug.script`` is deprecated without replacement functionality.
Python's command line support improved greatly with :mod:`argparse`
and a bunch of alternative modules.
Most of the time you have recurring tasks while writing an application
such as starting up an interactive python interpreter with some prefilled
imports, starting the development server, initializing the database or
something similar.
For that purpose werkzeug provides the `werkzeug.script` module which
helps you writing such scripts.
Basic Usage
-----------
The following snippet is roughly the same in every werkzeug script::
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from werkzeug import script
# actions go here
if __name__ == '__main__':
script.run()
Starting this script now does nothing because no actions are defined.
An action is a function in the same module starting with ``"action_"``
which takes a number of arguments where every argument has a default. The
type of the default value specifies the type of the argument.
Arguments can then be passed by position or using ``--name=value`` from
the shell.
Because a runserver and shell command is pretty common there are two
factory functions that create such commands::
def make_app():
from yourapplication import YourApplication
return YourApplication(...)
action_runserver = script.make_runserver(make_app, use_reloader=True)
action_shell = script.make_shell(lambda: {'app': make_app()})
Using The Scripts
-----------------
The script from above can be used like this from the shell now:
.. sourcecode:: text
$ ./manage.py --help
$ ./manage.py runserver localhost 8080 --debugger --no-reloader
$ ./manage.py runserver -p 4000
$ ./manage.py shell
As you can see it's possible to pass parameters as positional arguments
or as named parameters, pretty much like Python function calls.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
'''
from __future__ import print_function
import sys
import inspect
import getopt
from os.path import basename
from werkzeug._compat import iteritems
# Map from the Python type of an action argument's default value to the
# human-readable type name shown in help output (and used as a key into
# `converters` below).
argument_types = {
    bool: 'boolean',
    str: 'string',
    int: 'integer',
    float: 'float'
}
# Map from type name to a callable that parses a command-line string into
# the corresponding Python value.
converters = {
    'boolean': lambda x: x.lower() in ('1', 'true', 'yes', 'on'),
    'string': str,
    'integer': int,
    'float': float
}
def run(namespace=None, action_prefix='action_', args=None):
    """Run the script. Participating actions are looked up in the caller's
    namespace if no namespace is given, otherwise in the dict provided.
    Only items that start with action_prefix are processed as actions. If
    you want to use all items in the namespace provided as actions set
    action_prefix to an empty string.
    :param namespace: An optional dict where the functions are looked up in.
    By default the local namespace of the caller is used.
    :param action_prefix: The prefix for the functions. Everything else
    is ignored.
    :param args: the arguments for the function. If not specified
    :data:`sys.argv` without the first argument is used.
    """
    if namespace is None:
        # Default: pick up the caller's locals so actions defined in the
        # calling module are found automatically.
        namespace = sys._getframe(1).f_locals
    actions = find_actions(namespace, action_prefix)
    if args is None:
        args = sys.argv[1:]
    if not args or args[0] in ('-h', '--help'):
        return print_usage(actions)
    elif args[0] not in actions:
        fail('Unknown action \'%s\'' % args[0])
    arguments = {}
    types = {}
    key_to_arg = {}
    long_options = []
    formatstring = ''
    func, doc, arg_def = actions[args.pop(0)]
    # Build the getopt short/long option spec and the maps from option keys
    # (and positional indexes) back to argument names.
    for idx, (arg, shortcut, default, option_type) in enumerate(arg_def):
        real_arg = arg.replace('-', '_')
        if shortcut:
            formatstring += shortcut
            if not isinstance(default, bool):
                # Non-boolean shortcuts take a value (":" getopt suffix).
                formatstring += ':'
            key_to_arg['-' + shortcut] = real_arg
        long_options.append(isinstance(default, bool) and arg or arg + '=')
        key_to_arg['--' + arg] = real_arg
        key_to_arg[idx] = real_arg
        types[real_arg] = option_type
        arguments[real_arg] = default
    try:
        optlist, posargs = getopt.gnu_getopt(args, formatstring, long_options)
    except getopt.GetoptError as e:
        fail(str(e))
    specified_arguments = set()
    # Positional parameters are matched to arguments by index.
    for key, value in enumerate(posargs):
        try:
            arg = key_to_arg[key]
        except KeyError:
            # Bug fix: key_to_arg is a dict, so an out-of-range positional
            # index raises KeyError, not IndexError. The old `except
            # IndexError:` never fired and surplus parameters crashed with a
            # traceback instead of this message.
            fail('Too many parameters')
        specified_arguments.add(arg)
        try:
            arguments[arg] = converters[types[arg]](value)
        except ValueError:
            fail('Invalid value for argument %s (%s): %s' % (key, arg, value))
    # Named options; boolean flags are set by mere presence ("--no-x" flags
    # map to a 'no' value so the converter yields False).
    for key, value in optlist:
        arg = key_to_arg[key]
        if arg in specified_arguments:
            fail('Argument \'%s\' is specified twice' % arg)
        if types[arg] == 'boolean':
            if arg.startswith('no_'):
                value = 'no'
            else:
                value = 'yes'
        try:
            arguments[arg] = converters[types[arg]](value)
        except ValueError:
            fail('Invalid value for \'%s\': %s' % (key, value))
    # Strip the "no_" prefix used for inverted boolean flags before calling
    # the actual action function.
    newargs = {}
    for k, v in iteritems(arguments):
        newargs[k.startswith('no_') and k[3:] or k] = v
    arguments = newargs
    return func(**arguments)
def fail(message, code=-1):
    """Write an error message to stderr and exit with the given status."""
    sys.stderr.write('Error: %s\n' % message)
    sys.exit(code)
def find_actions(namespace, action_prefix):
    """Collect all actions in *namespace*: every item whose key starts with
    *action_prefix*, keyed by the name with the prefix stripped."""
    prefix_len = len(action_prefix)
    return dict(
        (key[prefix_len:], analyse_action(value))
        for key, value in iteritems(namespace)
        if key.startswith(action_prefix)
    )
def print_usage(actions):
    """Print the usage information. (Help screen)

    Lists every action (sorted by name) with its docstring and accepted
    options; boolean flags print bare, valued options also show their type
    and default.
    """
    actions = sorted(iteritems(actions))
    print('usage: %s <action> [<options>]' % basename(sys.argv[0]))
    print(' %s --help' % basename(sys.argv[0]))
    print()
    print('actions:')
    for name, (func, doc, arguments) in actions:
        print(' %s:' % name)
        for line in doc.splitlines():
            print(' %s' % line)
        if arguments:
            print()
        for arg, shortcut, default, argtype in arguments:
            if isinstance(default, bool):
                # Boolean flag: no value column.
                print(' %s' % (
                    (shortcut and '-%s, ' % shortcut or '') + '--' + arg
                ))
            else:
                print(' %-30s%-10s%s' % (
                    (shortcut and '-%s, ' % shortcut or '') + '--' + arg,
                    argtype, default
                ))
        print()
def analyse_action(func):
    """Inspect an action function and return ``(func, description, arguments)``
    where each argument is a ``(name, shortcut, default, type)`` tuple.

    Every parameter must have a default; a tuple default ``(shortcut, value)``
    supplies a short option letter. True booleans are exposed as ``no-``
    flags.
    """
    description = inspect.getdoc(func) or 'undocumented action'
    args, varargs, kwargs, defaults = inspect.getargspec(func)
    if varargs or kwargs:
        raise TypeError('variable length arguments for action not allowed.')
    if len(args) != len(defaults or ()):
        raise TypeError('not all arguments have proper definitions')
    arguments = []
    for arg, definition in zip(args, defaults or ()):
        if arg.startswith('_'):
            raise TypeError('arguments may not start with an underscore')
        if isinstance(definition, tuple):
            shortcut, default = definition
        else:
            shortcut, default = None, definition
        argument_type = argument_types[type(default)]
        if isinstance(default, bool) and default is True:
            # Expose default-True booleans as inverted "no-" flags.
            arg = 'no-' + arg
        arguments.append((arg.replace('_', '-'), shortcut,
                          default, argument_type))
    return func, description, arguments
def make_shell(init_func=None, banner=None, use_ipython=True):
    """Return an action callback that starts an interactive Python shell.

    :param init_func: optional callable whose return value becomes the
                      initial shell namespace.
    :param banner: text shown before the shell; a generic banner if omitted.
    :param use_ipython: prefer IPython when it is importable.
    """
    if banner is None:
        banner = 'Interactive Werkzeug Shell'
    if init_func is None:
        init_func = dict

    def action(ipython=use_ipython):
        """Start a new interactive python session."""
        namespace = init_func()
        if not ipython:
            from code import interact
            interact(banner, local=namespace)
            return
        try:
            try:
                # Newer IPython layout first ...
                from IPython.frontend.terminal.embed import InteractiveShellEmbed
                sh = InteractiveShellEmbed(banner1=banner)
            except ImportError:
                # ... then the legacy one.
                from IPython.Shell import IPShellEmbed
                sh = IPShellEmbed(banner=banner)
        except ImportError:
            # IPython unavailable -- fall back to the plain shell.
            from code import interact
            interact(banner, local=namespace)
        else:
            sh(global_ns={}, local_ns=namespace)

    return action
def make_runserver(app_factory, hostname='localhost', port=5000,
                   use_reloader=False, use_debugger=False, use_evalex=True,
                   threaded=False, processes=1, static_files=None,
                   extra_files=None, ssl_context=None):
    """Return an action callback that starts a development server.

    :param app_factory: a function that returns a new WSGI application.
    :param hostname: the default hostname the server should listen on.
    :param port: the default port of the server.
    :param use_reloader: the default setting for the reloader.
    :param use_evalex: the default setting for the evalex flag of the debugger.
    :param threaded: the default threading setting.
    :param processes: the default number of processes to start.
    :param static_files: optional dict of static files.
    :param extra_files: optional list of extra files to track for reloading.
    :param ssl_context: optional SSL context for running server in HTTPS mode.
    """
    def action(hostname=('h', hostname), port=('p', port),
               reloader=use_reloader, debugger=use_debugger,
               evalex=use_evalex, threaded=threaded, processes=processes):
        """Start a new development server."""
        from werkzeug.serving import run_simple
        run_simple(hostname, port, app_factory(),
                   use_reloader=reloader, use_debugger=debugger,
                   use_evalex=evalex, extra_files=extra_files,
                   reloader_interval=1, threaded=threaded,
                   processes=processes, static_files=static_files,
                   ssl_context=ssl_context)

    return action
|
mit
|
patrickhartling/maestro
|
playpen/win_pageant.py
|
2
|
4475
|
# Copyright (C) 2005 John Arbash-Meinel <john@arbash-meinel.com>
# Modified up by: Todd Whiteman <ToddW@ActiveState.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distrubuted in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
"""
Functions for communicating with Pageant, the basic windows ssh agent program.
"""
import os
import struct
import tempfile
import mmap
import array
# if you're on windows, you should have these, i guess?
try:
    import win32ui
    _has_win32all = True
except ImportError:
    # win32all missing: can_talk_to_agent() will simply report False.
    _has_win32all = False
# Magic dwData value that identifies an agent request in the WM_COPYDATA
# message sent to the Pageant window.
_AGENT_COPYDATA_ID = 0x804e50ba
# Fixed size of the shared request/response buffer.
_AGENT_MAX_MSGLEN = 8192
# Note: The WM_COPYDATA value is pulled from win32con, as a workaround
# so we do not have to import this huge library just for this one variable.
win32con_WM_COPYDATA = 74
def _get_pageant_window_object():
    """Return the Pageant window handle, or None if Pageant is not running."""
    try:
        return win32ui.FindWindow('Pageant', 'Pageant')
    except win32ui.error:
        return None
def can_talk_to_agent():
    """
    Check to see if there is a "Pageant" agent we can talk to.
    This checks both if we have the required libraries (win32all)
    and if there is a Pageant currently running.
    """
    return bool(_has_win32all and _get_pageant_window_object())
def _query_pageant(msg):
    """Send an agent request to Pageant through a shared memory-mapped file.

    Returns the raw response (4-byte length prefix plus payload), or None
    when no Pageant window can be found or Pageant rejects the message.
    """
    hwnd = _get_pageant_window_object()
    if not hwnd:
        # Raise a failure to connect exception, pageant isn't running anymore!
        return None
    # Write our pageant request string into the file (pageant will read this to determine what to do)
    # NOTE(review): tempfile.mktemp is race-prone; presumably used here for a
    # predictable basename that doubles as the map tag below -- confirm.
    filename = tempfile.mktemp('.pag')
    map_filename = os.path.basename(filename)
    f = open(filename, 'w+b')
    f.write(msg )
    # Ensure the rest of the file is empty, otherwise pageant will read this
    f.write('\0' * (_AGENT_MAX_MSGLEN - len(msg)))
    # Create the shared file map that pageant will use to read from
    pymap = mmap.mmap(f.fileno(), _AGENT_MAX_MSGLEN, tagname=map_filename, access=mmap.ACCESS_WRITE)
    try:
        # Create an array buffer containing the mapped filename
        # (the 'c' typecode is Python 2 only).
        char_buffer = array.array("c", map_filename + '\0')
        char_buffer_address, char_buffer_size = char_buffer.buffer_info()
        # Create a string to use for the SendMessage function call
        # (packed like a COPYDATASTRUCT: dwData, cbData, lpData).
        cds = struct.pack("LLP", _AGENT_COPYDATA_ID, char_buffer_size, char_buffer_address)
        response = hwnd.SendMessage(win32con_WM_COPYDATA, cds)
        if response > 0:
            # Pageant wrote its reply into the shared map: first 4 bytes are
            # the big-endian payload length.
            datalen = pymap.read(4)
            retlen = struct.unpack('>I', datalen)[0]
            return datalen + pymap.read(retlen)
        return None
    finally:
        pymap.close()
        f.close()
        # Remove the file, it was temporary only
        os.unlink(filename)
class PageantConnection (object):
    """
    Mock "connection" to an agent which roughly approximates the behavior of
    a unix local-domain socket (as used by Agent). Requests are sent to the
    pageant daemon via special Windows magick, and responses are buffered back
    for subsequent reads.
    """

    def __init__(self):
        # Buffered response from the last send(); None when exhausted.
        self._response = None

    def send(self, data):
        self._response = _query_pageant(data)

    def recv(self, n):
        """Return up to *n* buffered bytes, or '' when nothing is pending."""
        if self._response is None:
            return ''
        ret, remainder = self._response[:n], self._response[n:]
        # An empty remainder means the buffer is drained.
        self._response = remainder or None
        return ret

    def close(self):
        pass
if '__main__' == __name__:
    # Smoke test: ask a running Pageant for its identities and dump the
    # base64-encoded reply. 11..14 are the SSH2 agent protocol codes.
    SSH2_AGENTC_REQUEST_IDENTITIES, SSH2_AGENT_IDENTITIES_ANSWER, \
        SSH2_AGENTC_SIGN_REQUEST, SSH2_AGENT_SIGN_RESPONSE = range(11, 15)
    if can_talk_to_agent():
        cmd = chr(SSH2_AGENTC_REQUEST_IDENTITIES)
        # Agent messages are length-prefixed: 4-byte big-endian length + body.
        msg = (struct.pack('>I', len(cmd))+cmd)
        result = _query_pageant(msg)
        import base64
        # NOTE(review): Python 2 print statement -- this module predates
        # Python 3 support.
        print base64.encodestring(result)
|
gpl-2.0
|
santidediego/LearningDjango
|
lib/python3.5/site-packages/django/apps/config.py
|
121
|
8077
|
import os
from importlib import import_module
from django.core.exceptions import AppRegistryNotReady, ImproperlyConfigured
from django.utils._os import upath
from django.utils.module_loading import module_has_submodule
# Name of the submodule (inside each app package) that holds the app's models.
MODELS_MODULE_NAME = 'models'
class AppConfig(object):
    """
    Class representing a Django application and its configuration.
    """
    def __init__(self, app_name, app_module):
        """Store the app's dotted path and module; derive label, verbose
        name and filesystem path unless a subclass already set them."""
        # Full Python path to the application eg. 'django.contrib.admin'.
        self.name = app_name
        # Root module for the application eg. <module 'django.contrib.admin'
        # from 'django/contrib/admin/__init__.pyc'>.
        self.module = app_module
        # The following attributes could be defined at the class level in a
        # subclass, hence the test-and-set pattern.
        # Last component of the Python path to the application eg. 'admin'.
        # This value must be unique across a Django project.
        if not hasattr(self, 'label'):
            self.label = app_name.rpartition(".")[2]
        # Human-readable name for the application eg. "Admin".
        if not hasattr(self, 'verbose_name'):
            self.verbose_name = self.label.title()
        # Filesystem path to the application directory eg.
        # u'/usr/lib/python2.7/dist-packages/django/contrib/admin'. Unicode on
        # Python 2 and a str on Python 3.
        if not hasattr(self, 'path'):
            self.path = self._path_from_module(app_module)
        # Module containing models eg. <module 'django.contrib.admin.models'
        # from 'django/contrib/admin/models.pyc'>. Set by import_models().
        # None if the application doesn't have a models module.
        self.models_module = None
        # Mapping of lower case model names to model classes. Initially set to
        # None to prevent accidental access before import_models() runs.
        self.models = None
    def __repr__(self):
        """Debug representation, eg. '<AppConfig: admin>'."""
        return '<%s: %s>' % (self.__class__.__name__, self.label)
    def _path_from_module(self, module):
        """Attempt to determine app's filesystem path from its module."""
        # See #21874 for extended discussion of the behavior of this method in
        # various cases.
        # Convert paths to list because Python 3.3 _NamespacePath does not
        # support indexing.
        paths = list(getattr(module, '__path__', []))
        if len(paths) != 1:
            filename = getattr(module, '__file__', None)
            if filename is not None:
                paths = [os.path.dirname(filename)]
        if len(paths) > 1:
            raise ImproperlyConfigured(
                "The app module %r has multiple filesystem locations (%r); "
                "you must configure this app with an AppConfig subclass "
                "with a 'path' class attribute." % (module, paths))
        elif not paths:
            raise ImproperlyConfigured(
                "The app module %r has no filesystem location, "
                "you must configure this app with an AppConfig subclass "
                "with a 'path' class attribute." % (module,))
        return upath(paths[0])
    @classmethod
    def create(cls, entry):
        """
        Factory that creates an app config from an entry in INSTALLED_APPS.

        `entry` may be either the dotted path of an app module or the dotted
        path of an AppConfig subclass.
        """
        try:
            # If import_module succeeds, entry is a path to an app module,
            # which may specify an app config class with default_app_config.
            # Otherwise, entry is a path to an app config class or an error.
            module = import_module(entry)
        except ImportError:
            # Track that importing as an app module failed. If importing as an
            # app config class fails too, we'll trigger the ImportError again.
            module = None
            mod_path, _, cls_name = entry.rpartition('.')
            # Raise the original exception when entry cannot be a path to an
            # app config class.
            if not mod_path:
                raise
        else:
            try:
                # If this works, the app module specifies an app config class.
                entry = module.default_app_config
            except AttributeError:
                # Otherwise, it simply uses the default app config class.
                return cls(entry, module)
            else:
                mod_path, _, cls_name = entry.rpartition('.')
        # If we're reaching this point, we must attempt to load the app config
        # class located at <mod_path>.<cls_name>
        mod = import_module(mod_path)
        try:
            # NOTE: rebinds the classmethod's `cls` to the loaded config class.
            cls = getattr(mod, cls_name)
        except AttributeError:
            if module is None:
                # If importing as an app module failed, that error probably
                # contains the most informative traceback. Trigger it again.
                import_module(entry)
            else:
                raise
        # Check for obvious errors. (This check prevents duck typing, but
        # it could be removed if it became a problem in practice.)
        if not issubclass(cls, AppConfig):
            raise ImproperlyConfigured(
                "'%s' isn't a subclass of AppConfig." % entry)
        # Obtain app name here rather than in AppClass.__init__ to keep
        # all error checking for entries in INSTALLED_APPS in one place.
        try:
            app_name = cls.name
        except AttributeError:
            raise ImproperlyConfigured(
                "'%s' must supply a name attribute." % entry)
        # Ensure app_name points to a valid module.
        app_module = import_module(app_name)
        # Entry is a path to an app config class.
        return cls(app_name, app_module)
    def check_models_ready(self):
        """
        Raises an exception if models haven't been imported yet.
        """
        if self.models is None:
            raise AppRegistryNotReady(
                "Models for app '%s' haven't been imported yet." % self.label)
    def get_model(self, model_name):
        """
        Returns the model with the given case-insensitive model_name.
        Raises LookupError if no model exists with this name.
        """
        self.check_models_ready()
        try:
            return self.models[model_name.lower()]
        except KeyError:
            raise LookupError(
                "App '%s' doesn't have a '%s' model." % (self.label, model_name))
    def get_models(self, include_auto_created=False,
                   include_deferred=False, include_swapped=False):
        """
        Returns an iterable of models.
        By default, the following models aren't included:
        - auto-created models for many-to-many relations without
          an explicit intermediate table,
        - models created to satisfy deferred attribute queries,
        - models that have been swapped out.
        Set the corresponding keyword argument to True to include such models.
        Keyword arguments aren't documented; they're a private API.
        """
        self.check_models_ready()
        for model in self.models.values():
            if model._deferred and not include_deferred:
                continue
            if model._meta.auto_created and not include_auto_created:
                continue
            if model._meta.swapped and not include_swapped:
                continue
            yield model
    def import_models(self, all_models):
        """Bind the shared model registry and import the app's models module
        (if the app has one)."""
        # Dictionary of models for this app, primarily maintained in the
        # 'all_models' attribute of the Apps this AppConfig is attached to.
        # Injected as a parameter because it gets populated when models are
        # imported, which might happen before populate() imports models.
        self.models = all_models
        if module_has_submodule(self.module, MODELS_MODULE_NAME):
            models_module_name = '%s.%s' % (self.name, MODELS_MODULE_NAME)
            self.models_module = import_module(models_module_name)
    def ready(self):
        """
        Override this method in subclasses to run code when Django starts.
        """
|
mit
|
jeffreyliu3230/osf.io
|
website/conferences/utils.py
|
20
|
3066
|
# -*- coding: utf-8 -*-
import uuid
import requests
from modularodm import Q
from modularodm.exceptions import ModularOdmException
from framework.auth import Auth
from framework.auth.core import get_user
from website import util
from website import security
from website import settings
from website.project import new_node
from website.models import User, Node, MailRecord
def record_message(message, created):
    """Persist a MailRecord tying the raw message data to the objects it created."""
    MailRecord(
        data=message.raw,
        records=created,
    ).save()
def get_or_create_user(fullname, address, is_spam):
    """Get or create user by email address.
    :param str fullname: User full name
    :param str address: User email address
    :param bool is_spam: User flagged as potential spam
    :return: Tuple of (user, created)
    """
    existing = get_user(email=address)
    if existing:
        return existing, False
    new_user = User.create_confirmed(address, str(uuid.uuid4()), fullname)
    new_user.verification_key = security.random_string(20)
    if is_spam:
        new_user.system_tags.append('is_spam')
    new_user.save()
    return new_user, True
def get_or_create_node(title, user):
    """Get or create a node by title and creating user.

    :param str title: Node title
    :param User user: User creating node
    :return: Tuple of (node, created)
    """
    query = (
        Q('title', 'iexact', title)
        & Q('contributors', 'eq', user._id)
    )
    # EAFP: find_one raises when no matching node exists.
    try:
        return Node.find_one(query), False
    except ModularOdmException:
        return new_node('project', title, user), True
def provision_node(conference, message, node, user):
    """Populate a freshly created conference node from an inbound message.

    Side effects, in order: writes the message text to the 'home' wiki, adds
    conference admins as non-visible contributors, optionally makes the node
    public, applies conference tags, and saves the node.

    :param Conference conference:
    :param ConferenceMessage message:
    :param Node node:
    :param User user:
    """
    auth = Auth(user=user)
    node.update_node_wiki('home', message.text, auth)
    # Add conference admins as contributors without logging (log=False).
    node.add_contributors(prepare_contributors(conference.admins), log=False)
    # Only non-spam messages on conferences configured for public projects go public.
    if not message.is_spam and conference.public_projects:
        node.set_privacy('public', auth=auth)
    node.add_tag(message.conference_name, auth=auth)
    node.add_tag(message.conference_category, auth=auth)
    node.system_tags.extend(['emailed', message.conference_name, message.conference_category])
    if message.is_spam:
        node.system_tags.append('spam')
    node.save()
def prepare_contributors(admins):
    """Build contributor dicts granting each admin full permissions, hidden from view."""
    contributors = []
    for admin in admins:
        contributors.append({
            'user': admin,
            'permissions': ['read', 'write', 'admin'],
            'visible': False,
        })
    return contributors
def upload_attachment(user, node, attachment):
    """Push a single email attachment into the node's osfstorage via WaterButler."""
    # Rewind in case the attachment stream was already consumed.
    attachment.seek(0)
    file_name = '/' + (attachment.filename or settings.MISSING_FILE_NAME)
    destination = util.waterbutler_url_for('upload', 'osfstorage', file_name, node, user=user)
    requests.put(destination, data=attachment.read())
def upload_attachments(user, node, attachments):
    """Upload every attachment in ``attachments`` to ``node`` on behalf of ``user``."""
    for item in attachments:
        upload_attachment(user, node, item)
|
apache-2.0
|
omprakasha/odoo
|
addons/l10n_de/__openerp__.py
|
260
|
7736
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# SKR03
# =====
# Dieses Modul bietet Ihnen einen deutschen Kontenplan basierend auf dem SKR03.
# Gemäss der aktuellen Einstellungen ist die Firma nicht Umsatzsteuerpflichtig.
# Diese Grundeinstellung ist sehr einfach zu ändern und bedarf in der Regel
# grundsätzlich eine initiale Zuweisung von Steuerkonten zu Produkten und / oder
# Sachkonten oder zu Partnern.
# Die Umsatzsteuern (voller Steuersatz, reduzierte Steuer und steuerfrei)
# sollten bei den Produktstammdaten hinterlegt werden (in Abhängigkeit der
# Steuervorschriften). Die Zuordnung erfolgt auf dem Aktenreiter Finanzbuchhaltung
# (Kategorie: Umsatzsteuer).
# Die Vorsteuern (voller Steuersatz, reduzierte Steuer und steuerfrei)
# sollten ebenso bei den Produktstammdaten hinterlegt werden (in Abhängigkeit
# der Steuervorschriften). Die Zuordnung erfolgt auf dem Aktenreiter
# Finanzbuchhaltung (Kategorie: Vorsteuer).
# Die Zuordnung der Steuern für Ein- und Ausfuhren aus EU Ländern, sowie auch
# für den Ein- und Verkauf aus und in Drittländer sollten beim Partner
# (Lieferant/Kunde) hinterlegt werden (in Abhängigkeit vom Herkunftsland
# des Lieferanten/Kunden). Die Zuordnung beim Kunden ist 'höherwertig' als
# die Zuordnung bei Produkten und überschreibt diese im Einzelfall.
#
# Zur Vereinfachung der Steuerausweise und Buchung bei Auslandsgeschäften
# erlaubt OpenERP ein generelles Mapping von Steuerausweis und Steuerkonten
# (z.B. Zuordnung 'Umsatzsteuer 19%' zu 'steuerfreie Einfuhren aus der EU')
# zwecks Zuordnung dieses Mappings zum ausländischen Partner (Kunde/Lieferant).
# Die Rechnungsbuchung beim Einkauf bewirkt folgendes:
# Die Steuerbemessungsgrundlage (exklusive Steuer) wird ausgewiesen bei den
# jeweiligen Kategorien für den Vorsteuer Steuermessbetrag (z.B. Vorsteuer
# Steuermessbetrag Voller Steuersatz 19%).
# Der Steuerbetrag erscheint unter der Kategorie 'Vorsteuern' (z.B. Vorsteuer
# 19%). Durch multidimensionale Hierarchien können verschiedene Positionen
# zusammengefasst werden und dann in Form eines Reports ausgegeben werden.
#
# Die Rechnungsbuchung beim Verkauf bewirkt folgendes:
# Die Steuerbemessungsgrundlage (exklusive Steuer) wird ausgewiesen bei den
# jeweiligen Kategorien für den Umsatzsteuer Steuermessbetrag
# (z.B. Umsatzsteuer Steuermessbetrag Voller Steuersatz 19%).
# Der Steuerbetrag erscheint unter der Kategorie 'Umsatzsteuer'
# (z.B. Umsatzsteuer 19%). Durch multidimensionale Hierarchien können
# verschiedene Positionen zusammengefasst werden.
# Die zugewiesenen Steuerausweise können auf Ebene der einzelnen
# Rechnung (Eingangs- und Ausgangsrechnung) nachvollzogen werden,
# und dort gegebenenfalls angepasst werden.
# Rechnungsgutschriften führen zu einer Korrektur (Gegenposition)
# der Steuerbuchung, in Form einer spiegelbildlichen Buchung.
# SKR04
# =====
# Dieses Modul bietet Ihnen einen deutschen Kontenplan basierend auf dem SKR04.
# Gemäss der aktuellen Einstellungen ist die Firma nicht Umsatzsteuerpflichtig,
# d.h. im Standard existiert keine Zuordnung von Produkten und Sachkonten zu
# Steuerschlüsseln.
# Diese Grundeinstellung ist sehr einfach zu ändern und bedarf in der Regel
# grundsätzlich eine initiale Zuweisung von Steuerschlüsseln zu Produkten und / oder
# Sachkonten oder zu Partnern.
# Die Umsatzsteuern (voller Steuersatz, reduzierte Steuer und steuerfrei)
# sollten bei den Produktstammdaten hinterlegt werden (in Abhängigkeit der
# Steuervorschriften). Die Zuordnung erfolgt auf dem Aktenreiter Finanzbuchhaltung
# (Kategorie: Umsatzsteuer).
# Die Vorsteuern (voller Steuersatz, reduzierte Steuer und steuerfrei)
# sollten ebenso bei den Produktstammdaten hinterlegt werden (in Abhängigkeit
# der Steuervorschriften). Die Zuordnung erfolgt auf dem Aktenreiter
# Finanzbuchhaltung (Kategorie: Vorsteuer).
# Die Zuordnung der Steuern für Ein- und Ausfuhren aus EU Ländern, sowie auch
# für den Ein- und Verkauf aus und in Drittländer sollten beim Partner
# (Lieferant/Kunde) hinterlegt werden (in Abhängigkeit vom Herkunftsland
# des Lieferanten/Kunden). Die Zuordnung beim Kunden ist 'höherwertig' als
# die Zuordnung bei Produkten und überschreibt diese im Einzelfall.
#
# Zur Vereinfachung der Steuerausweise und Buchung bei Auslandsgeschäften
# erlaubt OpenERP ein generelles Mapping von Steuerausweis und Steuerkonten
# (z.B. Zuordnung 'Umsatzsteuer 19%' zu 'steuerfreie Einfuhren aus der EU')
# zwecks Zuordnung dieses Mappings zum ausländischen Partner (Kunde/Lieferant).
# Die Rechnungsbuchung beim Einkauf bewirkt folgendes:
# Die Steuerbemessungsgrundlage (exklusive Steuer) wird ausgewiesen bei den
# jeweiligen Kategorien für den Vorsteuer Steuermessbetrag (z.B. Vorsteuer
# Steuermessbetrag Voller Steuersatz 19%).
# Der Steuerbetrag erscheint unter der Kategorie 'Vorsteuern' (z.B. Vorsteuer
# 19%). Durch multidimensionale Hierarchien können verschiedene Positionen
# zusammengefasst werden und dann in Form eines Reports ausgegeben werden.
#
# Die Rechnungsbuchung beim Verkauf bewirkt folgendes:
# Die Steuerbemessungsgrundlage (exklusive Steuer) wird ausgewiesen bei den
# jeweiligen Kategorien für den Umsatzsteuer Steuermessbetrag
# (z.B. Umsatzsteuer Steuermessbetrag Voller Steuersatz 19%).
# Der Steuerbetrag erscheint unter der Kategorie 'Umsatzsteuer'
# (z.B. Umsatzsteuer 19%). Durch multidimensionale Hierarchien können
# verschiedene Positionen zusammengefasst werden.
# Die zugewiesenen Steuerausweise können auf Ebene der einzelnen
# Rechnung (Eingangs- und Ausgangsrechnung) nachvollzogen werden,
# und dort gegebenenfalls angepasst werden.
# Rechnungsgutschriften führen zu einer Korrektur (Gegenposition)
# der Steuerbuchung, in Form einer spiegelbildlichen Buchung.
{
'name': 'Deutschland - Accounting',
'version': '1.0',
'author': 'openbig.org',
'website': 'http://www.openbig.org',
'category': 'Localization/Account Charts',
'description': """
Dieses Modul beinhaltet einen deutschen Kontenrahmen basierend auf dem SKR03.
==============================================================================
German accounting chart and localization.
""",
'depends': ['base', 'account', 'base_iban', 'base_vat', 'account_chart'],
'demo': [ ],
'data': [
'account_tax_skr03.xml',
'account_types_skr03.xml',
'account_chart_skr03.xml',
'account_chart_template_skr03.xml',
'account_tax_fiscal_position_skr03.xml',
'account_tax_skr04.xml',
'account_types_skr04.xml',
'account_chart_skr04.xml',
'account_chart_template_skr04.xml',
'account_tax_fiscal_position_skr04.xml',
'l10n_de_wizard.xml',
],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
acsone/sale-workflow
|
sale_service_project/models/hr_timesheet_invoice.py
|
9
|
5715
|
# -*- coding: utf-8 -*-
# © 2015 Antiun Ingeniería S.L. - Sergio Teruel
# © 2015 Antiun Ingeniería S.L. - Carlos Dauden
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, api, _
from openerp.exceptions import except_orm
class HrAnalyticTimesheet(models.Model):
    """Extend timesheet lines with a lookup of the sale lines they bill against."""

    _inherit = "hr.analytic.timesheet"

    @api.multi
    def _get_sale_lines(self):
        """Return the sale order lines of the task works tied to this timesheet line."""
        domain = [('hr_analytic_timesheet_id', '=', self.id)]
        works = self.env['project.task.work'].search(domain)
        return works.mapped('task_id.sale_line_id')
class AccountAnalyticLine(models.Model):
    _inherit = 'account.analytic.line'
    @api.multi
    def _get_sale_lines(self):
        """Return the sale order lines linked, via task works, to these analytic lines."""
        timesheet_obj = self.env['hr.analytic.timesheet']
        timesheet = timesheet_obj.search([('line_id', 'in', self.ids)])
        task_works = self.env['project.task.work'].search(
            [('hr_analytic_timesheet_id', 'in', timesheet.ids)])
        return task_works.mapped('task_id.sale_line_id')
    @api.multi
    def invoice_cost_create(self, data=None):
        """Create customer invoices from these analytic lines.

        Lines are grouped into one invoice per (analytic account, company,
        currency), then into one invoice line per (product, uom, user,
        invoice factor, account, journal type).  Linked sale lines and their
        orders are updated to reference the created invoices.

        :param dict data: extra values forwarded to invoice-line preparation
        :return: list of ids of the created invoices
        :raises except_orm: if a contract lacks partner/pricelist data, or a
            line is not invoiceable
        """
        invoice_line_obj = self.env['account.invoice.line']
        analytic_line_obj = self.env['account.analytic.line']
        invoices = self.env['account.invoice']
        if data is None:
            data = {}
        # use key (partner/account, company, currency)
        # creates one invoice per key
        invoice_grouping = {}
        # prepare for iteration on journal and accounts
        for line in self:
            key = (line.account_id.id,
                   line.account_id.company_id.id,
                   line.account_id.pricelist_id.currency_id.id)
            invoice_grouping.setdefault(key, analytic_line_obj)
            invoice_grouping[key] = invoice_grouping[key] | line
        for (key_id, company_id, currency_id), analytic_lines in \
                invoice_grouping.items():
            # key_id is an account.analytic.account
            account = analytic_lines[0].account_id
            partner = account.partner_id  # will be the same for every line
            if (not partner) or not (currency_id):
                raise except_orm(_('Error!'), _(
                    'Contract incomplete. Please fill in the Customer and '
                    'Pricelist fields for %s.') % (account.name))
            curr_invoice = self._prepare_cost_invoice(
                partner, company_id, currency_id, analytic_lines)
            # Invoice is created in the partner's language and company context.
            invoice_context = dict(
                self.env.context, lang=partner.lang, force_company=company_id,
                company_id=company_id)
            last_invoice = self.env['account.invoice'].with_context(
                invoice_context).create(curr_invoice)
            invoices = invoices | last_invoice
            # use key (product, uom, user, invoiceable,
            # analytic account, journal type)
            # creates one invoice line per key
            invoice_lines_grouping = {}
            for analytic_line in analytic_lines:
                if not analytic_line.to_invoice:
                    raise except_orm(_('Error!'), _(
                        'Trying to invoice non invoiceable line for %s.') % (
                        analytic_line.product_id.name))
                key = (analytic_line.product_id.id,
                       analytic_line.product_uom_id.id,
                       analytic_line.user_id.id,
                       analytic_line.to_invoice.id,
                       analytic_line.account_id,
                       analytic_line.journal_id.type)
                # NOTE(review): rebinds the loop variable to a browse record in
                # the invoice context before grouping — presumably so later
                # computations run in that context; confirm this is required.
                analytic_line = analytic_line_obj.with_context(
                    invoice_context).browse(
                    [line.id for line in analytic_line])
                invoice_lines_grouping.setdefault(key, []).append(
                    analytic_line)
            # finally creates the invoice line
            for (product_id, uom, user_id, factor_id, account, journal_type),\
                    lines_to_invoice in invoice_lines_grouping.items():
                curr_invoice_line = self.with_context(
                    invoice_context)._prepare_cost_invoice_line(
                    last_invoice.id, product_id, uom, user_id, factor_id,
                    account, lines_to_invoice, journal_type, data)
                new_invoice_line = invoice_line_obj.create(curr_invoice_line)
                sale_lines = analytic_lines._get_sale_lines()
                # NOTE(review): (6, 0, ...) replaces the sale lines' linked
                # invoice lines on every iteration, so only the last created
                # invoice line stays linked — verify this is intended.
                sale_lines.write(
                    {'invoice_lines': [(6, 0, [new_invoice_line.id])]})
                sale_lines.mapped('order_id').write(
                    {'invoice_ids': [(4, last_invoice.id)],
                     'state': 'done'})
            analytic_lines.write({'invoice_id': last_invoice.id})
        invoices.button_reset_taxes()
        return invoices.ids
    @api.model
    def _prepare_cost_invoice_line(
            self, invoice_id, product_id, uom, user_id, factor_id, account,
            analytic_lines, journal_type, data):
        """Extend the standard invoice-line values with task work/material links."""
        res = super(AccountAnalyticLine, self)._prepare_cost_invoice_line(
            invoice_id, product_id, uom, user_id, factor_id, account,
            analytic_lines, journal_type, data)
        analytic_lines_ids = [x.id for x in analytic_lines]
        works = self.env['project.task.work'].search([
            ('hr_analytic_timesheet_id.line_id', 'in', analytic_lines_ids)])
        materials = self.env['project.task.materials']
        # Only search materials when the analytic_line_id column exists
        # (presumably provided by an optional module — TODO confirm).
        if 'analytic_line_id' in materials._all_columns:
            materials = materials.search([
                ('analytic_line_id', 'in', analytic_lines_ids)])
        res['task_work_ids'] = [(6, 0, works.ids)]
        res['task_materials_ids'] = [(6, 0, materials.ids)]
        return res
|
agpl-3.0
|
polyaxon/polyaxon
|
platform/coredb/tests/test_runs/test_run_model.py
|
1
|
3332
|
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from rest_framework.exceptions import ValidationError
from django.test import TestCase
from coredb.factories.projects import ProjectFactory
from coredb.factories.runs import RunFactory
from coredb.factories.users import UserFactory
from coredb.managers.deleted import ArchivedManager, LiveManager
from coredb.managers.statuses import new_run_status
from coredb.models.runs import Run
from polyaxon.lifecycle import V1StatusCondition, V1Statuses
class TestRunModel(TestCase):
    """Tests for Run creation constraints, content validation and status updates."""
    def setUp(self):
        super().setUp()
        self.user = UserFactory()
        self.project = ProjectFactory()
        self.run = RunFactory(project=self.project)
    def test_create_run_without_spec(self):
        # A run created without a spec gets no auto-generated name.
        run = RunFactory(project=self.project, user=self.user)
        assert run.name is None
    def test_create_run_with_no_spec_or_params(self):
        assert self.run.tags is None
        assert self.run.inputs is None
        assert self.run.outputs is None
    def test_create_run_with_no_spec_and_params(self):
        run = RunFactory(project=self.project, content=None)
        assert run.content is None
    def test_create_run_without_content_passes(self):
        run = RunFactory(project=self.project)
        assert run.content is None
        assert run.is_managed is False
    def test_create_run_without_content_and_managed_raises(self):
        # A managed run must carry content.
        with self.assertRaises(ValidationError):
            RunFactory(project=self.project, is_managed=True)
    def test_create_run_with_content_and_is_managed(self):
        # Managed runs accept raw_content but reject plain content alone.
        with self.assertRaises(ValidationError):
            RunFactory(project=self.project, is_managed=True, content="foo")
        RunFactory(project=self.project, is_managed=True, raw_content="foo")
    def test_creation_with_bad_config(self):
        # Unparsable content is stored as-is; the run still starts as CREATED.
        run = RunFactory(project=self.project, content="foo")
        assert run.status == V1Statuses.CREATED
        assert run.content == "foo"
    def test_status_update_results_in_new_updated_at_datetime(self):
        updated_at = self.run.updated_at
        # Create new status
        new_run_status(
            self.run,
            condition=V1StatusCondition.get_condition(
                type=V1Statuses.STARTING, status=True
            ),
        )
        assert updated_at < self.run.updated_at
        updated_at = self.run.updated_at
        # Create new status
        new_run_status(
            self.run,
            condition=V1StatusCondition.get_condition(
                type=V1Statuses.STARTING, status=True
            ),
        )
        assert updated_at < self.run.updated_at
    def test_managers(self):
        # Default manager hides archived runs; 'archived' exposes them.
        assert isinstance(Run.objects, LiveManager)
        assert isinstance(Run.archived, ArchivedManager)
|
apache-2.0
|
jorsea/odoo-addons
|
account_bank_voucher/wizard/bank_statement_populate.py
|
4
|
3057
|
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import fields, models, api
class account_voucher_populate_statement(models.TransientModel):
    """Wizard that turns posted account vouchers into bank statement lines.

    Launched from a bank statement (``context['active_id']``); each selected
    voucher produces one statement line and is flagged as a bank voucher.
    """
    _name = "account.voucher.populate.statement"
    _description = "Account Voucher Populate Statement"
    # Journal used to filter the selectable vouchers.
    journal_id = fields.Many2one(
        'account.journal',
        'Journal',
        required=True
    )
    # Posted vouchers of the chosen journal not yet linked to a statement line.
    line_ids = fields.Many2many(
        'account.voucher',
        'account_voucher_line_rel_',
        'voucher_id', 'line_id',
        'Vouchers',
        domain="[('journal_id', '=', journal_id), ('state', '=', 'posted'), ('bank_statement_line_ids', '=', False)]"
    )
    def get_statement_line_new(self, cr, uid, voucher, statement, context=None):
        """Build the values dict for the statement line created from ``voucher``.

        Override this method to modify the new statement line to create.
        """
        # Convert the voucher amount to the statement currency at the
        # voucher's own date.
        ctx = context.copy()
        ctx['date'] = voucher.date
        amount = self.pool.get('res.currency').compute(cr, uid, voucher.currency_id.id,
            statement.currency.id, voucher.amount, context=ctx)
        # Payments decrease the statement balance; receipts increase it.
        sign = voucher.type == 'payment' and -1.0 or 1.0
        type = voucher.type == 'payment' and 'supplier' or 'customer'
        account_id = voucher.type == 'payment' and voucher.partner_id.property_account_payable.id or voucher.partner_id.property_account_receivable.id
        return {
            'name': voucher.reference or voucher.number or '?',
            'amount': sign * amount,
            'type': type,
            'partner_id': voucher.partner_id.id,
            'account_id': account_id,
            'statement_id': statement.id,
            'ref': voucher.name,
            'voucher_id': voucher.id,
            'journal_entry_id': voucher.move_id.id,
        }
    def populate_statement(self, cr, uid, ids, context=None):
        """Create one statement line per selected voucher, then close the wizard.

        Expects ``context['active_id']`` to be the target bank statement id.
        """
        statement_obj = self.pool.get('account.bank.statement')
        statement_line_obj = self.pool.get('account.bank.statement.line')
        voucher_obj = self.pool.get('account.voucher')
        if context is None:
            context = {}
        data = self.read(cr, uid, ids, [], context=context)[0]
        voucher_ids = data['line_ids']
        if not voucher_ids:
            # Nothing selected: just close the wizard.
            return {'type': 'ir.actions.act_window_close'}
        statement = statement_obj.browse(
            cr, uid, context['active_id'], context=context)
        for voucher in voucher_obj.browse(cr, uid, voucher_ids, context=context):
            statement_line_obj.create(cr, uid,
                self.get_statement_line_new(cr, uid, voucher, statement, context=context), context=context)
        # Flag the processed vouchers as bank vouchers.
        voucher_obj.write(
            cr, uid, voucher_ids, {'is_bank_voucher': True}, context=context)
        return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
Storyyeller/Krakatau
|
Krakatau/classfileformat/reader.py
|
1
|
1221
|
import struct
class TruncatedStreamError(EOFError):
    """Raised when a read would run past the end of the underlying buffer."""
    pass
class Reader(object):
    """Sequential big-endian binary reader over an immutable byte buffer.

    Tracks a current offset and advances it as values are consumed.
    """
    __slots__ = ['d', 'off']

    def __init__(self, data, off=0):
        self.d = data
        self.off = off

    def done(self):
        """True once every byte has been consumed."""
        return self.off >= len(self.d)

    def copy(self):
        """Independent reader positioned at the same offset."""
        return Reader(self.d, self.off)

    def u8(self):
        return self.get('>B')

    def s8(self):
        return self.get('>b')

    def u16(self):
        return self.get('>H')

    def s16(self):
        return self.get('>h')

    def u32(self):
        return self.get('>I')

    def s32(self):
        return self.get('>i')

    def u64(self):
        return self.get('>Q')

    # binUnpacker-compatible primitives
    def get(self, fmt, forceTuple=False, peek=False):
        """Unpack ``fmt`` at the current offset; advance unless ``peek``.

        Single-element results are unwrapped unless ``forceTuple`` is set.
        Raises TruncatedStreamError when fewer bytes remain than needed.
        """
        needed = struct.calcsize(fmt)
        if needed > self.size():
            raise TruncatedStreamError()
        values = struct.unpack_from(fmt, self.d, self.off)
        if not peek:
            self.off += needed
        return values if forceTuple or len(values) != 1 else values[0]

    def getRaw(self, num):
        """Return the next ``num`` raw bytes and advance past them."""
        if num > self.size():
            raise TruncatedStreamError()
        start = self.off
        self.off = start + num
        return self.d[start:self.off]

    def size(self):
        """Number of unread bytes remaining."""
        return len(self.d) - self.off
|
gpl-3.0
|
GreenRecycleBin/servo
|
tests/wpt/web-platform-tests/webdriver/cookie/cookie_test.py
|
58
|
1987
|
import os
import sys
import unittest
sys.path.insert(1, os.path.abspath(os.path.join(__file__, "../..")))
import base_test
from webdriver import exceptions
class CookieTest(base_test.WebDriverBaseTest):
    """WebDriver tests for the Add Cookie / Get Cookie commands."""
    def setUp(self):
        # Every test starts from the same cookie test page.
        self.driver.get(self.webserver.where_is("cookie/res/cookie_container.html"))
    def test_can_create_a_well_formed_cookie( self ):
        name = 'foo'
        value = 'bar'
        self.driver.add_cookie({ 'name': name, 'value': value })
    def test_cookies_should_allow_secure_to_be_set( self ):
        name = 'foo'
        value = 'bar'
        secure = True
        self.driver.add_cookie({ 'name': name,
                                 'value': value,
                                 'path': '/',
                                 'secure': secure})
        self.assertTrue(self.driver.get_cookie(name)[0]['secure'])
    def test_secure_defaults_to_false( self ):
        name = 'foo'
        value = 'bar'
        self.driver.add_cookie({ 'name': name,
                                 'value': value})
        self.assertFalse(self.driver.get_cookie(name)[0]['secure'])
    def test_should_throw_an_exception_when_semicolon_exists_in_the_cookie_attribute(self):
        invalid_name = 'foo;bar'
        value = 'foobar'
        try:
            self.driver.add_cookie({ 'name': invalid_name, 'value': value })
            self.fail( 'should have thrown exceptions.' )
        # Either rejection exception is accepted here.
        except exceptions.UnableToSetCookieException:
            pass
        except exceptions.InvalidCookieDomainException:
            pass
    def test_should_throw_an_exception_the_name_is_null(self):
        val = 'foobar'
        try:
            self.driver.add_cookie({ 'name': None, 'value': val })
            self.fail( 'should have thrown exceptions.' )
        # Either rejection exception is accepted here.
        except exceptions.UnableToSetCookieException:
            pass
        except exceptions.InvalidCookieDomainException:
            pass
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    unittest.main()
|
mpl-2.0
|
nzavagli/UnrealPy
|
UnrealPyEmbed/Source/Python/Lib/python27/lib-tk/test/widget_tests.py
|
24
|
19643
|
# Common tests for test_tkinter/test_widgets.py and test_ttk/test_widgets.py
import unittest
import sys
import Tkinter as tkinter
from ttk import Scale
from test_ttk.support import (AbstractTkTest, tcl_version, requires_tcl,
get_tk_patchlevel, pixels_conv, tcl_obj_eq)
import test.test_support
# Conversion fallback: on Tk < 8.5.11 expected option values are compared as
# strings (presumably because older Tk returns string results rather than
# Tcl objects — TODO confirm).
noconv = noconv_meth = False
if get_tk_patchlevel() < (8, 5, 11):
    noconv = str
noconv_meth = noconv and staticmethod(noconv)
def int_round(x):
    """Round ``x`` to the nearest value and return the result as an ``int``."""
    nearest = round(x)
    return int(nearest)
# Default pixel conversion rounds to the nearest integer.
pixels_round = int_round
if get_tk_patchlevel()[:3] == (8, 5, 11):
    # Issue #19085: Workaround a bug in Tk
    # http://core.tcl.tk/tk/info/3497848
    pixels_round = int
# Unique marker meaning "caller supplied no expected value" in checkParam.
_sentinel = object()
class AbstractWidgetTest(AbstractTkTest):
    """Shared helpers for validating Tk widget configuration options.

    Subclasses provide ``create()`` (returning the widget under test) and may
    override the _conv_* / _stringify knobs to match their widget's behavior.
    """
    _conv_pixels = staticmethod(pixels_round)
    _conv_pad_pixels = None
    _stringify = False
    @property
    def scaling(self):
        # Tk scaling factor, fetched once and cached on the instance.
        try:
            return self._scaling
        except AttributeError:
            self._scaling = float(self.root.call('tk', 'scaling'))
            return self._scaling
    def _str(self, value):
        # Convert an expected value to the string form Tk would report,
        # unless object results can be compared directly.
        if not self._stringify and self.wantobjects and tcl_version >= (8, 6):
            return value
        if isinstance(value, tuple):
            return ' '.join(map(self._str, value))
        return str(value)
    def assertEqual2(self, actual, expected, msg=None, eq=object.__eq__):
        # Like assertEqual but with a pluggable equality predicate.
        if eq(actual, expected):
            return
        self.assertEqual(actual, expected, msg)
    def checkParam(self, widget, name, value, expected=_sentinel,
                   conv=False, eq=None):
        """Set option ``name`` to ``value`` and verify it reads back as expected."""
        widget[name] = value
        if expected is _sentinel:
            expected = value
        if conv:
            expected = conv(expected)
        if self._stringify or not self.wantobjects:
            if isinstance(expected, tuple):
                expected = tkinter._join(expected)
            else:
                expected = str(expected)
        if eq is None:
            eq = tcl_obj_eq
        self.assertEqual2(widget[name], expected, eq=eq)
        self.assertEqual2(widget.cget(name), expected, eq=eq)
        # XXX
        if not isinstance(widget, Scale):
            t = widget.configure(name)
            self.assertEqual(len(t), 5)
            self.assertEqual2(t[4], expected, eq=eq)
    def checkInvalidParam(self, widget, name, value, errmsg=None,
                          keep_orig=True):
        """Verify that assigning ``value`` raises TclError (via both set paths)."""
        orig = widget[name]
        if errmsg is not None:
            errmsg = errmsg.format(value)
        with self.assertRaises(tkinter.TclError) as cm:
            widget[name] = value
        if errmsg is not None:
            self.assertEqual(str(cm.exception), errmsg)
        if keep_orig:
            self.assertEqual(widget[name], orig)
        else:
            widget[name] = orig
        with self.assertRaises(tkinter.TclError) as cm:
            widget.configure({name: value})
        if errmsg is not None:
            self.assertEqual(str(cm.exception), errmsg)
        if keep_orig:
            self.assertEqual(widget[name], orig)
        else:
            widget[name] = orig
    def checkParams(self, widget, name, *values, **kwargs):
        for value in values:
            self.checkParam(widget, name, value, **kwargs)
    def checkIntegerParam(self, widget, name, *values, **kwargs):
        self.checkParams(widget, name, *values, **kwargs)
        self.checkInvalidParam(widget, name, '',
                errmsg='expected integer but got ""')
        self.checkInvalidParam(widget, name, '10p',
                errmsg='expected integer but got "10p"')
        self.checkInvalidParam(widget, name, 3.2,
                errmsg='expected integer but got "3.2"')
    def checkFloatParam(self, widget, name, *values, **kwargs):
        if 'conv' in kwargs:
            conv = kwargs.pop('conv')
        else:
            conv = float
        for value in values:
            self.checkParam(widget, name, value, conv=conv, **kwargs)
        self.checkInvalidParam(widget, name, '',
                errmsg='expected floating-point number but got ""')
        self.checkInvalidParam(widget, name, 'spam',
                errmsg='expected floating-point number but got "spam"')
    def checkBooleanParam(self, widget, name):
        for value in (False, 0, 'false', 'no', 'off'):
            self.checkParam(widget, name, value, expected=0)
        for value in (True, 1, 'true', 'yes', 'on'):
            self.checkParam(widget, name, value, expected=1)
        self.checkInvalidParam(widget, name, '',
                errmsg='expected boolean value but got ""')
        self.checkInvalidParam(widget, name, 'spam',
                errmsg='expected boolean value but got "spam"')
    def checkColorParam(self, widget, name, allow_empty=None, **kwargs):
        self.checkParams(widget, name,
                         '#ff0000', '#00ff00', '#0000ff', '#123456',
                         'red', 'green', 'blue', 'white', 'black', 'grey',
                         **kwargs)
        self.checkInvalidParam(widget, name, 'spam',
                errmsg='unknown color name "spam"')
    def checkCursorParam(self, widget, name, **kwargs):
        self.checkParams(widget, name, 'arrow', 'watch', 'cross', '',**kwargs)
        # The 'none' cursor only exists on Tcl/Tk 8.5+.
        if tcl_version >= (8, 5):
            self.checkParam(widget, name, 'none')
        self.checkInvalidParam(widget, name, 'spam',
                errmsg='bad cursor spec "spam"')
    def checkCommandParam(self, widget, name):
        def command(*args):
            pass
        widget[name] = command
        self.assertTrue(widget[name])
        self.checkParams(widget, name, '')
    def checkEnumParam(self, widget, name, *values, **kwargs):
        if 'errmsg' in kwargs:
            errmsg = kwargs.pop('errmsg')
        else:
            errmsg = None
        self.checkParams(widget, name, *values, **kwargs)
        if errmsg is None:
            # Build Tk's standard "must be a, b, or c" message from the values.
            errmsg2 = ' %s "{}": must be %s%s or %s' % (
                    name,
                    ', '.join(values[:-1]),
                    ',' if len(values) > 2 else '',
                    values[-1])
            self.checkInvalidParam(widget, name, '',
                    errmsg='ambiguous' + errmsg2)
            errmsg = 'bad' + errmsg2
        self.checkInvalidParam(widget, name, 'spam', errmsg=errmsg)
    def checkPixelsParam(self, widget, name, *values, **kwargs):
        """Check a screen-distance option against numeric and unit-suffixed values."""
        if 'conv' in kwargs:
            conv = kwargs.pop('conv')
        else:
            conv = None
        if conv is None:
            conv = self._conv_pixels
        if 'keep_orig' in kwargs:
            keep_orig = kwargs.pop('keep_orig')
        else:
            keep_orig = True
        for value in values:
            expected = _sentinel
            conv1 = conv
            if isinstance(value, str):
                # Unit-suffixed strings ('10p', '12m') are converted to pixels
                # using the Tk scaling factor before comparison.
                if conv1 and conv1 is not str:
                    expected = pixels_conv(value) * self.scaling
                    conv1 = int_round
            self.checkParam(widget, name, value, expected=expected,
                            conv=conv1, **kwargs)
        self.checkInvalidParam(widget, name, '6x',
                errmsg='bad screen distance "6x"', keep_orig=keep_orig)
        self.checkInvalidParam(widget, name, 'spam',
                errmsg='bad screen distance "spam"', keep_orig=keep_orig)
    def checkReliefParam(self, widget, name):
        self.checkParams(widget, name,
                'flat', 'groove', 'raised', 'ridge', 'solid', 'sunken')
        errmsg='bad relief "spam": must be '\
               'flat, groove, raised, ridge, solid, or sunken'
        # The exact error message is only checked on Tcl/Tk >= 8.6.
        if tcl_version < (8, 6):
            errmsg = None
        self.checkInvalidParam(widget, name, 'spam',
                errmsg=errmsg)
    def checkImageParam(self, widget, name):
        image = tkinter.PhotoImage(master=self.root, name='image1')
        self.checkParam(widget, name, image, conv=str)
        self.checkInvalidParam(widget, name, 'spam',
                errmsg='image "spam" doesn\'t exist')
        widget[name] = ''
    def checkVariableParam(self, widget, name, var):
        self.checkParam(widget, name, var, conv=str)
    def assertIsBoundingBox(self, bbox):
        # A valid bounding box is a 4-tuple of ints.
        self.assertIsNotNone(bbox)
        self.assertIsInstance(bbox, tuple)
        if len(bbox) != 4:
            self.fail('Invalid bounding box: %r' % (bbox,))
        for item in bbox:
            if not isinstance(item, int):
                self.fail('Invalid bounding box: %r' % (bbox,))
                break
class StandardOptionsTests(object):
STANDARD_OPTIONS = (
'activebackground', 'activeborderwidth', 'activeforeground', 'anchor',
'background', 'bitmap', 'borderwidth', 'compound', 'cursor',
'disabledforeground', 'exportselection', 'font', 'foreground',
'highlightbackground', 'highlightcolor', 'highlightthickness',
'image', 'insertbackground', 'insertborderwidth',
'insertofftime', 'insertontime', 'insertwidth',
'jump', 'justify', 'orient', 'padx', 'pady', 'relief',
'repeatdelay', 'repeatinterval',
'selectbackground', 'selectborderwidth', 'selectforeground',
'setgrid', 'takefocus', 'text', 'textvariable', 'troughcolor',
'underline', 'wraplength', 'xscrollcommand', 'yscrollcommand',
)
def test_activebackground(self):
widget = self.create()
self.checkColorParam(widget, 'activebackground')
def test_activeborderwidth(self):
widget = self.create()
self.checkPixelsParam(widget, 'activeborderwidth',
0, 1.3, 2.9, 6, -2, '10p')
def test_activeforeground(self):
widget = self.create()
self.checkColorParam(widget, 'activeforeground')
def test_anchor(self):
widget = self.create()
self.checkEnumParam(widget, 'anchor',
'n', 'ne', 'e', 'se', 's', 'sw', 'w', 'nw', 'center')
def test_background(self):
widget = self.create()
self.checkColorParam(widget, 'background')
if 'bg' in self.OPTIONS:
self.checkColorParam(widget, 'bg')
def test_bitmap(self):
widget = self.create()
self.checkParam(widget, 'bitmap', 'questhead')
self.checkParam(widget, 'bitmap', 'gray50')
filename = test.test_support.findfile('python.xbm', subdir='imghdrdata')
self.checkParam(widget, 'bitmap', '@' + filename)
# Cocoa Tk widgets don't detect invalid -bitmap values
# See https://core.tcl.tk/tk/info/31cd33dbf0
if not ('aqua' in self.root.tk.call('tk', 'windowingsystem') and
'AppKit' in self.root.winfo_server()):
self.checkInvalidParam(widget, 'bitmap', 'spam',
errmsg='bitmap "spam" not defined')
def test_borderwidth(self):
widget = self.create()
self.checkPixelsParam(widget, 'borderwidth',
0, 1.3, 2.6, 6, -2, '10p')
if 'bd' in self.OPTIONS:
self.checkPixelsParam(widget, 'bd', 0, 1.3, 2.6, 6, -2, '10p')
def test_compound(self):
widget = self.create()
self.checkEnumParam(widget, 'compound',
'bottom', 'center', 'left', 'none', 'right', 'top')
def test_cursor(self):
widget = self.create()
self.checkCursorParam(widget, 'cursor')
def test_disabledforeground(self):
widget = self.create()
self.checkColorParam(widget, 'disabledforeground')
def test_exportselection(self):
widget = self.create()
self.checkBooleanParam(widget, 'exportselection')
def test_font(self):
widget = self.create()
self.checkParam(widget, 'font',
'-Adobe-Helvetica-Medium-R-Normal--*-120-*-*-*-*-*-*')
self.checkInvalidParam(widget, 'font', '',
errmsg='font "" doesn\'t exist')
def test_foreground(self):
widget = self.create()
self.checkColorParam(widget, 'foreground')
if 'fg' in self.OPTIONS:
self.checkColorParam(widget, 'fg')
def test_highlightbackground(self):
widget = self.create()
self.checkColorParam(widget, 'highlightbackground')
def test_highlightcolor(self):
widget = self.create()
self.checkColorParam(widget, 'highlightcolor')
def test_highlightthickness(self):
widget = self.create()
self.checkPixelsParam(widget, 'highlightthickness',
0, 1.3, 2.6, 6, '10p')
self.checkParam(widget, 'highlightthickness', -2, expected=0,
conv=self._conv_pixels)
@unittest.skipIf(sys.platform == 'darwin',
                 'crashes with Cocoa Tk (issue19733)')
def test_image(self):
    """'image' accepts a Tk image object."""
    w = self.create()
    self.checkImageParam(w, 'image')
def test_insertbackground(self):
    """'insertbackground' accepts color values."""
    w = self.create()
    self.checkColorParam(w, 'insertbackground')
def test_insertborderwidth(self):
    """'insertborderwidth' takes pixel values."""
    w = self.create()
    self.checkPixelsParam(w, 'insertborderwidth', 0, 1.3, 2.6, 6, -2, '10p')
def test_insertofftime(self):
    """'insertofftime' is an integer (milliseconds)."""
    w = self.create()
    self.checkIntegerParam(w, 'insertofftime', 100)
def test_insertontime(self):
    """'insertontime' is an integer (milliseconds)."""
    w = self.create()
    self.checkIntegerParam(w, 'insertontime', 100)
def test_insertwidth(self):
    """'insertwidth' takes pixel values."""
    w = self.create()
    self.checkPixelsParam(w, 'insertwidth', 1.3, 2.6, -2, '10p')
def test_jump(self):
    """'jump' is a boolean option."""
    w = self.create()
    self.checkBooleanParam(w, 'jump')
def test_justify(self):
    """'justify' is left/right/center; an empty value is ambiguous."""
    w = self.create()
    self.checkEnumParam(w, 'justify', 'left', 'right', 'center',
                        errmsg='bad justification "{}": must be '
                               'left, right, or center')
    self.checkInvalidParam(w, 'justify', '',
                           errmsg='ambiguous justification "": must be '
                                  'left, right, or center')
def test_orient(self):
    """'orient' has a widget-specific default and is horizontal/vertical."""
    w = self.create()
    self.assertEqual(str(w['orient']), self.default_orient)
    self.checkEnumParam(w, 'orient', 'horizontal', 'vertical')
def test_padx(self):
    """'padx' takes pixel values, including screen distances like '12m'."""
    w = self.create()
    self.checkPixelsParam(w, 'padx', 3, 4.4, 5.6, -2, '12m',
                          conv=self._conv_pad_pixels)
def test_pady(self):
    """'pady' takes pixel values, including screen distances like '12m'."""
    w = self.create()
    self.checkPixelsParam(w, 'pady', 3, 4.4, 5.6, -2, '12m',
                          conv=self._conv_pad_pixels)
def test_relief(self):
    """'relief' accepts the standard relief keywords."""
    w = self.create()
    self.checkReliefParam(w, 'relief')
def test_repeatdelay(self):
    """'repeatdelay' is an integer (negative values allowed)."""
    w = self.create()
    self.checkIntegerParam(w, 'repeatdelay', -500, 500)
def test_repeatinterval(self):
    """'repeatinterval' is an integer (negative values allowed)."""
    w = self.create()
    self.checkIntegerParam(w, 'repeatinterval', -500, 500)
def test_selectbackground(self):
    """'selectbackground' accepts color values."""
    w = self.create()
    self.checkColorParam(w, 'selectbackground')
def test_selectborderwidth(self):
    """'selectborderwidth' takes pixel values."""
    w = self.create()
    self.checkPixelsParam(w, 'selectborderwidth', 1.3, 2.6, -2, '10p')
def test_selectforeground(self):
    """'selectforeground' accepts color values."""
    w = self.create()
    self.checkColorParam(w, 'selectforeground')
def test_setgrid(self):
    """'setgrid' is a boolean option."""
    w = self.create()
    self.checkBooleanParam(w, 'setgrid')
def test_state(self):
    """'state' is an enum: active/disabled/normal."""
    w = self.create()
    self.checkEnumParam(w, 'state', 'active', 'disabled', 'normal')
def test_takefocus(self):
    """'takefocus' accepts '0', '1' and the empty string."""
    w = self.create()
    self.checkParams(w, 'takefocus', '0', '1', '')
def test_text(self):
    """'text' accepts arbitrary strings, including the empty one."""
    w = self.create()
    self.checkParams(w, 'text', '', 'any string')
def test_textvariable(self):
    """'textvariable' links the widget to a StringVar."""
    w = self.create()
    sv = tkinter.StringVar(self.root)
    self.checkVariableParam(w, 'textvariable', sv)
def test_troughcolor(self):
    """'troughcolor' accepts color values."""
    w = self.create()
    self.checkColorParam(w, 'troughcolor')
def test_underline(self):
    """'underline' is an integer character index."""
    w = self.create()
    self.checkIntegerParam(w, 'underline', 0, 1, 10)
def test_wraplength(self):
    """'wraplength' takes pixel values."""
    w = self.create()
    self.checkPixelsParam(w, 'wraplength', 100)
def test_xscrollcommand(self):
    """'xscrollcommand' accepts a Tcl callback."""
    w = self.create()
    self.checkCommandParam(w, 'xscrollcommand')
def test_yscrollcommand(self):
    """'yscrollcommand' accepts a Tcl callback."""
    w = self.create()
    self.checkCommandParam(w, 'yscrollcommand')
# non-standard but common options
def test_command(self):
    """'command' (non-standard but common) accepts a Tcl callback."""
    w = self.create()
    self.checkCommandParam(w, 'command')
def test_indicatoron(self):
    """'indicatoron' is a boolean option."""
    w = self.create()
    self.checkBooleanParam(w, 'indicatoron')
def test_offrelief(self):
    """'offrelief' accepts the standard relief keywords."""
    w = self.create()
    self.checkReliefParam(w, 'offrelief')
def test_overrelief(self):
    """'overrelief' accepts the standard relief keywords."""
    w = self.create()
    self.checkReliefParam(w, 'overrelief')
def test_selectcolor(self):
    """'selectcolor' accepts color values."""
    w = self.create()
    self.checkColorParam(w, 'selectcolor')
def test_selectimage(self):
    """'selectimage' accepts a Tk image object."""
    w = self.create()
    self.checkImageParam(w, 'selectimage')
@requires_tcl(8, 5)
def test_tristateimage(self):
    """'tristateimage' (Tcl >= 8.5) accepts a Tk image object."""
    w = self.create()
    self.checkImageParam(w, 'tristateimage')
@requires_tcl(8, 5)
def test_tristatevalue(self):
    """'tristatevalue' (Tcl >= 8.5) accepts an arbitrary string."""
    w = self.create()
    self.checkParam(w, 'tristatevalue', 'unknowable')
def test_variable(self):
    """'variable' links the widget to a DoubleVar."""
    w = self.create()
    dv = tkinter.DoubleVar(self.root)
    self.checkVariableParam(w, 'variable', dv)
class IntegerSizeTests(object):
    """Mixin for widgets whose 'height'/'width' options are plain integers."""

    def test_height(self):
        w = self.create()
        self.checkIntegerParam(w, 'height', 100, -100, 0)

    def test_width(self):
        w = self.create()
        self.checkIntegerParam(w, 'width', 402, -402, 0)
class PixelSizeTests(object):
    """Mixin for widgets whose 'height'/'width' options are screen distances."""

    def test_height(self):
        w = self.create()
        self.checkPixelsParam(w, 'height', 100, 101.2, 102.6, -100, 0, '3c')

    def test_width(self):
        w = self.create()
        self.checkPixelsParam(w, 'width', 402, 403.4, 404.6, -402, 0, '5i')
def add_standard_options(*source_classes):
    # This decorator adds test_xxx methods from source classes for every xxx
    # option in the OPTIONS class attribute if they are not defined explicitly.
    def decorator(cls):
        for option in cls.OPTIONS:
            methodname = 'test_' + option
            if not hasattr(cls, methodname):
                for source_class in source_classes:
                    if hasattr(source_class, methodname):
                        # Copy the plain function out of the unbound method
                        # (Python 2: .im_func) so it binds to `cls`.
                        setattr(cls, methodname,
                                getattr(source_class, methodname).im_func)
                        break
                else:
                    # No source class provides this test: install a placeholder
                    # that still reads the option (so bad option names surface)
                    # and then fails to flag the missing coverage.  `option` is
                    # bound as a default argument to avoid the late-binding
                    # closure pitfall.
                    def test(self, option=option):
                        widget = self.create()
                        widget[option]
                        raise AssertionError('Option "%s" is not tested in %s' %
                                             (option, cls.__name__))
                    test.__name__ = methodname
                    setattr(cls, methodname, test)
        return cls
    return decorator
def setUpModule():
    # When running verbosely, report the Tcl patchlevel -- useful when
    # diagnosing platform/Tk-version-specific failures.
    if test.test_support.verbose:
        tcl = tkinter.Tcl()
        print 'patchlevel =', tcl.call('info', 'patchlevel')
|
mit
|
yarikoptic/pystatsmodels
|
statsmodels/sandbox/examples/example_gam.py
|
4
|
2337
|
'''original example for checking how far GAM works

Note: uncomment plt.show() to display graphs
'''

# Selects which demo runs: 1 = Gaussian AdditiveModel, 2 = Binomial GAM,
# 3 = Poisson GAM.
example = 2 # 1,2 or 3

import numpy as np
import numpy.random as R
import matplotlib.pyplot as plt

from statsmodels.sandbox.gam import AdditiveModel
from statsmodels.sandbox.gam import Model as GAM #?
from statsmodels.genmod.families import family
from statsmodels.genmod.generalized_linear_model import GLM

# Rescale a sample to zero mean and unit variance (or just zero mean).
standardize = lambda x: (x - x.mean()) / x.std()
demean = lambda x: (x - x.mean())

# Simulated data: two sorted standard-normal regressors plus noise.
nobs = 150
x1 = R.standard_normal(nobs)
x1.sort()
x2 = R.standard_normal(nobs)
x2.sort()
y = R.standard_normal((nobs,))

# True additive components: two nonlinear univariate functions of x1, x2.
f1 = lambda x1: (x1 + x1**2 - 3 - 1 * x1**3 + 0.1 * np.exp(-x1/4.))
f2 = lambda x2: (x2 + x2**2 - 0.1 * np.exp(x2/4.))
z = standardize(f1(x1)) + standardize(f2(x2))
z = standardize(z) * 2 # 0.1

# Observed response = noise + rescaled true signal; design matrix d is
# (nobs, 2).
y += z
d = np.array([x1,x2]).T

if example == 1:
    print "normal"
    m = AdditiveModel(d)
    m.fit(y)
    x = np.linspace(-2,2,50)
    print m
    y_pred = m.results.predict(d)
    plt.figure()
    plt.plot(y, '.')
    plt.plot(z, 'b-', label='true')
    plt.plot(y_pred, 'r-', label='AdditiveModel')
    plt.legend()
    plt.title('gam.AdditiveModel')

import scipy.stats, time

if example == 2:
    print "binomial"
    f = family.Binomial()
    # Bernoulli draws with success probabilities given by the inverse link
    # applied to the latent signal y.
    b = np.asarray([scipy.stats.bernoulli.rvs(p) for p in f.link.inverse(y)])
    b.shape = y.shape
    m = GAM(b, d, family=f)
    toc = time.time()
    m.fit(b)
    tic = time.time()
    # elapsed fitting time in seconds (NOTE: tic/toc are named the opposite
    # of the usual convention)
    print tic-toc

if example == 3:
    print "Poisson"
    f = family.Poisson()
    # Rescale y so the Poisson means stay in a reasonable range.
    y = y/y.max() * 3
    yp = f.link.inverse(y)
    p = np.asarray([scipy.stats.poisson.rvs(p) for p in f.link.inverse(y)], float)
    p.shape = y.shape
    m = GAM(p, d, family=f)
    toc = time.time()
    m.fit(p)
    tic = time.time()
    # elapsed fitting time in seconds
    print tic-toc
    # Compare fitted smoothers (red) against the true components.
    plt.figure()
    plt.plot(x1, standardize(m.smoothers[0](x1)), 'r')
    plt.plot(x1, standardize(f1(x1)), linewidth=2)
    plt.figure()
    plt.plot(x2, standardize(m.smoothers[1](x2)), 'r')
    plt.plot(x2, standardize(f2(x2)), linewidth=2)
    plt.show()

## pylab.figure(num=1)
## pylab.plot(x1, standardize(m.smoothers[0](x1)), 'b')
## pylab.plot(x1, standardize(f1(x1)), linewidth=2)
## pylab.figure(num=2)
## pylab.plot(x2, standardize(m.smoothers[1](x2)), 'b')
## pylab.plot(x2, standardize(f2(x2)), linewidth=2)
## pylab.show()
|
bsd-3-clause
|
zerkh/theano_lstm
|
theano_lstm/__init__.py
|
9
|
21559
|
"""
Small Theano LSTM recurrent network module.
@author: Jonathan Raiman
@date: December 10th 2014
Implements most of the great things that came out
in 2014 concerning recurrent neural networks, and
some good optimizers for these types of networks.
Note (from 5 January 2015): Dropout api is a bit sophisticated due to the way
random number generators are dealt with in Theano's scan.
"""
import theano, theano.tensor as T
import numpy as np
from collections import OrderedDict
# Module-level RNG state: fixed seeds make dropout masks and weight
# initialization reproducible across runs.
srng = theano.tensor.shared_randomstreams.RandomStreams(1234)
np_rng = np.random.RandomState(1234)
from .masked_loss import masked_loss, masked_loss_dx
from .shared_memory import wrap_params, borrow_memory, borrow_all_memories
class GradClip(theano.compile.ViewOp):
    """
    Identity op whose gradient is clipped to a fixed interval, as Alex
    Graves does for recurrent networks: the forward pass is a view, while
    the backward pass clips each gradient to
    [clip_lower_bound, clip_upper_bound] to prevent gradient explosion.

    Originally posted by Alex Lamb
    (https://groups.google.com/forum/#!topic/theano-dev/GaJwGw6emK0).
    """

    def __init__(self, clip_lower_bound, clip_upper_bound):
        assert clip_upper_bound >= clip_lower_bound
        self.clip_lower_bound = clip_lower_bound
        self.clip_upper_bound = clip_upper_bound

    def grad(self, args, g_outs):
        clipped = []
        for g_out in g_outs:
            clipped.append(T.clip(g_out, self.clip_lower_bound,
                                  self.clip_upper_bound))
        return clipped
def clip_gradient(x, bound):
    """Wrap `x` in a GradClip op whose gradients are clipped to [-bound, bound]."""
    op = GradClip(-bound, bound)
    # Register an optimization that removes the identity op from the forward
    # graph during canonicalization; a duplicate registration for the same
    # bound raises ValueError, which we deliberately ignore.
    try:
        T.opt.register_canonicalize(theano.gof.OpRemove(op),
                                    name='grad_clip_%.1f' % (bound))
    except ValueError:
        pass
    return op(x)
def create_shared(out_size, in_size=None, name=None):
    """
    Create a randomly-initialized theano shared vector or matrix.

    Inputs
    ------

    out_size int            : outer dimension of the vector or matrix
    in_size  int (optional) : for a matrix, the inner dimension
    name     str (optional) : name for the shared variable

    Outputs
    -------

    theano shared : the shared vector/matrix, randomly initialized
    """
    shape = (out_size,) if in_size is None else (out_size, in_size)
    return theano.shared(random_initialization(shape), name=name)
def random_initialization(size):
    """Gaussian initialization scaled by 1/size[0], cast to theano's floatX."""
    values = np_rng.standard_normal(size) * 1. / size[0]
    return values.astype(theano.config.floatX)
def Dropout(shape, prob):
    """
    Return a dropout mask of the given shape.

    The probability of any element of the mask being zero is `prob`.

    Inputs
    ------

    shape tuple / symbolic shape : shape of the mask to sample.
    prob  float, variable        : probability of dropping an element.

    Outputs
    -------

    mask theano variable : binary mask (cast to floatX) to multiply
                           activations with.
    """
    mask = srng.binomial(n=1, p=1-prob, size=shape)
    # binomial samples are integers; cast so downstream math stays in floatX
    return T.cast(mask, theano.config.floatX)
def MultiDropout(shapes, dropout = 0.):
    """
    Return all the masks needed for dropout outside of a scan loop:
    one mask (via Dropout) per entry of `shapes`.
    """
    masks = []
    for mask_shape in shapes:
        masks.append(Dropout(mask_shape, dropout))
    return masks
class Layer(object):
    """
    Base object for neural network layers.

    A layer owns a connection matrix W and a bias vector b and maps an
    input x to its hidden activation

        > y = f ( W * x + b )

    where f is the activation function.
    """

    def __init__(self, input_size, hidden_size, activation, clip_gradients=False):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.activation = activation
        self.clip_gradients = clip_gradients
        self.is_recursive = False
        self.create_variables()

    def create_variables(self):
        """Allocate the shared connection matrix and bias vector."""
        self.linear_matrix = create_shared(self.hidden_size, self.input_size,
                                           name="Layer.linear_matrix")
        self.bias_matrix = create_shared(self.hidden_size,
                                         name="Layer.bias_matrix")

    def activate(self, x):
        """Return f(W x + b); handles both single vectors and batches."""
        if self.clip_gradients is not False:
            x = clip_gradient(x, self.clip_gradients)
        if x.ndim > 1:
            # batched input: work with columns, then transpose back
            pre = T.dot(self.linear_matrix, x.T) + self.bias_matrix[:, None]
            return self.activation(pre).T
        pre = T.dot(self.linear_matrix, x) + self.bias_matrix
        return self.activation(pre)

    @property
    def params(self):
        return [self.linear_matrix, self.bias_matrix]

    @params.setter
    def params(self, param_list):
        self.linear_matrix.set_value(param_list[0].get_value())
        self.bias_matrix.set_value(param_list[1].get_value())
class Embedding(Layer):
    """
    A lookup table mapping integer symbol ids to dense vectors, useful for
    word vectors or other distributed representations.

    use #activate(T.iscalar()) or #activate(T.ivector()) to embed
    a symbol.
    """

    def __init__(self, vocabulary_size, hidden_size):
        """
        Vocabulary size is the number of different symbols to store,
        and hidden_size is the size of their embedding.
        """
        self.vocabulary_size = vocabulary_size
        self.hidden_size = hidden_size
        self.create_variables()
        self.is_recursive = False

    def create_variables(self):
        self.embedding_matrix = create_shared(self.vocabulary_size,
                                              self.hidden_size,
                                              name='Embedding.embedding_matrix')

    def activate(self, x):
        """Return the embedding row(s) selected by integer index/indices x."""
        return self.embedding_matrix[x]

    @property
    def params(self):
        return [self.embedding_matrix]

    @params.setter
    def params(self, param_list):
        self.embedding_matrix.set_value(param_list[0].get_value())
class RNN(Layer):
    """
    Special recurrent layer that combines the previous hidden activation h
    with an observation x through a single affine map:

        > y = f ( W * [x, h] + b )

    Note: x and h are concatenated in the activation.
    """

    def __init__(self, *args, **kwargs):
        super(RNN, self).__init__(*args, **kwargs)
        self.is_recursive = True

    def create_variables(self):
        """Allocate W, b and the learned initial hidden state."""
        self.linear_matrix = create_shared(self.hidden_size,
                                           self.input_size + self.hidden_size,
                                           name="RNN.linear_matrix")
        self.bias_matrix = create_shared(self.hidden_size,
                                         name="RNN.bias_matrix")
        self.initial_hidden_state = create_shared(self.hidden_size,
                                                  name="RNN.initial_hidden_state")

    def activate(self, x, h):
        """Return f(W [x, h] + b) for one step (batched or single example)."""
        if self.clip_gradients is not False:
            x = clip_gradient(x, self.clip_gradients)
            h = clip_gradient(h, self.clip_gradients)
        if x.ndim > 1:
            joint = T.concatenate([x, h], axis=1).T
            pre = T.dot(self.linear_matrix, joint) + self.bias_matrix[:, None]
            return self.activation(pre).T
        joint = T.concatenate([x, h])
        return self.activation(T.dot(self.linear_matrix, joint) + self.bias_matrix)

    @property
    def params(self):
        return [self.linear_matrix, self.bias_matrix]

    @params.setter
    def params(self, param_list):
        self.linear_matrix.set_value(param_list[0].get_value())
        self.bias_matrix.set_value(param_list[1].get_value())
class GRU(RNN):
    """
    Gated Recurrent Unit: a reset gate damps the previous hidden state
    before a candidate state is computed, and an update gate interpolates
    between the old and the candidate state.
    """

    def create_variables(self):
        """Create the three internal gate layers."""
        # BUG FIX: the original referenced ``theano_lstm.RNN``, but this module
        # *is* theano_lstm and never imports itself, so calling this method
        # raised NameError.  Use the module-local RNN class directly.
        self.reset_layer = RNN(self.input_size, self.hidden_size,
                               activation=T.nnet.sigmoid)
        self.memory_interpolation_layer = RNN(self.input_size, self.hidden_size,
                                              activation=T.nnet.sigmoid)
        self.memory_to_memory_layer = RNN(self.input_size, self.hidden_size,
                                          activation=T.tanh)
        self.internal_layers = [
            self.reset_layer,
            self.memory_interpolation_layer,
            self.memory_to_memory_layer
        ]

    @property
    def params(self):
        """Flat list of the three gate layers' parameters."""
        return [param for layer in self.internal_layers for param in layer.params]

    @params.setter
    def params(self, param_list):
        # Each internal RNN contributes (W, b); distribute in getter order.
        assert(len(param_list) == 6)
        self.reset_layer.params = param_list[0:2]
        self.memory_interpolation_layer.params = param_list[2:4]
        self.memory_to_memory_layer.params = param_list[4:6]

    def activate(self, x, h):
        """One GRU step: return the new hidden state for observation x."""
        reset_gate = self.reset_layer.activate(x, h)
        # the new state dampened by resetting
        reset_h = reset_gate * h
        # the candidate hidden state, computed from the damped state
        candidate_h = self.memory_to_memory_layer.activate(x, reset_h)
        # how much of the candidate to blend into the new hidden state
        update_gate = self.memory_interpolation_layer.activate(x, h)
        # the new state interpolated between candidate and old:
        new_h = (
            h * (1.0 - update_gate) +
            candidate_h * update_gate
        )
        return new_h
class LSTM(RNN):
    """
    The structure of the LSTM allows it to learn on problems with
    long term dependencies relatively easily. The "long term"
    memory is stored in a vector of memory cells c.
    Although many LSTM architectures differ in their connectivity
    structure and activation functions, all LSTM architectures have
    memory cells that are suitable for storing information for long
    periods of time. Here we implement the LSTM from Graves et al.
    (2013).
    """
    def create_variables(self):
        """
        Create the different LSTM gates and
        their variables, along with the initial
        hidden state for the memory cells and
        the initial hidden activation.
        """
        # input gate for cells
        self.in_gate = Layer(self.input_size + self.hidden_size, self.hidden_size, T.nnet.sigmoid, self.clip_gradients)
        # forget gate for cells
        self.forget_gate = Layer(self.input_size + self.hidden_size, self.hidden_size, T.nnet.sigmoid, self.clip_gradients)
        # input modulation for cells
        self.in_gate2 = Layer(self.input_size + self.hidden_size, self.hidden_size, self.activation, self.clip_gradients)
        # output modulation
        self.out_gate = Layer(self.input_size + self.hidden_size, self.hidden_size, T.nnet.sigmoid, self.clip_gradients)
        # keep these layers organized
        self.internal_layers = [self.in_gate, self.forget_gate, self.in_gate2, self.out_gate]
        # store the memory cells in first n spots, and store the current
        # output in the next n spots:
        self.initial_hidden_state = create_shared(self.hidden_size * 2, name="LSTM.initial_hidden_state")
    @property
    def params(self):
        """
        Parameters given by the 4 gates and the
        initial hidden activation of this LSTM cell
        layer.
        """
        return [param for layer in self.internal_layers for param in layer.params]
    @params.setter
    def params(self, param_list):
        # Distribute the flat parameter list back to the four gate layers,
        # in the same order as the `params` getter produced it.
        start = 0
        for layer in self.internal_layers:
            end = start + len(layer.params)
            layer.params = param_list[start:end]
            start = end
    def postprocess_activation(self, x, *args):
        # Only the hidden-activation half (second hidden_size entries) is
        # passed up the stack; the memory cells stay private to this layer.
        if x.ndim > 1:
            return x[:, self.hidden_size:]
        else:
            return x[self.hidden_size:]
    def activate(self, x, h):
        """
        The hidden activation, h, of the network, along
        with the new values for the memory cells, c,
        Both are concatenated as follows:

        > y = f( x, past )

        Or more visibly, with past = [prev_c, prev_h]

        > [c, h] = f( x, [prev_c, prev_h] )
        """
        if h.ndim > 1:
            #previous memory cell values
            prev_c = h[:, :self.hidden_size]
            #previous activations of the hidden layer
            prev_h = h[:, self.hidden_size:]
        else:
            #previous memory cell values
            prev_c = h[:self.hidden_size]
            #previous activations of the hidden layer
            prev_h = h[self.hidden_size:]
        # input and previous hidden constitute the actual
        # input to the LSTM:
        if h.ndim > 1:
            obs = T.concatenate([x, prev_h], axis=1)
        else:
            obs = T.concatenate([x, prev_h])
        # TODO could we combine these 4 linear transformations for efficiency? (e.g., http://arxiv.org/pdf/1410.4615.pdf, page 5)
        # how much to add to the memory cells
        in_gate = self.in_gate.activate(obs)
        # how much to forget the current contents of the memory
        forget_gate = self.forget_gate.activate(obs)
        # modulate the input for the memory cells
        in_gate2 = self.in_gate2.activate(obs)
        # new memory cells
        next_c = forget_gate * prev_c + in_gate2 * in_gate
        # modulate the memory cells to create the new output
        out_gate = self.out_gate.activate(obs)
        # new hidden output
        next_h = out_gate * T.tanh(next_c)
        if h.ndim > 1:
            return T.concatenate([next_c, next_h], axis=1)
        else:
            return T.concatenate([next_c, next_h])
class GatedInput(RNN):
    """
    Learns a scalar gate over the input: activate() computes one sigmoid
    unit per example from [x, h], and postprocess_activation() multiplies
    the raw input by that gate.
    """
    def create_variables(self):
        # single sigmoid gate (output size 1) computed from [x, h]
        self.in_gate = Layer(self.input_size + self.hidden_size, 1, T.nnet.sigmoid, self.clip_gradients)
        self.internal_layers = [self.in_gate]
    @property
    def params(self):
        """
        Parameters of the single input gate layer.
        """
        return [param for layer in self.internal_layers
                for param in layer.params]
    @params.setter
    def params(self, param_list):
        start = 0
        for layer in self.internal_layers:
            end = start + len(layer.params)
            layer.params = param_list[start:end]
            start = end
    def activate(self, x, h):
        # input and previous hidden constitute the actual
        # input to the gate:
        if h.ndim > 1:
            obs = T.concatenate([x, h], axis=1)
        else:
            obs = T.concatenate([x, h])
        gate = self.in_gate.activate(obs)
        # reduce the gate output to a scalar per example
        if h.ndim > 1:
            gate = gate[:,0][:,None]
        else:
            gate = gate[0]
        return gate
    def postprocess_activation(self, gate, x, h):
        # scale the raw input by the learned gate
        return gate * x
def apply_dropout(x, mask):
    """Multiply `x` by a dropout `mask`, or return `x` untouched if mask is None."""
    if mask is None:
        return x
    return mask * x
class StackedCells(object):
    """
    Sequentially connect several recurrent layers.

    celltypes can be RNN or LSTM.
    """
    def __init__(self, input_size, celltype=RNN, layers=None,
                 activation=lambda x:x, clip_gradients=False):
        # `layers` lists the hidden size of each stacked cell; None (rather
        # than a mutable default) avoids sharing one list across instances.
        if layers is None:
            layers = []
        self.input_size = input_size
        self.clip_gradients = clip_gradients
        self.create_layers(layers, activation, celltype)
    def create_layers(self, layer_sizes, activation_type, celltype):
        # Instantiate one cell per size, chaining each layer's input
        # dimension to the previous layer's output dimension.
        self.layers = []
        prev_size = self.input_size
        for k, layer_size in enumerate(layer_sizes):
            layer = celltype(prev_size, layer_size, activation_type,
                             clip_gradients=self.clip_gradients)
            self.layers.append(layer)
            prev_size = layer_size
    @property
    def params(self):
        # Flat list of every layer's parameters, in stack order.
        return [param for layer in self.layers for param in layer.params]
    @params.setter
    def params(self, param_list):
        # Distribute a flat parameter list back to the layers, in the same
        # order as the getter produced it.
        start = 0
        for layer in self.layers:
            end = start + len(layer.params)
            layer.params = param_list[start:end]
            start = end
    def forward(self, x, prev_hiddens=None, dropout=None):
        """
        Return new hidden activations for all stacked RNNs

        x            : the observation for this timestep
        prev_hiddens : per-layer previous hidden states; None means "use each
                       layer's learned initial state", broadcast over the
                       batch when x is batched
        dropout      : optional list with one dropout mask per layer
        """
        if dropout is None:
            dropout = []
        if prev_hiddens is None:
            prev_hiddens = [(T.repeat(T.shape_padleft(layer.initial_hidden_state),
                                      x.shape[0], axis=0)
                             if x.ndim > 1 else layer.initial_hidden_state)
                            if hasattr(layer, 'initial_hidden_state') else None
                            for layer in self.layers]
        out = []
        layer_input = x
        for k, layer in enumerate(self.layers):
            level_out = layer_input
            if len(dropout) > 0:
                level_out = apply_dropout(layer_input, dropout[k])
            if layer.is_recursive:
                level_out = layer.activate(level_out, prev_hiddens[k])
            else:
                level_out = layer.activate(level_out)
            out.append(level_out)
            # deliberate choice to change the upward structure here
            # in an RNN, there is only one kind of hidden values
            if hasattr(layer, 'postprocess_activation'):
                # in this case the hidden activation has memory cells
                # that are not shared upwards
                # along with hidden activations that can be sent
                # upwards
                if layer.is_recursive:
                    level_out = layer.postprocess_activation(level_out, layer_input, prev_hiddens[k])
                else:
                    level_out = layer.postprocess_activation(level_out, layer_input)
            layer_input = level_out
        return out
def create_optimization_updates(cost, params, updates=None, max_norm=5.0,
                                lr=0.01, eps=1e-6, rho=0.95,
                                method = "adadelta", gradients = None):
    """
    Get the updates for a gradient descent optimizer using
    SGD, AdaDelta, or AdaGrad.

    Returns the shared variables for the gradient caches,
    and the updates dictionary for compilation by a
    theano function.

    Inputs
    ------

    cost     theano variable : what to minimize
    params   list            : list of theano variables
                               with respect to which
                               the gradient is taken.
    max_norm float           : cap on excess gradients
    lr       float           : base learning rate for
                               adagrad and SGD
    eps      float           : numerical stability value
                               to not divide by zero
                               sometimes
    rho      float           : adadelta hyperparameter.
    method   str             : 'adagrad', 'adadelta', or 'sgd'.

    Outputs:
    --------

    updates  OrderedDict   : the updates to pass to a
                             theano function
    gsums    list          : gradient caches for Adagrad
                             and Adadelta
    xsums    list          : gradient caches for AdaDelta only
    lr       theano shared : learning rate
    max_norm theano_shared : normalizing clipping value for
                             excessive gradients (exploding).

    """
    # Hyperparameters become shared variables so callers can retune them
    # without recompiling the theano function.
    lr = theano.shared(np.float64(lr).astype(theano.config.floatX))
    eps = np.float64(eps).astype(theano.config.floatX)
    rho = theano.shared(np.float64(rho).astype(theano.config.floatX))
    if max_norm is not None and max_norm is not False:
        max_norm = theano.shared(np.float64(max_norm).astype(theano.config.floatX))
    # Per-parameter caches: gsums for adagrad/adadelta (squared gradients),
    # xsums for adadelta only (squared steps).
    gsums = [theano.shared(np.zeros_like(param.get_value(borrow=True))) if (method == 'adadelta' or method == 'adagrad') else None for param in params]
    xsums = [theano.shared(np.zeros_like(param.get_value(borrow=True))) if method == 'adadelta' else None for param in params]
    gparams = T.grad(cost, params) if gradients is None else gradients
    if updates is None:
        updates = OrderedDict()
    for gparam, param, gsum, xsum in zip(gparams, params, gsums, xsums):
        # clip gradients if they get too big
        if max_norm is not None and max_norm is not False:
            grad_norm = gparam.norm(L=2)
            gparam = (T.minimum(max_norm, grad_norm)/ (grad_norm + eps)) * gparam
        if method == 'adadelta':
            # accumulate squared gradients, derive the step from the ratio of
            # step-RMS to gradient-RMS, then accumulate squared steps
            updates[gsum] = T.cast(rho * gsum + (1. - rho) * (gparam **2), theano.config.floatX)
            dparam = -T.sqrt((xsum + eps) / (updates[gsum] + eps)) * gparam
            updates[xsum] = T.cast(rho * xsum + (1. - rho) * (dparam **2), theano.config.floatX)
            updates[param] = T.cast(param + dparam, theano.config.floatX)
        elif method == 'adagrad':
            updates[gsum] = T.cast(gsum + (gparam ** 2), theano.config.floatX)
            updates[param] = T.cast(param - lr * (gparam / (T.sqrt(updates[gsum] + eps))), theano.config.floatX)
        else:
            # plain SGD (NOTE(review): not cast to floatX, unlike the other
            # methods -- presumably fine when params are already floatX)
            updates[param] = param - gparam * lr
    if method == 'adadelta':
        # NOTE(review): for adadelta the returned "lr" slot actually carries
        # rho (the one tunable knob of that method) -- confirm before treating
        # it as a learning rate.
        lr = rho
    return updates, gsums, xsums, lr, max_norm
# Public API of the module.  "GRU" was defined in this file but missing from
# __all__, so "from theano_lstm import *" silently failed to expose it; added
# for consistency with the other cell types.
__all__ = [
    "create_optimization_updates",
    "masked_loss",
    "masked_loss_dx",
    "clip_gradient",
    "create_shared",
    "Dropout",
    "apply_dropout",
    "StackedCells",
    "Layer",
    "LSTM",
    "RNN",
    "GRU",
    "GatedInput",
    "Embedding",
    "MultiDropout",
    "wrap_params",
    "borrow_memory",
    "borrow_all_memories"
    ]
|
bsd-3-clause
|
sanyaade-teachings/gyp
|
test/subdirectory/gyptest-top-all.py
|
261
|
1373
|
#!/usr/bin/env python
# Copyright (c) 2009 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies building a target and a subsidiary dependent target from a
.gyp file in a subdirectory, without specifying an explicit output build
directory, and using the generated solution or project file at the top
of the tree as the entry point.
There is a difference here in the default behavior of the underlying
build tools. Specifically, when building the entire "solution", Xcode
puts the output of each project relative to the .xcodeproj directory,
while Visual Studio (and our implementation of Make) put it
in a build directory relative to the "solution"--that is, the entry-point
from which you built the entire tree.
"""
import TestGyp
test = TestGyp.TestGyp()

# Generate build files from the .gyp in the subdirectory, then relocate the
# tree so no stale in-source state is reused.
test.run_gyp('prog1.gyp', chdir='src')
test.relocate('src', 'relocate/src')

# Build everything from the top-level entry point.
test.build('prog1.gyp', test.ALL, chdir='relocate/src')
test.run_built_executable('prog1',
                          stdout="Hello from prog1.c\n",
                          chdir='relocate/src')

# Xcode places each project's output next to its .xcodeproj, while the other
# generators collect outputs under the entry-point directory (see module
# docstring), so the location of prog2 differs per generator.
if test.format == 'xcode':
    chdir = 'relocate/src/subdir'
else:
    chdir = 'relocate/src'
test.run_built_executable('prog2',
                          chdir=chdir,
                          stdout="Hello from prog2.c\n")

test.pass_test()
|
bsd-3-clause
|
ezequielpereira/Time-Line
|
autopilot/autopilotlib/manuscript/instructionpopup.py
|
3
|
1635
|
# Copyright (C) 2009, 2010, 2011 Rickard Lindberg, Roger Lindberg
#
# This file is part of Timeline.
#
# Timeline is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Timeline is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Timeline. If not, see <http://www.gnu.org/licenses/>.
import wx
class InstructionPopup(wx.PopupWindow):
    """Borderless popup shown near the bottom-right of the display; its text
    is updated through SetText()."""

    def __init__(self, parent):
        self.parent = parent
        wx.PopupWindow.__init__(self, parent, wx.SIMPLE_BORDER)
        self._create_gui()
        self.Show(True)
        # Refresh once the event loop runs so the popup paints immediately.
        wx.CallAfter(self.Refresh)

    def SetText(self, text):
        # Replace the displayed instruction text.
        self.st.SetLabel(text)

    def _create_gui(self):
        self.SetBackgroundColour("GOLDENROD")
        self.st = wx.StaticText(self, -1, "", pos=(10,10))
        sz = self.st.GetBestSize()
        # Size from the (empty) label's best size plus padding; the extra
        # 350px presumably leaves room for text set later -- TODO confirm.
        self.SetSize((sz.width + 20 + 350, sz.height + 20))
        # Position near the bottom-right corner of the primary display.
        w, h = wx.DisplaySize()
        w1, h1 = self.GetSize()
        x = w - w1 - 20
        y = h - 2.5 * h1
        self.SetPosition((x,y))
        # TODO:
        # Adjustments for two screens
        # displays = (wx.Display(i) for i in range(wx.Display.GetCount()))
        # sizes = [display.GetGeometry().GetSize() for display in displays]
|
gpl-3.0
|
shujaatak/UAV_MissionPlanner
|
Lib/site-packages/numpy/distutils/npy_pkg_config.py
|
53
|
13468
|
import sys
if sys.version_info[0] < 3:
from ConfigParser import SafeConfigParser, NoOptionError
else:
from configparser import SafeConfigParser, NoOptionError
import re
import os
import shlex
# Names exported by "from npy_pkg_config import *".
__all__ = ['FormatError', 'PkgNotFound', 'LibraryInfo', 'VariableSet',
        'read_config', 'parse_flags']
_VAR = re.compile('\$\{([a-zA-Z0-9_-]+)\}')
class FormatError(IOError):
    """
    Exception raised when a configuration file cannot be parsed.
    """

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
class PkgNotFound(IOError):
    """Exception raised when a package can not be located."""

    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg
def parse_flags(line):
    """
    Parse a line from a config file containing compile flags.

    Parameters
    ----------
    line : str
        A single line containing one or more compile flags.

    Returns
    -------
    d : dict
        Dictionary of parsed flags, split into relevant categories.
        These categories are the keys of `d`:

        * 'include_dirs'  (-I)
        * 'library_dirs'  (-L)
        * 'libraries'     (-l)
        * 'macros'        (-D)
        * 'ignored'       (anything else)
    """
    lexer = shlex.shlex(line)
    lexer.whitespace_split = True

    flags = {'include_dirs': [], 'library_dirs': [], 'libraries': [],
             'macros': [], 'ignored': []}

    token = lexer.get_token()
    while token:
        if token.startswith('-I'):
            # "-Ipath" attached, or "-I path" as two tokens
            if len(token) > 2:
                flags['include_dirs'].append(token[2:])
            else:
                flags['include_dirs'].append(lexer.get_token())
        elif token.startswith('-L'):
            if len(token) > 2:
                flags['library_dirs'].append(token[2:])
            else:
                flags['library_dirs'].append(lexer.get_token())
        elif token.startswith('-l'):
            flags['libraries'].append(token[2:])
        elif token.startswith('-D'):
            flags['macros'].append(token[2:])
        else:
            flags['ignored'].append(token)
        token = lexer.get_token()

    return flags
def _escape_backslash(val):
return val.replace('\\', '\\\\')
class LibraryInfo(object):
    """
    Object containing build information about a library.

    Parameters
    ----------
    name : str
        The library name.
    description : str
        Description of the library.
    version : str
        Version string.
    sections : dict
        The sections of the configuration file for the library. The keys are
        the section headers, the values the text under each header.
    vars : class instance
        A `VariableSet` instance, which contains ``(name, value)`` pairs for
        variables defined in the configuration file for the library.
    requires : sequence, optional
        The required libraries for the library to be installed.

    Notes
    -----
    All input parameters (except "sections" which is a method) are available as
    attributes of the same name.
    """
    def __init__(self, name, description, version, sections, vars, requires=None):
        self.name = name
        self.description = description
        if requires:
            self.requires = requires
        else:
            self.requires = []
        self.version = version
        self._sections = sections
        self.vars = vars

    def sections(self):
        """
        Return the section headers of the config file.

        Parameters
        ----------
        None

        Returns
        -------
        keys : list of str
            The list of section headers.
        """
        return self._sections.keys()

    def cflags(self, section="default"):
        # Interpolate ${var} references, then escape backslashes for output.
        val = self.vars.interpolate(self._sections[section]['cflags'])
        return _escape_backslash(val)

    def libs(self, section="default"):
        val = self.vars.interpolate(self._sections[section]['libs'])
        return _escape_backslash(val)

    def __str__(self):
        m = ['Name: %s' % self.name]
        m.append('Description: %s' % self.description)
        # BUG FIX: the branches were inverted -- the required library names
        # were only printed when the requires list was empty.
        if self.requires:
            m.append('Requires: %s' % ",".join(self.requires))
        else:
            m.append('Requires:')
        m.append('Version: %s' % self.version)

        return "\n".join(m)
class VariableSet(object):
    """
    Container object for the variables defined in a config file.

    `VariableSet` can be used as a plain dictionary, with the variable names
    as keys.

    Parameters
    ----------
    d : dict
        Dict of items in the "variables" section of the configuration file.
    """

    def __init__(self, d):
        self._raw_data = dict(d.items())
        self._re = {}
        self._re_sub = {}
        self._init_parse()

    def _init_parse(self):
        for name, value in self._raw_data.items():
            self._init_parse_var(name, value)

    def _init_parse_var(self, name, value):
        # Pre-compile one ${name} pattern per variable.
        self._re[name] = re.compile(r'\$\{%s\}' % name)
        self._re_sub[name] = value

    def interpolate(self, value):
        """Expand ${var} references, repeating until no marker remains or a
        full pass leaves the string unchanged."""
        def _substitute_all(text):
            for name in self._re.keys():
                text = self._re[name].sub(self._re_sub[name], text)
            return text

        while _VAR.search(value):
            expanded = _substitute_all(value)
            if expanded == value:
                break
            value = expanded
        return value

    def variables(self):
        """
        Return the list of variable names.

        Parameters
        ----------
        None

        Returns
        -------
        names : list of str
            The names of all variables in the `VariableSet` instance.
        """
        return self._raw_data.keys()

    # Emulate a dict to set/get variables values
    def __getitem__(self, name):
        return self._raw_data[name]

    def __setitem__(self, name, value):
        self._raw_data[name] = value
        self._init_parse_var(name, value)
def parse_meta(config):
    """Parse and validate the [meta] section of a config parser.

    Returns a dict with at least the mandatory keys 'name', 'description'
    and 'version'; 'requires' defaults to an empty list.

    Raises FormatError if the section or a mandatory option is missing.
    """
    if not config.has_section('meta'):
        raise FormatError("No meta section found !")
    d = dict(config.items('meta'))
    for k in ['name', 'description', 'version']:
        # ``in`` instead of the deprecated (and Python-3-removed) dict.has_key().
        if k not in d:
            raise FormatError("Option %s (section [meta]) is mandatory, "
                              "but not found" % k)
    if 'requires' not in d:
        d['requires'] = []
    return d
def parse_variables(config):
    """Parse the [variables] section into a VariableSet.

    Raises FormatError if the section is missing.
    """
    if not config.has_section('variables'):
        raise FormatError("No variables section found !")
    return VariableSet(dict(config.items("variables")))
def parse_sections(config):
    # NOTE(review): this function is broken/dead code -- ``meta_d`` and ``r``
    # are never defined anywhere, so calling it raises NameError.  Callers
    # appear to use parse_config() instead; confirm before removing.
    return meta_d, r
def pkg_to_filename(pkg_name):
    """Return the .ini configuration file name for a package name."""
    return '{0}.ini'.format(pkg_name)
def parse_config(filename, dirs=None):
    """Parse one .ini file, looking for it in each of ``dirs`` if given.

    Returns a 4-tuple ``(meta, vars, sections, requires)`` of plain dicts;
    variable interpolation is NOT performed at this stage.

    Raises PkgNotFound if the file exists in none of the candidate
    directories.
    """
    if dirs:
        filenames = [os.path.join(d, filename) for d in dirs]
    else:
        filenames = [filename]
    config = SafeConfigParser()
    # ConfigParser.read returns the list of files successfully read.
    n = config.read(filenames)
    if not len(n) >= 1:
        raise PkgNotFound("Could not find file(s) %s" % str(filenames))
    # Parse meta and variables sections
    meta = parse_meta(config)
    vars = {}
    if config.has_section('variables'):
        for name, value in config.items("variables"):
            vars[name] = _escape_backslash(value)
    # Parse "normal" sections
    secs = [s for s in config.sections() if not s in ['meta', 'variables']]
    sections = {}
    requires = {}
    for s in secs:
        d = {}
        # 'requires' is pulled out separately so dependencies can be
        # resolved recursively by _read_config_imp.
        if config.has_option(s, "requires"):
            requires[s] = config.get(s, 'requires')
        for name, value in config.items(s):
            d[name] = value
        sections[s] = d
    return meta, vars, sections, requires

def _read_config_imp(filenames, dirs=None):
    """Read a config file and, recursively, the configs it requires,
    merging their variables and section contents; return a LibraryInfo.
    """
    def _read_config(f):
        meta, vars, sections, reqs = parse_config(f, dirs)
        # recursively add sections and variables of required libraries
        for rname, rvalue in reqs.items():
            nmeta, nvars, nsections, nreqs = _read_config(pkg_to_filename(rvalue))
            # Update var dict for variables not in 'top' config file
            for k, v in nvars.items():
                if not vars.has_key(k):
                    vars[k] = v
            # Update sec dict
            for oname, ovalue in nsections[rname].items():
                if ovalue:
                    sections[rname][oname] += ' %s' % ovalue
        return meta, vars, sections, reqs
    meta, vars, sections, reqs = _read_config(filenames)
    # FIXME: document this. If pkgname is defined in the variables section, and
    # there is no pkgdir variable defined, pkgdir is automatically defined to
    # the path of pkgname. This requires the package to be imported to work
    if not vars.has_key("pkgdir") and vars.has_key("pkgname"):
        pkgname = vars["pkgname"]
        if not pkgname in sys.modules:
            raise ValueError("You should import %s to get information on %s" %
                             (pkgname, meta["name"]))
        mod = sys.modules[pkgname]
        vars["pkgdir"] = _escape_backslash(os.path.dirname(mod.__file__))
    return LibraryInfo(name=meta["name"], description=meta["description"],
            version=meta["version"], sections=sections, vars=VariableSet(vars))
# Trivial cache to cache LibraryInfo instances creation. To be really
# efficient, the cache should be handled in read_config, since a same file can
# be parsed many time outside LibraryInfo creation, but I doubt this will be a
# problem in practice
_CACHE = {}

def read_config(pkgname, dirs=None):
    """
    Return library info for a package from its configuration file.
    Parameters
    ----------
    pkgname : str
        Name of the package (should match the name of the .ini file, without
        the extension, e.g. foo for the file foo.ini).
    dirs : sequence, optional
        If given, should be a sequence of directories - usually including
        the NumPy base directory - where to look for npy-pkg-config files.
    Returns
    -------
    pkginfo : class instance
        The `LibraryInfo` instance containing the build information.
    Raises
    ------
    PkgNotFound
        If the package is not found.
    See Also
    --------
    misc_util.get_info, misc_util.get_pkg_info
    Examples
    --------
    >>> npymath_info = np.distutils.npy_pkg_config.read_config('npymath')
    >>> type(npymath_info)
    <class 'numpy.distutils.npy_pkg_config.LibraryInfo'>
    >>> print npymath_info
    Name: npymath
    Description: Portable, core math library implementing C99 standard
    Requires:
    Version: 0.1 #random
    """
    # EAFP: a cache miss is the uncommon case once the cache is warm.
    try:
        return _CACHE[pkgname]
    except KeyError:
        v = _read_config_imp(pkg_to_filename(pkgname), dirs)
        _CACHE[pkgname] = v
        return v
# TODO:
#   - implements version comparison (modversion + atleast)
# pkg-config simple emulator - useful for debugging, and maybe later to query
# the system
if __name__ == '__main__':
    import sys
    from optparse import OptionParser
    import glob
    parser = OptionParser()
    parser.add_option("--cflags", dest="cflags", action="store_true",
                      help="output all preprocessor and compiler flags")
    parser.add_option("--libs", dest="libs", action="store_true",
                      help="output all linker flags")
    parser.add_option("--use-section", dest="section",
                      help="use this section instead of default for options")
    parser.add_option("--version", dest="version", action="store_true",
                      help="output version")
    parser.add_option("--atleast-version", dest="min_version",
                      help="Minimal version")
    # Bug fix: help text was a copy-paste of --atleast-version's
    # "Minimal version"; describe what the option actually does.
    parser.add_option("--list-all", dest="list_all", action="store_true",
                      help="list all packages found in the current directory")
    parser.add_option("--define-variable", dest="define_variable",
                      help="Replace variable with the given value")

    (options, args) = parser.parse_args(sys.argv)
    if len(args) < 2:
        raise ValueError("Expect package name on the command line:")

    if options.list_all:
        files = glob.glob("*.ini")
        for f in files:
            # NOTE(review): read_config() appends ".ini" itself (via
            # pkg_to_filename), so passing a glob result that already ends
            # in ".ini" looks wrong -- confirm.
            info = read_config(f)
            print ("%s\t%s - %s" % (info.name, info.name, info.description))

    pkg_name = args[1]
    import os
    d = os.environ.get('NPY_PKG_CONFIG_PATH')
    if d:
        info = read_config(pkg_name, ['numpy/core/lib/npy-pkg-config', '.', d])
    else:
        info = read_config(pkg_name, ['numpy/core/lib/npy-pkg-config', '.'])

    if options.section:
        section = options.section
    else:
        section = "default"

    if options.define_variable:
        m = re.search('([\S]+)=([\S]+)', options.define_variable)
        if not m:
            raise ValueError("--define-variable option should be of " \
                             "the form --define-variable=foo=bar")
        else:
            name = m.group(1)
            value = m.group(2)
            info.vars[name] = value

    if options.cflags:
        print (info.cflags(section))
    if options.libs:
        print (info.libs(section))
    if options.version:
        print (info.version)
    if options.min_version:
        print (info.version >= options.min_version)
|
gpl-2.0
|
slightlymadphoenix/activityPointsApp
|
activitypoints/lib/python3.5/site-packages/django/db/backends/utils.py
|
39
|
7380
|
from __future__ import unicode_literals
import datetime
import decimal
import hashlib
import logging
import re
from time import time
from django.conf import settings
from django.utils.encoding import force_bytes
from django.utils.timezone import utc
# Shared logger used by the debug cursor wrapper below.
logger = logging.getLogger('django.db.backends')
class CursorWrapper(object):
    """Wrap a raw DB-API cursor so database errors are translated via the
    connection's ``wrap_database_errors`` context and broken transactions
    are detected before each query."""
    def __init__(self, cursor, db):
        self.cursor = cursor  # underlying DB-API cursor
        self.db = db  # database wrapper that owns this cursor

    # Cursor attributes that are returned wrapped with error translation
    # (rather than passed through untouched) by __getattr__.
    WRAP_ERROR_ATTRS = frozenset(['fetchone', 'fetchmany', 'fetchall', 'nextset'])

    def __getattr__(self, attr):
        cursor_attr = getattr(self.cursor, attr)
        if attr in CursorWrapper.WRAP_ERROR_ATTRS:
            return self.db.wrap_database_errors(cursor_attr)
        else:
            return cursor_attr

    def __iter__(self):
        with self.db.wrap_database_errors:
            for item in self.cursor:
                yield item

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Close instead of passing through to avoid backend-specific behavior
        # (#17671). Catch errors liberally because errors in cleanup code
        # aren't useful.
        try:
            self.close()
        except self.db.Database.Error:
            pass

    # The following methods cannot be implemented in __getattr__, because the
    # code must run when the method is invoked, not just when it is accessed.
    def callproc(self, procname, params=None):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            if params is None:
                return self.cursor.callproc(procname)
            else:
                return self.cursor.callproc(procname, params)

    def execute(self, sql, params=None):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            if params is None:
                return self.cursor.execute(sql)
            else:
                return self.cursor.execute(sql, params)

    def executemany(self, sql, param_list):
        self.db.validate_no_broken_transaction()
        with self.db.wrap_database_errors:
            return self.cursor.executemany(sql, param_list)
class CursorDebugWrapper(CursorWrapper):
    """CursorWrapper that additionally times each query and records it in
    the connection's ``queries_log``."""

    # XXX callproc isn't instrumented at this time.

    def execute(self, sql, params=None):
        start = time()
        try:
            return super(CursorDebugWrapper, self).execute(sql, params)
        finally:
            # Log in ``finally`` so timing is recorded even when the query
            # raised.
            stop = time()
            duration = stop - start
            sql = self.db.ops.last_executed_query(self.cursor, sql, params)
            self.db.queries_log.append({
                'sql': sql,
                'time': "%.3f" % duration,
            })
            logger.debug(
                '(%.3f) %s; args=%s', duration, sql, params,
                extra={'duration': duration, 'sql': sql, 'params': params}
            )

    def executemany(self, sql, param_list):
        start = time()
        try:
            return super(CursorDebugWrapper, self).executemany(sql, param_list)
        finally:
            stop = time()
            duration = stop - start
            try:
                times = len(param_list)
            except TypeError:  # param_list could be an iterator
                times = '?'
            self.db.queries_log.append({
                'sql': '%s times: %s' % (times, sql),
                'time': "%.3f" % duration,
            })
            logger.debug(
                '(%.3f) %s; args=%s', duration, sql, param_list,
                extra={'duration': duration, 'sql': sql, 'params': param_list}
            )
###############################################
# Converters from database (string) to Python #
###############################################
def typecast_date(s):
    """Parse an ISO 'YYYY-MM-DD' string into a datetime.date.

    Returns None for a null/empty database value.
    """
    if not s:
        return None
    year, month, day = (int(part) for part in s.split('-'))
    return datetime.date(year, month, day)
def typecast_time(s):  # does NOT store time zone information
    """Parse 'HH:MM:SS[.ffffff]' into a datetime.time (None if empty)."""
    if not s:
        return None
    hour, minute, second = s.split(':')
    whole, _, frac = second.partition('.')
    # Right-pad the fractional part to microseconds; missing fraction -> 0.
    micro = int(((frac or '0') + '000000')[:6])
    return datetime.time(int(hour), int(minute), int(whole), micro)
def typecast_timestamp(s):  # does NOT store time zone information
    # "2005-07-29 15:48:00.590358-05"
    # "2005-07-29 09:56:00-05"
    """Parse a database timestamp string into a datetime.datetime
    (timezone-aware iff settings.USE_TZ), a date if there is no time
    part, or None if the input is empty."""
    if not s:
        return None
    if ' ' not in s:
        return typecast_date(s)
    d, t = s.split()
    # Extract timezone information, if it exists. Currently we just throw
    # it away, but in the future we may make use of it.
    if '-' in t:
        t, tz = t.split('-', 1)
        tz = '-' + tz
    elif '+' in t:
        t, tz = t.split('+', 1)
        tz = '+' + tz
    else:
        tz = ''
    dates = d.split('-')
    times = t.split(':')
    seconds = times[2]
    if '.' in seconds:  # check whether seconds have a fractional part
        seconds, microseconds = seconds.split('.')
    else:
        microseconds = '0'
    tzinfo = utc if settings.USE_TZ else None
    return datetime.datetime(
        int(dates[0]), int(dates[1]), int(dates[2]),
        int(times[0]), int(times[1]), int(seconds),
        int((microseconds + '000000')[:6]), tzinfo
    )
def typecast_decimal(s):
    """Convert a database string to decimal.Decimal (None for null/empty)."""
    return None if s is None or s == '' else decimal.Decimal(s)
###############################################
# Converters from Python to database (string) #
###############################################
def rev_typecast_decimal(d):
    """Convert a Decimal to its string form for the database (None passes
    through unchanged)."""
    return None if d is None else str(d)
def truncate_name(name, length=None, hash_len=4):
    """
    Shorten a string to a repeatable mangled version with the given length.

    If a quote stripped name contains a username, e.g. USERNAME"."TABLE,
    only the table portion is truncated.
    """
    matched = re.match(r'([^"]+)"\."([^"]+)', name)
    if matched:
        namespace, table_name = matched.group(1), matched.group(2)
    else:
        namespace, table_name = None, name
    # Nothing to do when no limit was given or the name already fits.
    if length is None or len(table_name) <= length:
        return name
    digest = hashlib.md5(force_bytes(table_name)).hexdigest()[:hash_len]
    prefix = namespace + '"."' if namespace else ''
    return '%s%s%s' % (prefix, table_name[:length - hash_len], digest)
def format_number(value, max_digits, decimal_places):
    """
    Formats a number into a string with the requisite number of digits and
    decimal places.
    """
    if value is None:
        return None
    if isinstance(value, decimal.Decimal):
        ctx = decimal.getcontext().copy()
        if max_digits is not None:
            ctx.prec = max_digits
        if decimal_places is None:
            # No fixed number of places: trap Rounded so silent precision
            # loss raises instead of truncating.
            ctx.traps[decimal.Rounded] = 1
            return "{:f}".format(ctx.create_decimal(value))
        quantum = decimal.Decimal(".1") ** decimal_places
        return "{:f}".format(value.quantize(quantum, context=ctx))
    if decimal_places is not None:
        return "%.*f" % (decimal_places, value)
    return "{:f}".format(value)
def strip_quotes(table_name):
    """
    Strip quotes off of quoted table names to make them safe for use in index
    names, sequence names, etc. For example '"USER"."TABLE"' (an Oracle naming
    scheme) becomes 'USER"."TABLE'.
    """
    if table_name.startswith('"') and table_name.endswith('"'):
        return table_name[1:-1]
    return table_name
|
mit
|
edudobay/mingus
|
scripts/generate_wiki_docs.py
|
1
|
7140
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""
================================================================================
Music theory Python package
Copyright (C) 2008, 2009, Bart Spaans
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
================================================================================
Build the reference documentation for mingus.
================================================================================
"""
import mingus.containers
import mingus.core
from mingus.core import *
from mingus.containers import *
from mingus.extra import *
from mingus.midi import *
import types
import inspect
import sys
import os
class Documize:
    """Generates documents from modules"""

    # NOTE(review): class-level defaults; reset() (called from __init__ via
    # set_module) rebinds per-instance lists, so instances do not normally
    # share these -- confirm before relying on the class-level values.
    functions = []
    classes = []
    attributes = []

    def __init__(self, module_string=''):
        self.set_module(module_string)

    def strip_license(self, text):
        """Strips the license (the first block in ================'s)
        from the text"""
        try:
            res = text.split('=' * 80)
            return res[2]
        except:
            return text

    def format_code_examples(self, text):
        # Not implemented.
        pass

    def generate_module_wikidocs(self):
        """Build and return the complete wiki page for the current module."""
        self.reset()
        res = '''#summary Reference documentation for `%s`.
'''\
            % self.module_string
        res += '''----
= %s =
%s
----
''' % (self.module_string,
            self.strip_license(self.module.__doc__))
        # Gather all the documentation
        for element in dir(self.module):
            e = eval('%s.%s' % (self.module_string, element))
            if not callable(e):
                self.generate_non_callable_docs(element, e)
            else:
                self.generate_callable_wikidocs(element, e)
        # Order it
        self.functions.sort()
        self.classes.sort()
        self.attributes.sort()
        # Present attributes
        if len(self.attributes) != 0:
            res += '''== Attributes ==
'''
            for a in self.attributes:
                res += a
            res += '''----
'''
        # Present functions
        if len(self.functions) != 0:
            res += '''== Functions ==
'''
            for f in self.functions:
                res += f
            res += '''----
'''
        res += '[mingusIndex Back to Index]'
        return res

    def generate_non_callable_docs(self, element_string, evaled):
        """Record a wiki entry for a module-level attribute (private names
        and submodules are skipped)."""
        if element_string[0] != '_' and type(evaled) != types.ModuleType:
            # str(type(x)) looks like "<type 'int'>"; take the quoted part.
            t = str(type(evaled))
            t = t.split("'")
            res = '=== `%s` ===' % element_string
            res += '''
 * *Type*: %s
''' % t[1]
            res += ''' * *Value*: %s
''' % repr(evaled)
            self.attributes.append(res)

    def generate_callable_wikidocs(self, element_string, evaled):
        """Dispatch documentation generation based on the callable's type."""
        if type(evaled) in [types.FunctionType, types.MethodType]:
            self.functions.append(self.generate_function_wikidocs(element_string,
                evaled))
        elif type(evaled) == types.ClassType:
            print 'CLASS'
        else:
            # print "Unknown callable object %s " % element_string
            pass

    def generate_function_wikidocs(self, func_string, func):
        """Build the wiki entry for one function: signature header,
        default values and docstring."""
        res = '=== `%s(' % func_string
        argspec = inspect.getargspec(func)
        args = argspec[0]
        defaults = argspec[3]
        def_values = []
        # Get the arguments
        for n in range(0, len(args)):
            try:
                # Arguments with defaults are the trailing len(defaults) ones.
                if defaults != None and len(defaults) >= len(args) - n:
                    def_values.append((args[n], defaults[n - (len(args)
                        - len(defaults))]))
                res += '%s, ' % args[n]
            except:
                res += '%s, ' % args[n]
        if res[-1] != '(':
            res = res[:-2]
        res += ''')` ===
'''
        # Add default values (wiki doesn't allow '=' in headers)
        if len(def_values) != 0:
            res += ' * *Default values*: '
            for n in def_values:
                res += '%s = %s, ' % (n[0], repr(n[1]))
            res = res[:-2] + '\n'
        # Add docstring
        if func.__doc__ != None:
            res += ''' * %s
''' % func.__doc__
        return res

    def reset(self):
        # Fresh per-instance accumulators (shadow the class-level defaults).
        self.functions = []
        self.classes = []
        self.attributes = []

    def set_module(self, module_string):
        """Point this instance at a new module, given as a dotted name
        (the module must already be importable/imported)."""
        if module_string != '':
            self.module_string = module_string
            self.module = eval(module_string)
        self.reset()

    def output_wiki(self):
        return self.generate_module_wikidocs()
def generate_package_wikidocs(package_string, file_prefix='ref',
        file_suffix='.wiki'):
    """Write one wiki file per module/class in the given package into the
    output directory named on the command line (sys.argv[1])."""
    d = Documize()
    package = eval(package_string)
    print '''
Generating documentation for package %s''' % package_string
    for element in dir(package):
        # NOTE(review): ``element`` is a string from dir(), so
        # ``callable(element)`` is always False and this guard is a no-op;
        # confirm the intent before changing it.
        if not callable(element):
            fullname = '%s.%s' % (package_string, element)
            if type(eval(fullname)) == types.ModuleType or type(eval(fullname))\
                    == types.ClassType:
                d.set_module(fullname)
                # Wiki page name: prefix + CamelCased dotted path + suffix.
                wikiname = file_prefix
                for parts in fullname.split('.'):
                    wikiname += parts.capitalize()
                wikiname += file_suffix
                print 'Writing %s...' % wikiname,
                result = d.output_wiki()
                try:
                    f = open(os.path.join(sys.argv[1], wikiname), 'w')
                    try:
                        f.write(result)
                        print 'OK'
                    except:
                        print "ERROR. Couldn't write to file."
                    f.close()
                except:
                    print "ERROR. Couldn't open file for writing."

# Script entry: print the license banner, validate the output directory
# argument, then document the four mingus sub-packages.
print 'mingus version 0.4, Copyright (C) 2008-2009, Bart Spaans\n'
print 'mingus comes with ABSOLUTELY NO WARRANTY. This is free'
print 'software and you are welcome to redistribute it under'
print 'certain conditions.'
if len(sys.argv) == 1:
    print '\n\nUsage:', sys.argv[0], 'OUTPUT-DIRECTORY'
    sys.exit(1)
elif not os.path.isdir(sys.argv[1]):
    print '\n\nError: not a valid directory:', sys.argv[1]
    sys.exit(1)
generate_package_wikidocs('mingus.core', 'ref', '.wiki')
generate_package_wikidocs('mingus.midi', 'ref', '.wiki')
generate_package_wikidocs('mingus.containers', 'ref', '.wiki')
generate_package_wikidocs('mingus.extra', 'ref', '.wiki')
generate_package_wikidocs('mingus.extra', 'ref', '.wiki')
|
gpl-3.0
|
HaroldMills/Vesper
|
scripts/compare_recording_file_csv_files.py
|
1
|
3968
|
from collections import namedtuple
from pathlib import Path
import csv
# Input/output locations for the comparison; edit these to rerun elsewhere.
DATA_DIR_PATH = Path(
    '/Users/harold/Desktop/NFC/Data/MPG Ranch/'
    '2016 MPG Ranch Recording Files Comparison')
DEBBIE_FILE_PATH = DATA_DIR_PATH / 'Recording Files Debbie.csv'
HAROLD_FILE_PATH = DATA_DIR_PATH / 'Recording Files Harold.csv'
OUTPUT_FILE_PATH = DATA_DIR_PATH / 'Differences.txt'
def main():
    """Compare the two recording-file CSV listings and write a report of
    the differences to OUTPUT_FILE_PATH."""
    d_files = read_file(DEBBIE_FILE_PATH)
    h_files = read_file(HAROLD_FILE_PATH)
    d_names = frozenset(d_files.keys())
    h_names = frozenset(h_files.keys())
    # Files present in one listing but not the other.
    d_extra_names = d_names - h_names
    h_extra_names = h_names - d_names
    # Files present in both listings but with differing metadata.
    differing_names = get_differing_file_names(d_files, h_files)
    write_output_file(
        d_extra_names, h_extra_names, differing_names, d_files, h_files)
def read_file(path):
    """Read one CSV listing and return a mapping from file name to File."""
    with open(path) as file_:
        records = [File(*row) for row in csv.reader(file_)]
    return {record.name: record for record in records}
def get_differing_file_names(d_files, h_files):
    """Return a frozenset of the names present in both mappings whose
    values compare unequal."""
    shared = frozenset(d_files) & frozenset(h_files)
    return frozenset(n for n in shared if d_files[n] != h_files[n])
def write_output_file(
        d_extra_names, h_extra_names, differing_names, d_files, h_files):
    """Write the full differences report to OUTPUT_FILE_PATH."""
    with open(OUTPUT_FILE_PATH, 'w') as f:
        w = OutputWriter(f)
        list_extra_file_names(w, 'Debbie', 'Harold', d_extra_names)
        list_extra_file_names(w, 'Harold', 'Debbie', h_extra_names)
        list_differing_files(w, differing_names, d_files, h_files)
        list_files_to_send_to_harold(w, d_extra_names, differing_names)
        list_total_sizes(w, d_files, d_extra_names, differing_names)

def list_extra_file_names(w, name_a, name_b, file_names):
    """Write the names of the files that name_a has and name_b lacks."""
    w.write(
        f'{len(file_names)} files that {name_a} has that {name_b} does not:')
    for name in sort_names(file_names):
        w.write(f'    {name}')
def sort_names(names):
    """Return the names sorted case-insensitively."""
    return sorted(names, key=str.lower)
def list_differing_files(w, names, d_files, h_files):
    """Write both versions' metadata for each file that differs."""
    w.write(
        f'{len(names)} files that Debbie and Harold have different '
        f'versions of:')
    for name in sort_names(names):
        d_file = d_files[name]
        h_file = h_files[name]
        w.write(f'    {name}')
        # [1:] drops the redundant name field from each record.
        w.write(f'        Debbie: {d_file[1:]}')
        w.write(f'        Harold: {h_file[1:]}')

def list_files_to_send_to_harold(w, d_extra_names, differing_names):
    """Write the combined list of files Harold still needs."""
    names = d_extra_names | differing_names
    w.write(f'{len(names)} files for Debbie to send to Harold:')
    for name in sort_names(names):
        w.write(f'    {name}')

def list_total_sizes(w, d_files, d_extra_names, differing_names):
    """Write the total size (in GB) of all of Debbie's files and of the
    subset Harold needs."""
    total_size = get_size(d_files)
    extra_size = get_size(d_files, d_extra_names)
    differing_size = get_size(d_files, differing_names)
    gig = 2 ** 30
    total_gb = total_size / gig
    min_gb = (extra_size + differing_size) / gig
    w.write(f"Total size of Debbie's {len(d_files)} files: {total_gb} GB")
    h_file_count = len(d_extra_names) + len(differing_names)
    w.write(f'Total size of {h_file_count} files Harold needs: {min_gb} GB')
def get_size(files, names=None):
    """Total size in bytes of the named files (all files if names is None)."""
    selected = files.keys() if names is None else names
    return sum(get_size_aux(files[name]) for name in selected)
def get_size_aux(f):
    """Size in bytes of one recording: frames * channels * bits-per-sample / 8."""
    frames = int(f.length)
    channels = int(f.channel_count)
    bits = int(f.sample_size)
    return frames * channels * bits // 8
# One row of the recording-files CSV: identifying name plus audio metadata.
# All fields are strings exactly as read from the file.
File = namedtuple(
    'File', [
        'name', 'sample_rate', 'length', 'channel_count', 'sample_size',
        'comp_type', 'comp_name'])
class OutputWriter:
    """Tiny helper that print()s every call to a fixed file object."""

    def __init__(self, file_):
        self._out = file_

    def write(self, *args):
        print(*args, file=self._out)
# Run the comparison when executed as a script.
if __name__ == '__main__':
    main()
|
mit
|
mrocklin/unification
|
unification/tests/test_more.py
|
1
|
2685
|
from collections import namedtuple
from unification.more import (unify_object, reify_object,
unifiable)
from unification import var, variables
from unification.core import unify, reify, _unify, _reify
class Foo(object):
    """Two-attribute test fixture with value-based equality."""
    def __init__(self, a, b):
        self.a = a
        self.b = b
    def __eq__(self, other):
        return (self.a, self.b) == (other.a, other.b)

class Bar(object):
    """One-attribute test fixture with value-based equality."""
    def __init__(self, c):
        self.c = c
    def __eq__(self, other):
        return self.c == other.c
def test_unify_object():
    """unify_object matches attributes pairwise and binds logic variables."""
    assert unify_object(Foo(1, 2), Foo(1, 2), {}) == {}
    assert unify_object(Foo(1, 2), Foo(1, 3), {}) == False
    assert unify_object(Foo(1, 2), Foo(1, var(3)), {}) == {var(3): 2}

def test_reify_object():
    """reify_object substitutes bound variables; a variable-free object is
    returned as the same instance (no copy)."""
    obj = reify_object(Foo(1, var(3)), {var(3): 4})
    assert obj.a == 1
    assert obj.b == 4
    f = Foo(1, 2)
    assert reify_object(f, {}) is f
def test_reify_slots():
    """reify_object also works on classes that use __slots__ (no __dict__).

    Bug fix: the last two checks were written as ``assert expr, msg``
    statements -- the second expression was an assert *message*, not a
    comparison, so they could never fail.  SlotsObject has no __eq__, so
    compare the reified attribute values instead.
    """
    class SlotsObject(object):
        __slots__ = ['myattr']
        def __init__(self, myattr):
            self.myattr = myattr
    x = var()
    s = {x: 1}
    e = SlotsObject(x)
    assert reify_object(e, s).myattr == 1
    # Reifying an already-concrete object must leave its value unchanged.
    assert reify_object(SlotsObject(1), s).myattr == 1
def test_objects_full():
    """After registering Foo/Bar with the dispatchers, the generic
    unify/reify entry points recurse through nested objects."""
    _unify.add((Foo, Foo, dict), unify_object)
    _unify.add((Bar, Bar, dict), unify_object)
    _reify.add((Foo, dict), reify_object)
    _reify.add((Bar, dict), reify_object)
    assert unify_object(Foo(1, Bar(2)), Foo(1, Bar(var(3))), {}) == {var(3): 2}
    assert reify(Foo(var('a'), Bar(Foo(var('b'), 3))),
                 {var('a'): 1, var('b'): 2}) == Foo(1, Bar(Foo(2, 3)))

def test_unify_slice():
    """Slices unify component-wise (start, stop, step)."""
    x = var('x')
    y = var('y')
    assert unify(slice(1), slice(1), {}) == {}
    assert unify(slice(1, 2, 3), x, {}) == {x: slice(1, 2, 3)}
    assert unify(slice(1, 2, None), slice(x, y), {}) == {x: 1, y: 2}

def test_reify_slice():
    """Variables inside a slice are substituted on reify."""
    x = var('x')
    assert reify(slice(1, var(2), 3), {var(2): 10}) == slice(1, 10, 3)
@unifiable
class A(object):
    """Fixture registered via @unifiable so generic unify/reify work on it."""
    def __init__(self, a, b):
        self.a = a
        self.b = b
    def __eq__(self, other):
        return self.__dict__ == other.__dict__

def test_unifiable():
    """@unifiable enables unify/reify on a plain attribute class."""
    x = var('x')
    f = A(1, 2)
    g = A(1, x)
    assert unify(f, g, {}) == {x: 2}
    assert reify(g, {x: 2}) == f

@unifiable
class Aslot(object):
    # NOTE(review): this is spelled ``slots``, not ``__slots__`` -- as
    # written it is an ordinary class attribute and instances still get a
    # __dict__ (which __eq__ below relies on).  Confirm whether __slots__
    # was intended before "fixing" it, since __eq__ would break.
    slots = 'a', 'b'
    def __init__(self, a, b):
        self.a = a
        self.b = b
    def __eq__(self, other):
        return self.__dict__ == other.__dict__
def test_unifiable_slots():
    """Same check as test_unifiable, but for the Aslot fixture.

    Bug fix: this function was also named ``test_unifiable``, so it
    shadowed the earlier test of the same name at module level and that
    test was never collected or run.
    """
    x = var('x')
    f = Aslot(1, 2)
    g = Aslot(1, x)
    assert unify(f, g, {}) == {x: 2}
    assert reify(g, {x: 2}) == f
|
bsd-3-clause
|
stiandre/sdhash-integration
|
external/tools/build/v2/test/inherit_toolset.py
|
20
|
1175
|
#!/usr/bin/python
# Copyright 2003 Vladimir Prus
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt)
import BoostBuild
import string
# Verify that a Boost.Build toolset can inherit from another and override
# only selected actions.
t = BoostBuild.Tester(pass_toolset=0)
t.write("a.cpp", """
""")
# Define a base toolset 'yfc1' with its own compile and link actions.
t.write("yfc1.jam", """
import feature ;
import generators ;
feature.extend toolset : yfc1 ;
rule init ( ) { }
generators.register-standard yfc1.compile : CPP : OBJ : <toolset>yfc1 ;
generators.register-standard yfc1.link : OBJ : EXE : <toolset>yfc1 ;
actions compile { yfc1-compile }
actions link { yfc1-link }
""")
# 'yfc2' inherits from 'yfc1' but overrides the link action only.
t.write("yfc2.jam", """
import feature ;
import toolset ;
feature.extend toolset : yfc2 ;
toolset.inherit yfc2 : yfc1 ;
rule init ( ) { }
actions link { yfc2-link }
""")
t.write("jamfile.jam", """
exe a : a.cpp ;
""")
t.write("jamroot.jam", """
using yfc1 ;
""")
# Dry run (-n) with debug output (-d2) so the would-be actions are printed.
t.run_build_system("-n -d2 yfc1")
t.fail_test(string.find(t.stdout(), "yfc1-link") == -1)
# Make sure we do not have to explicitly 'use' yfc1.
t.write("jamroot.jam", """
using yfc2 ;
""")
t.run_build_system("-n -d2 yfc2")
# The derived toolset must use its overridden link action.
t.fail_test(string.find(t.stdout(), "yfc2-link") == -1)
t.cleanup()
|
apache-2.0
|
Bredgren/GenericGameEngine
|
python/gge/Button.py
|
1
|
2790
|
from gge.GameObject import GameObject
from gge.InputAttribute import InputAttribute
from gge.Attribute import SingletonAttribute
import gge.Types as T
import gge.DisplayTypes as DT
# Attribute flags toggled by the button in response to mouse input.
class MouseWithin(SingletonAttribute): pass
class MouseDown(SingletonAttribute): pass

class ShapeButton(GameObject):
    """A clickable HUD button drawn as a shape plus text.

    Subscribes to the engine's input events and exposes its state through
    the MouseWithin and MouseDown attributes.
    """
    def __init__(self, gge):
        super(ShapeButton, self).__init__(gge)
        # Display components; the setters below mutate these in place.
        self.__layer = DT.Layer("hud", 0)
        self.__shape = DT.Shape()
        self.__text = DT.Text()
        self.__display_info = DT.DisplayRepType(layer=self.__layer,
                                                shapes=[self.__shape],
                                                text=[self.__text])
        self.setAttribute(DT.DisplayRep, value=self.__display_info)
        self.setAttribute(MouseWithin, value=False)
        self.setAttribute(MouseDown, value=False)
        # Subscribe to raw input events from the engine's input object.
        input_object = gge.getInputObject()
        input_attribute = input_object.getAttribute(InputAttribute)
        input_attribute.newListener(self.__inputListener)

    # --- Appearance setters (each mutates the shared display objects) ---
    def setLayer(self, name, number):
        self.__layer.name = name
        self.__layer.number = number

    def setText(self, text):
        self.__text.text = text

    def setFont(self, font):
        self.__text.font = font

    def setFontSize(self, size):
        self.__text.size = size

    def setFontColor(self, color):
        self.__text.color = color

    def setTextOffset(self, offset):
        self.__text.offset = offset

    def setShapeType(self, shape):
        self.__shape.shape = shape

    def setFillColor(self, color):
        self.__shape.color.fill = color

    def setLineColor(self, color):
        self.__shape.color.line = color

    def setShapeSize(self, size):
        self.__shape.size = size

    def setLineWidth(self, width):
        self.__shape.lineWidth = width

    def setShapePoints(self, points):
        self.__shape.points = points

    def setShapeOffset(self, offset):
        self.__shape.offset = offset

    def __inputListener(self, key, value):
        # Input callback: track hover on mouse movement, and presses of
        # mouse button 1 only while hovered.
        if key == "mouse pos":
            pos = self.getAttributeValue(T.Position)
            size = self.__shape.size
            # Axis-aligned bounding-box hit test: position + shape size.
            if (pos.x <= value[0] <= pos.x + size.w and
                pos.y <= value[1] <= pos.y + size.h):
                self.setAttribute(MouseWithin, value=True)
            else:
                self.setAttribute(MouseWithin, value=False)
        elif key == "mouse 1":
            if self.getAttributeValue(MouseWithin):
                if value:
                    self.setAttribute(MouseDown, value=True)
                else:
                    self.setAttribute(MouseDown, value=False)

class ImageButton(GameObject):
    """Placeholder for an image-based button (not implemented)."""
    pass
|
gpl-3.0
|
mjtamlyn/django
|
tests/model_fields/test_genericipaddressfield.py
|
171
|
1475
|
from django.core.exceptions import ValidationError
from django.db import models
from django.test import TestCase
from .models import GenericIPAddress
class GenericIPAddressFieldTests(TestCase):
    """Model-field behavior of GenericIPAddressField."""

    def test_genericipaddressfield_formfield_protocol(self):
        """
        GenericIPAddressField with a specified protocol does not generate a
        formfield without a protocol.
        """
        # An IPv4-only field must reject IPv6 input...
        model_field = models.GenericIPAddressField(protocol='IPv4')
        form_field = model_field.formfield()
        with self.assertRaises(ValidationError):
            form_field.clean('::1')
        # ...and vice versa.
        model_field = models.GenericIPAddressField(protocol='IPv6')
        form_field = model_field.formfield()
        with self.assertRaises(ValidationError):
            form_field.clean('127.0.0.1')

    def test_null_value(self):
        """
        Null values should be resolved to None.
        """
        GenericIPAddress.objects.create()
        o = GenericIPAddress.objects.get()
        self.assertIsNone(o.ip)

    def test_blank_string_saved_as_null(self):
        """An empty string is stored as NULL, on both create and update."""
        o = GenericIPAddress.objects.create(ip='')
        o.refresh_from_db()
        self.assertIsNone(o.ip)
        GenericIPAddress.objects.update(ip='')
        o.refresh_from_db()
        self.assertIsNone(o.ip)

    def test_save_load(self):
        """A saved IPv6 value reloads unchanged."""
        instance = GenericIPAddress.objects.create(ip='::1')
        loaded = GenericIPAddress.objects.get()
        self.assertEqual(loaded.ip, instance.ip)
|
bsd-3-clause
|
salguarnieri/intellij-community
|
python/lib/Lib/xmllib.py
|
160
|
34848
|
"""A parser for XML, using the derived class as static DTD."""
# Author: Sjoerd Mullender.
import re
import string
import warnings
warnings.warn("The xmllib module is obsolete. Use xml.sax instead.", DeprecationWarning)
del warnings
# Module version string (historical; the module itself is deprecated).
version = '0.3'

class Error(RuntimeError):
    """Generic exception raised by this module's XML parser."""
    pass

# Regular expressions used for parsing
_S = '[ \t\r\n]+'                       # white space
_opS = '[ \t\r\n]*'                     # optional white space
_Name = '[a-zA-Z_:][-a-zA-Z0-9._:]*'    # valid XML name
_QStr = "(?:'[^']*'|\"[^\"]*\")"        # quoted XML string
illegal = re.compile('[^\t\r\n -\176\240-\377]') # illegal chars in content
interesting = re.compile('[]&<]')

amp = re.compile('&')
ref = re.compile('&(' + _Name + '|#[0-9]+|#x[0-9a-fA-F]+)[^-a-zA-Z0-9._:]')
entityref = re.compile('&(?P<name>' + _Name + ')[^-a-zA-Z0-9._:]')
charref = re.compile('&#(?P<char>[0-9]+[^0-9]|x[0-9a-fA-F]+[^0-9a-fA-F])')
space = re.compile(_S + '$')
newline = re.compile('\n')

# One attribute: name, optionally followed by = and a (possibly unquoted) value.
attrfind = re.compile(
    _S + '(?P<name>' + _Name + ')'
    '(' + _opS + '=' + _opS +
    '(?P<value>'+_QStr+'|[-a-zA-Z0-9.:+*%?!\(\)_#=~]+))?')
starttagopen = re.compile('<' + _Name)
starttagend = re.compile(_opS + '(?P<slash>/?)>')
starttagmatch = re.compile('<(?P<tagname>'+_Name+')'
                  '(?P<attrs>(?:'+attrfind.pattern+')*)'+
                  starttagend.pattern)
endtagopen = re.compile('</')
endbracket = re.compile(_opS + '>')
endbracketfind = re.compile('(?:[^>\'"]|'+_QStr+')*>')
tagfind = re.compile(_Name)
cdataopen = re.compile(r'<!\[CDATA\[')
cdataclose = re.compile(r'\]\]>')
# this matches one of the following:
# SYSTEM SystemLiteral
# PUBLIC PubidLiteral SystemLiteral
_SystemLiteral = '(?P<%s>'+_QStr+')'
_PublicLiteral = '(?P<%s>"[-\'\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*"|' \
                 "'[-\(\)+,./:=?;!*#@$_%% \n\ra-zA-Z0-9]*')"
_ExternalId = '(?:SYSTEM|' \
              'PUBLIC'+_S+_PublicLiteral%'pubid'+ \
              ')'+_S+_SystemLiteral%'syslit'
doctype = re.compile('<!DOCTYPE'+_S+'(?P<name>'+_Name+')'
                     '(?:'+_S+_ExternalId+')?'+_opS)
# The <?xml ...?> declaration: version, optional encoding and standalone.
xmldecl = re.compile('<\?xml'+_S+
                     'version'+_opS+'='+_opS+'(?P<version>'+_QStr+')'+
                     '(?:'+_S+'encoding'+_opS+'='+_opS+
                     "(?P<encoding>'[A-Za-z][-A-Za-z0-9._]*'|"
                     '"[A-Za-z][-A-Za-z0-9._]*"))?'
                     '(?:'+_S+'standalone'+_opS+'='+_opS+
                     '(?P<standalone>\'(?:yes|no)\'|"(?:yes|no)"))?'+
                     _opS+'\?>')
procopen = re.compile(r'<\?(?P<proc>' + _Name + ')' + _opS)
procclose = re.compile(_opS + r'\?>')
commentopen = re.compile('<!--')
commentclose = re.compile('-->')
doubledash = re.compile('--')
# Translation table mapping attribute-value whitespace to plain spaces.
attrtrans = string.maketrans(' \r\n\t', '    ')

# definitions for XML namespaces
_NCName = '[a-zA-Z_][-a-zA-Z0-9._]*'    # XML Name, minus the ":"
ncname = re.compile(_NCName + '$')
qname = re.compile('(?:(?P<prefix>' + _NCName + '):)?' # optional prefix
                   '(?P<local>' + _NCName + ')$')
xmlns = re.compile('xmlns(?::(?P<ncname>'+_NCName+'))?$')
# XML parser base class -- find tags and call handler functions.
# Usage: p = XMLParser(); p.feed(data); ...; p.close().
# The dtd is defined by deriving a class which defines methods with
# special names to handle tags: start_foo and end_foo to handle <foo>
# and </foo>, respectively. The data between tags is passed to the
# parser by calling self.handle_data() with some data as argument (the
# data may be split up in arbitrary chunks).
class XMLParser:
    """Event-driven, incremental XML parser (Python 2-era, xmllib style).

    Data is pushed in with feed(); recognized constructs trigger handler
    methods (handle_data, handle_comment, handle_proc, ...).  Element
    handlers come either from the `elements` dict or, for backward
    compatibility, from start_foo/end_foo methods collected by
    __fixelements().  Optional old-style <?xml:namespace?> processing is
    supported.  NOTE(review): this predates Python 3 and uses Python 2
    idioms throughout.
    """
    attributes = {}                     # default, to be overridden
    elements = {}                       # default, to be overridden
    # parsing options, settable using keyword args in __init__
    __accept_unquoted_attributes = 0
    __accept_missing_endtag_name = 0
    __map_case = 0
    __accept_utf8 = 0
    __translate_attribute_references = 1
    # Interface -- initialize and reset this instance
    def __init__(self, **kw):
        """Initialize the parser; recognized keyword options override the
        class-level parsing flags of the same name."""
        self.__fixed = 0
        if 'accept_unquoted_attributes' in kw:
            self.__accept_unquoted_attributes = kw['accept_unquoted_attributes']
        if 'accept_missing_endtag_name' in kw:
            self.__accept_missing_endtag_name = kw['accept_missing_endtag_name']
        if 'map_case' in kw:
            self.__map_case = kw['map_case']
        if 'accept_utf8' in kw:
            self.__accept_utf8 = kw['accept_utf8']
        if 'translate_attribute_references' in kw:
            self.__translate_attribute_references = kw['translate_attribute_references']
        self.reset()
    def __fixelements(self):
        # Build self.elements from start_*/end_* methods found on the
        # instance and along the class hierarchy (legacy registration).
        self.__fixed = 1
        self.elements = {}
        self.__fixdict(self.__dict__)
        self.__fixclass(self.__class__)
    def __fixclass(self, kl):
        self.__fixdict(kl.__dict__)
        for k in kl.__bases__:
            self.__fixclass(k)
    def __fixdict(self, dict):
        for key in dict.keys():
            if key[:6] == 'start_':
                tag = key[6:]
                start, end = self.elements.get(tag, (None, None))
                if start is None:
                    self.elements[tag] = getattr(self, key), end
            elif key[:4] == 'end_':
                tag = key[4:]
                start, end = self.elements.get(tag, (None, None))
                if end is None:
                    self.elements[tag] = start, getattr(self, key)
    # Interface -- reset this instance.  Loses all unprocessed data
    def reset(self):
        """Reset parser state; any buffered unprocessed data is discarded."""
        self.rawdata = ''
        self.stack = []
        self.nomoretags = 0
        self.literal = 0
        self.lineno = 1
        self.__at_start = 1
        self.__seen_doctype = None
        self.__seen_starttag = 0
        self.__use_namespaces = 0
        self.__namespaces = {'xml':None}   # xml is implicitly declared
        # backward compatibility hack: if elements not overridden,
        # fill it in ourselves
        if self.elements is XMLParser.elements:
            self.__fixelements()
    # For derived classes only -- enter literal mode (CDATA) till EOF
    def setnomoretags(self):
        self.nomoretags = self.literal = 1
    # For derived classes only -- enter literal mode (CDATA)
    def setliteral(self, *args):
        self.literal = 1
    # Interface -- feed some data to the parser.  Call this as
    # often as you want, with as little or as much text as you
    # want (may include '\n').  (This just saves the text, all the
    # processing is done by goahead().)
    def feed(self, data):
        self.rawdata = self.rawdata + data
        self.goahead(0)
    # Interface -- handle the remaining data
    def close(self):
        """Flush remaining buffered data as if EOF were reached."""
        self.goahead(1)
        if self.__fixed:
            self.__fixed = 0
            # remove self.elements so that we don't leak
            del self.elements
    # Interface -- translate references
    def translate_references(self, data, all = 1):
        """Expand character/entity references in *data*; entity expansions
        are rescanned so nested references are resolved too."""
        if not self.__translate_attribute_references:
            return data
        i = 0
        while 1:
            res = amp.search(data, i)
            if res is None:
                return data
            s = res.start(0)
            res = ref.match(data, s)
            if res is None:
                self.syntax_error("bogus `&'")
                i = s+1
                continue
            i = res.end(0)
            str = res.group(1)
            rescan = 0
            if str[0] == '#':
                if str[1] == 'x':
                    str = chr(int(str[2:], 16))
                else:
                    str = chr(int(str[1:]))
                if data[i - 1] != ';':
                    self.syntax_error("`;' missing after char reference")
                    i = i-1
            elif all:
                if str in self.entitydefs:
                    str = self.entitydefs[str]
                    rescan = 1
                elif data[i - 1] != ';':
                    self.syntax_error("bogus `&'")
                    i = s + 1 # just past the &
                    continue
                else:
                    self.syntax_error("reference to unknown entity `&%s;'" % str)
                    str = '&' + str + ';'
            elif data[i - 1] != ';':
                self.syntax_error("bogus `&'")
                i = s + 1 # just past the &
                continue
            # when we get here, str contains the translated text and i points
            # to the end of the string that is to be replaced
            data = data[:s] + str + data[i:]
            if rescan:
                i = s
            else:
                i = s + len(str)
    # Interface - return a dictionary of all namespaces currently valid
    def getnamespace(self):
        nsdict = {}
        for t, d, nst in self.stack:
            nsdict.update(d)
        return nsdict
    # Internal -- handle data as far as reasonable.  May leave state
    # and data to be processed by a subsequent call.  If 'end' is
    # true, force handling all data as if followed by EOF marker.
    def goahead(self, end):
        """Main dispatch loop: consume as much of self.rawdata as possible,
        leaving any incomplete construct buffered for the next feed()."""
        rawdata = self.rawdata
        i = 0
        n = len(rawdata)
        while i < n:
            if i > 0:
                self.__at_start = 0
            if self.nomoretags:
                data = rawdata[i:n]
                self.handle_data(data)
                self.lineno = self.lineno + data.count('\n')
                i = n
                break
            res = interesting.search(rawdata, i)
            if res:
                j = res.start(0)
            else:
                j = n
            if i < j:
                data = rawdata[i:j]
                if self.__at_start and space.match(data) is None:
                    self.syntax_error('illegal data at start of file')
                self.__at_start = 0
                if not self.stack and space.match(data) is None:
                    self.syntax_error('data not in content')
                if not self.__accept_utf8 and illegal.search(data):
                    self.syntax_error('illegal character in content')
                self.handle_data(data)
                self.lineno = self.lineno + data.count('\n')
            i = j
            if i == n: break
            if rawdata[i] == '<':
                if starttagopen.match(rawdata, i):
                    if self.literal:
                        data = rawdata[i]
                        self.handle_data(data)
                        self.lineno = self.lineno + data.count('\n')
                        i = i+1
                        continue
                    k = self.parse_starttag(i)
                    if k < 0: break          # incomplete; wait for more data
                    self.__seen_starttag = 1
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                if endtagopen.match(rawdata, i):
                    k = self.parse_endtag(i)
                    if k < 0: break
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                if commentopen.match(rawdata, i):
                    if self.literal:
                        data = rawdata[i]
                        self.handle_data(data)
                        self.lineno = self.lineno + data.count('\n')
                        i = i+1
                        continue
                    k = self.parse_comment(i)
                    if k < 0: break
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                if cdataopen.match(rawdata, i):
                    k = self.parse_cdata(i)
                    if k < 0: break
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                res = xmldecl.match(rawdata, i)
                if res:
                    if not self.__at_start:
                        self.syntax_error("<?xml?> declaration not at start of document")
                    version, encoding, standalone = res.group('version',
                                                              'encoding',
                                                              'standalone')
                    if version[1:-1] != '1.0':
                        raise Error('only XML version 1.0 supported')
                    if encoding: encoding = encoding[1:-1]
                    if standalone: standalone = standalone[1:-1]
                    self.handle_xml(encoding, standalone)
                    i = res.end(0)
                    continue
                res = procopen.match(rawdata, i)
                if res:
                    k = self.parse_proc(i)
                    if k < 0: break
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
                res = doctype.match(rawdata, i)
                if res:
                    if self.literal:
                        data = rawdata[i]
                        self.handle_data(data)
                        self.lineno = self.lineno + data.count('\n')
                        i = i+1
                        continue
                    if self.__seen_doctype:
                        self.syntax_error('multiple DOCTYPE elements')
                    if self.__seen_starttag:
                        self.syntax_error('DOCTYPE not at beginning of document')
                    k = self.parse_doctype(res)
                    if k < 0: break
                    self.__seen_doctype = res.group('name')
                    if self.__map_case:
                        self.__seen_doctype = self.__seen_doctype.lower()
                    self.lineno = self.lineno + rawdata[i:k].count('\n')
                    i = k
                    continue
            elif rawdata[i] == '&':
                if self.literal:
                    data = rawdata[i]
                    self.handle_data(data)
                    i = i+1
                    continue
                res = charref.match(rawdata, i)
                if res is not None:
                    i = res.end(0)
                    if rawdata[i-1] != ';':
                        self.syntax_error("`;' missing in charref")
                        i = i-1
                    if not self.stack:
                        self.syntax_error('data not in content')
                    self.handle_charref(res.group('char')[:-1])
                    self.lineno = self.lineno + res.group(0).count('\n')
                    continue
                res = entityref.match(rawdata, i)
                if res is not None:
                    i = res.end(0)
                    if rawdata[i-1] != ';':
                        self.syntax_error("`;' missing in entityref")
                        i = i-1
                    name = res.group('name')
                    if self.__map_case:
                        name = name.lower()
                    if name in self.entitydefs:
                        # Splice the entity replacement text into the buffer
                        # and rescan from the splice point.
                        self.rawdata = rawdata = rawdata[:res.start(0)] + self.entitydefs[name] + rawdata[i:]
                        n = len(rawdata)
                        i = res.start(0)
                    else:
                        self.unknown_entityref(name)
                    self.lineno = self.lineno + res.group(0).count('\n')
                    continue
            elif rawdata[i] == ']':
                if self.literal:
                    data = rawdata[i]
                    self.handle_data(data)
                    i = i+1
                    continue
                if n-i < 3:
                    break
                if cdataclose.match(rawdata, i):
                    self.syntax_error("bogus `]]>'")
                self.handle_data(rawdata[i])
                i = i+1
                continue
            else:
                raise Error('neither < nor & ??')
            # We get here only if incomplete matches but
            # nothing else
            break
        # end while
        if i > 0:
            self.__at_start = 0
        if end and i < n:
            # EOF with leftover data: report one bogus character at a time,
            # recursing until the buffer is drained.
            data = rawdata[i]
            self.syntax_error("bogus `%s'" % data)
            if not self.__accept_utf8 and illegal.search(data):
                self.syntax_error('illegal character in content')
            self.handle_data(data)
            self.lineno = self.lineno + data.count('\n')
            self.rawdata = rawdata[i+1:]
            return self.goahead(end)
        self.rawdata = rawdata[i:]
        if end:
            if not self.__seen_starttag:
                self.syntax_error('no elements in file')
            if self.stack:
                self.syntax_error('missing end tags')
                while self.stack:
                    self.finish_endtag(self.stack[-1][0])
    # Internal -- parse comment, return length or -1 if not terminated
    def parse_comment(self, i):
        rawdata = self.rawdata
        if rawdata[i:i+4] != '<!--':
            raise Error('unexpected call to handle_comment')
        res = commentclose.search(rawdata, i+4)
        if res is None:
            return -1
        if doubledash.search(rawdata, i+4, res.start(0)):
            self.syntax_error("`--' inside comment")
        if rawdata[res.start(0)-1] == '-':
            self.syntax_error('comment cannot end in three dashes')
        if not self.__accept_utf8 and \
           illegal.search(rawdata, i+4, res.start(0)):
            self.syntax_error('illegal character in comment')
        self.handle_comment(rawdata[i+4: res.start(0)])
        return res.end(0)
    # Internal -- handle DOCTYPE tag, return length or -1 if not terminated
    def parse_doctype(self, res):
        rawdata = self.rawdata
        n = len(rawdata)
        name = res.group('name')
        if self.__map_case:
            name = name.lower()
        pubid, syslit = res.group('pubid', 'syslit')
        if pubid is not None:
            pubid = pubid[1:-1]         # remove quotes
            pubid = ' '.join(pubid.split()) # normalize
        if syslit is not None: syslit = syslit[1:-1] # remove quotes
        j = k = res.end(0)
        if k >= n:
            return -1
        if rawdata[k] == '[':
            # Internal DTD subset: scan to the matching ']', tracking
            # quoting state so markup inside literals is ignored.
            level = 0
            k = k+1
            dq = sq = 0
            while k < n:
                c = rawdata[k]
                if not sq and c == '"':
                    dq = not dq
                elif not dq and c == "'":
                    sq = not sq
                elif sq or dq:
                    pass
                elif level <= 0 and c == ']':
                    res = endbracket.match(rawdata, k+1)
                    if res is None:
                        return -1
                    self.handle_doctype(name, pubid, syslit, rawdata[j+1:k])
                    return res.end(0)
                elif c == '<':
                    level = level + 1
                elif c == '>':
                    level = level - 1
                    if level < 0:
                        self.syntax_error("bogus `>' in DOCTYPE")
                k = k+1
        res = endbracketfind.match(rawdata, k)
        if res is None:
            return -1
        if endbracket.match(rawdata, k) is None:
            self.syntax_error('garbage in DOCTYPE')
        self.handle_doctype(name, pubid, syslit, None)
        return res.end(0)
    # Internal -- handle CDATA tag, return length or -1 if not terminated
    def parse_cdata(self, i):
        rawdata = self.rawdata
        if rawdata[i:i+9] != '<![CDATA[':
            raise Error('unexpected call to parse_cdata')
        res = cdataclose.search(rawdata, i+9)
        if res is None:
            return -1
        if not self.__accept_utf8 and \
           illegal.search(rawdata, i+9, res.start(0)):
            self.syntax_error('illegal character in CDATA')
        if not self.stack:
            self.syntax_error('CDATA not in content')
        self.handle_cdata(rawdata[i+9:res.start(0)])
        return res.end(0)
    # Attributes accepted by the old-style <?xml:namespace?> declaration.
    __xml_namespace_attributes = {'ns':None, 'src':None, 'prefix':None}
    # Internal -- handle a processing instruction tag
    def parse_proc(self, i):
        rawdata = self.rawdata
        end = procclose.search(rawdata, i)
        if end is None:
            return -1
        j = end.start(0)
        if not self.__accept_utf8 and illegal.search(rawdata, i+2, j):
            self.syntax_error('illegal character in processing instruction')
        res = tagfind.match(rawdata, i+2)
        if res is None:
            raise Error('unexpected call to parse_proc')
        k = res.end(0)
        name = res.group(0)
        if self.__map_case:
            name = name.lower()
        if name == 'xml:namespace':
            self.syntax_error('old-fashioned namespace declaration')
            self.__use_namespaces = -1
            # namespace declaration
            # this must come after the <?xml?> declaration (if any)
            # and before the <!DOCTYPE> (if any).
            if self.__seen_doctype or self.__seen_starttag:
                self.syntax_error('xml:namespace declaration too late in document')
            attrdict, namespace, k = self.parse_attributes(name, k, j)
            if namespace:
                self.syntax_error('namespace declaration inside namespace declaration')
            for attrname in attrdict.keys():
                if not attrname in self.__xml_namespace_attributes:
                    self.syntax_error("unknown attribute `%s' in xml:namespace tag" % attrname)
            if not 'ns' in attrdict or not 'prefix' in attrdict:
                self.syntax_error('xml:namespace without required attributes')
            prefix = attrdict.get('prefix')
            if ncname.match(prefix) is None:
                self.syntax_error('xml:namespace illegal prefix value')
                return end.end(0)
            if prefix in self.__namespaces:
                self.syntax_error('xml:namespace prefix not unique')
            self.__namespaces[prefix] = attrdict['ns']
        else:
            if name.lower() == 'xml':
                self.syntax_error('illegal processing instruction target name')
            self.handle_proc(name, rawdata[k:j])
        return end.end(0)
    # Internal -- parse attributes between i and j
    def parse_attributes(self, tag, i, j):
        """Parse attributes of *tag* in rawdata[i:j]; return
        (attrdict, namespace-declarations, index-after-last-attribute)."""
        rawdata = self.rawdata
        attrdict = {}
        namespace = {}
        while i < j:
            res = attrfind.match(rawdata, i)
            if res is None:
                break
            attrname, attrvalue = res.group('name', 'value')
            if self.__map_case:
                attrname = attrname.lower()
            i = res.end(0)
            if attrvalue is None:
                self.syntax_error("no value specified for attribute `%s'" % attrname)
                attrvalue = attrname
            elif attrvalue[:1] == "'" == attrvalue[-1:] or \
                 attrvalue[:1] == '"' == attrvalue[-1:]:
                attrvalue = attrvalue[1:-1]
            elif not self.__accept_unquoted_attributes:
                self.syntax_error("attribute `%s' value not quoted" % attrname)
            res = xmlns.match(attrname)
            if res is not None:
                # namespace declaration
                ncname = res.group('ncname')
                namespace[ncname or ''] = attrvalue or None
                if not self.__use_namespaces:
                    self.__use_namespaces = len(self.stack)+1
                continue
            if '<' in attrvalue:
                self.syntax_error("`<' illegal in attribute value")
            if attrname in attrdict:
                self.syntax_error("attribute `%s' specified twice" % attrname)
            attrvalue = attrvalue.translate(attrtrans)
            attrdict[attrname] = self.translate_references(attrvalue)
        return attrdict, namespace, i
    # Internal -- handle starttag, return length or -1 if not terminated
    def parse_starttag(self, i):
        rawdata = self.rawdata
        # i points to start of tag
        end = endbracketfind.match(rawdata, i+1)
        if end is None:
            return -1
        tag = starttagmatch.match(rawdata, i)
        if tag is None or tag.end(0) != end.end(0):
            self.syntax_error('garbage in starttag')
            return end.end(0)
        nstag = tagname = tag.group('tagname')
        if self.__map_case:
            nstag = tagname = nstag.lower()
        if not self.__seen_starttag and self.__seen_doctype and \
           tagname != self.__seen_doctype:
            self.syntax_error('starttag does not match DOCTYPE')
        if self.__seen_starttag and not self.stack:
            self.syntax_error('multiple elements on top level')
        k, j = tag.span('attrs')
        attrdict, nsdict, k = self.parse_attributes(tagname, k, j)
        self.stack.append((tagname, nsdict, nstag))
        if self.__use_namespaces:
            res = qname.match(tagname)
        else:
            res = None
        if res is not None:
            prefix, nstag = res.group('prefix', 'local')
            if prefix is None:
                prefix = ''
            ns = None
            # Innermost matching declaration wins (whole stack is scanned,
            # later entries overwrite earlier ones).
            for t, d, nst in self.stack:
                if prefix in d:
                    ns = d[prefix]
            if ns is None and prefix != '':
                ns = self.__namespaces.get(prefix)
            if ns is not None:
                nstag = ns + ' ' + nstag
            elif prefix != '':
                nstag = prefix + ':' + nstag # undo split
            self.stack[-1] = tagname, nsdict, nstag
        # translate namespace of attributes
        attrnamemap = {} # map from new name to old name (used for error reporting)
        for key in attrdict.keys():
            attrnamemap[key] = key
        if self.__use_namespaces:
            nattrdict = {}
            for key, val in attrdict.items():
                okey = key
                res = qname.match(key)
                if res is not None:
                    aprefix, key = res.group('prefix', 'local')
                    if self.__map_case:
                        key = key.lower()
                    if aprefix is not None:
                        ans = None
                        for t, d, nst in self.stack:
                            if aprefix in d:
                                ans = d[aprefix]
                        if ans is None:
                            ans = self.__namespaces.get(aprefix)
                        if ans is not None:
                            key = ans + ' ' + key
                        else:
                            key = aprefix + ':' + key
                nattrdict[key] = val
                attrnamemap[key] = okey
            attrdict = nattrdict
        attributes = self.attributes.get(nstag)
        if attributes is not None:
            # Validate against the declared attribute set and fill defaults.
            for key in attrdict.keys():
                if not key in attributes:
                    self.syntax_error("unknown attribute `%s' in tag `%s'" % (attrnamemap[key], tagname))
            for key, val in attributes.items():
                if val is not None and not key in attrdict:
                    attrdict[key] = val
        method = self.elements.get(nstag, (None, None))[0]
        self.finish_starttag(nstag, attrdict, method)
        if tag.group('slash') == '/':
            # Empty-element tag <foo/>: synthesize the end tag immediately.
            self.finish_endtag(tagname)
        return tag.end(0)
    # Internal -- parse endtag
    def parse_endtag(self, i):
        rawdata = self.rawdata
        end = endbracketfind.match(rawdata, i+1)
        if end is None:
            return -1
        res = tagfind.match(rawdata, i+2)
        if res is None:
            if self.literal:
                self.handle_data(rawdata[i])
                return i+1
            if not self.__accept_missing_endtag_name:
                self.syntax_error('no name specified in end tag')
            tag = self.stack[-1][0]
            k = i+2
        else:
            tag = res.group(0)
            if self.__map_case:
                tag = tag.lower()
            if self.literal:
                if not self.stack or tag != self.stack[-1][0]:
                    self.handle_data(rawdata[i])
                    return i+1
            k = res.end(0)
        if endbracket.match(rawdata, k) is None:
            self.syntax_error('garbage in end tag')
        self.finish_endtag(tag)
        return end.end(0)
    # Internal -- finish processing of start tag
    def finish_starttag(self, tagname, attrdict, method):
        if method is not None:
            self.handle_starttag(tagname, method, attrdict)
        else:
            self.unknown_starttag(tagname, attrdict)
    # Internal -- finish processing of end tag
    def finish_endtag(self, tag):
        """Pop the stack down to *tag*, emitting end events (and syntax
        errors for any unclosed intermediate elements)."""
        self.literal = 0
        if not tag:
            self.syntax_error('name-less end tag')
            found = len(self.stack) - 1
            if found < 0:
                self.unknown_endtag(tag)
                return
        else:
            found = -1
            for i in range(len(self.stack)):
                if tag == self.stack[i][0]:
                    found = i
            if found == -1:
                self.syntax_error('unopened end tag')
                return
        while len(self.stack) > found:
            if found < len(self.stack) - 1:
                self.syntax_error('missing close tag for %s' % self.stack[-1][2])
            nstag = self.stack[-1][2]
            method = self.elements.get(nstag, (None, None))[1]
            if method is not None:
                self.handle_endtag(nstag, method)
            else:
                self.unknown_endtag(nstag)
            if self.__use_namespaces == len(self.stack):
                self.__use_namespaces = 0
            del self.stack[-1]
    # Overridable -- handle xml processing instruction
    def handle_xml(self, encoding, standalone):
        pass
    # Overridable -- handle DOCTYPE
    def handle_doctype(self, tag, pubid, syslit, data):
        pass
    # Overridable -- handle start tag
    def handle_starttag(self, tag, method, attrs):
        method(attrs)
    # Overridable -- handle end tag
    def handle_endtag(self, tag, method):
        method()
    # Example -- handle character reference, no need to override
    def handle_charref(self, name):
        """Decode a numeric character reference and emit it as data.
        Only code points 0-255 are accepted (Latin-1-era limitation)."""
        try:
            if name[0] == 'x':
                n = int(name[1:], 16)
            else:
                n = int(name)
        except ValueError:
            self.unknown_charref(name)
            return
        if not 0 <= n <= 255:
            self.unknown_charref(name)
            return
        self.handle_data(chr(n))
    # Definition of entities -- derived classes may override
    # NOTE(review): the stdlib xmllib maps these to character references
    # ('&#60;' etc.) so expansions are rescanned safely; the literal
    # characters here look like extraction damage -- verify against the
    # original source before relying on translate_references behavior.
    entitydefs = {'lt': '<',        # must use charref
                  'gt': '>',
                  'amp': '&',       # must use charref
                  'quot': '"',
                  'apos': '&#39;',
                  }
    # Example -- handle data, should be overridden
    def handle_data(self, data):
        pass
    # Example -- handle cdata, could be overridden
    def handle_cdata(self, data):
        pass
    # Example -- handle comment, could be overridden
    def handle_comment(self, data):
        pass
    # Example -- handle processing instructions, could be overridden
    def handle_proc(self, name, data):
        pass
    # Example -- handle relatively harmless syntax errors, could be overridden
    def syntax_error(self, message):
        raise Error('Syntax error at line %d: %s' % (self.lineno, message))
    # To be overridden -- handlers for unknown objects
    def unknown_starttag(self, tag, attrs): pass
    def unknown_endtag(self, tag): pass
    def unknown_charref(self, ref): pass
    def unknown_entityref(self, name):
        self.syntax_error("reference to unknown entity `&%s;'" % name)
class TestXMLParser(XMLParser):
    """Diagnostic parser that prints every event; used by the test() driver.

    Data is accumulated in self.testdata and flushed (printed) lazily so
    adjacent chunks appear as one 'data:' line.  NOTE(review): Python 2
    print statements throughout.
    """
    def __init__(self, **kw):
        self.testdata = ""
        XMLParser.__init__(self, **kw)
    def handle_xml(self, encoding, standalone):
        self.flush()
        print 'xml: encoding =',encoding,'standalone =',standalone
    def handle_doctype(self, tag, pubid, syslit, data):
        self.flush()
        print 'DOCTYPE:',tag, repr(data)
    def handle_data(self, data):
        # Buffer data; flush once the repr grows past one printed line.
        self.testdata = self.testdata + data
        if len(repr(self.testdata)) >= 70:
            self.flush()
    def flush(self):
        """Print and clear any buffered character data."""
        data = self.testdata
        if data:
            self.testdata = ""
            print 'data:', repr(data)
    def handle_cdata(self, data):
        self.flush()
        print 'cdata:', repr(data)
    def handle_proc(self, name, data):
        self.flush()
        print 'processing:',name,repr(data)
    def handle_comment(self, data):
        self.flush()
        r = repr(data)
        if len(r) > 68:
            # Elide the middle of long comments for readability.
            r = r[:32] + '...' + r[-32:]
        print 'comment:', r
    def syntax_error(self, message):
        # Report instead of raising, so parsing continues.
        print 'error at line %d:' % self.lineno, message
    def unknown_starttag(self, tag, attrs):
        self.flush()
        if not attrs:
            print 'start tag: <' + tag + '>'
        else:
            print 'start tag: <' + tag,
            for name, value in attrs.items():
                print name + '=' + '"' + value + '"',
            print '>'
    def unknown_endtag(self, tag):
        self.flush()
        print 'end tag: </' + tag + '>'
    def unknown_entityref(self, ref):
        self.flush()
        print '*** unknown entity ref: &' + ref + ';'
    def unknown_charref(self, ref):
        self.flush()
        print '*** unknown char ref: &#' + ref + ';'
    def close(self):
        XMLParser.close(self)
        self.flush()
def test(args = None):
    """Command-line driver: parse an XML file and print events.

    Options: -s use the silent base XMLParser instead of TestXMLParser;
    -t time the run.  The file argument defaults to 'test.xml'; '-' reads
    stdin.  Without -t, data is fed one character at a time to exercise
    incremental parsing.  NOTE(review): Python 2 syntax (print statements,
    `except E, v`).
    """
    import sys, getopt
    from time import time
    if not args:
        args = sys.argv[1:]
    opts, args = getopt.getopt(args, 'st')
    klass = TestXMLParser
    do_time = 0
    for o, a in opts:
        if o == '-s':
            klass = XMLParser
        elif o == '-t':
            do_time = 1
    if args:
        file = args[0]
    else:
        file = 'test.xml'
    if file == '-':
        f = sys.stdin
    else:
        try:
            f = open(file, 'r')
        except IOError, msg:
            print file, ":", msg
            sys.exit(1)
    data = f.read()
    if f is not sys.stdin:
        f.close()
    x = klass()
    t0 = time()
    try:
        if do_time:
            # Timed mode: feed everything at once.
            x.feed(data)
            x.close()
        else:
            # Stress incremental parsing: one character per feed().
            for c in data:
                x.feed(c)
            x.close()
    except Error, msg:
        t1 = time()
        print msg
        if do_time:
            print 'total time: %g' % (t1-t0)
        sys.exit(1)
    t1 = time()
    if do_time:
        print 'total time: %g' % (t1-t0)
if __name__ == '__main__':
    test()
|
apache-2.0
|
orhanf/fuel
|
tests/test_schemes.py
|
21
|
7676
|
import numpy
from numpy.testing import assert_raises
from fuel.schemes import (ConstantScheme, SequentialExampleScheme,
SequentialScheme, ShuffledExampleScheme,
ShuffledScheme, ConcatenatedScheme,
cross_validation)
def iterator_requester(scheme):
    """Return a helper that builds a *scheme* and yields its request iterator.

    The helper forwards all positional and keyword arguments to the scheme
    constructor, then returns ``get_request_iterator()`` of the new instance.
    """
    def get_request_iterator(*args, **kwargs):
        return scheme(*args, **kwargs).get_request_iterator()
    return get_request_iterator
def test_constant_scheme():
    """ConstantScheme yields its batch size, bounded by num_examples or times."""
    get_request_iterator = iterator_requester(ConstantScheme)
    # num_examples caps the total example count; the final batch may be short.
    assert list(get_request_iterator(3, num_examples=7)) == [3, 3, 1]
    assert list(get_request_iterator(3, num_examples=9)) == [3, 3, 3]
    assert list(get_request_iterator(3, num_examples=2)) == [2]
    # times caps the number of batches.
    assert list(get_request_iterator(2, times=3)) == [2, 2, 2]
    assert list(get_request_iterator(3, times=1)) == [3]
    # Unbounded scheme yields the batch size indefinitely.
    # BUG FIX: the original `assert [next(it) == 3 for _ in range(10)]`
    # asserted a non-empty list, which is always true; all() actually
    # checks every yielded value.
    it = get_request_iterator(3)
    assert all(next(it) == 3 for _ in range(10))
    # num_examples and times are mutually exclusive.
    assert_raises(ValueError, get_request_iterator, 10, 2, 2)
    assert not ConstantScheme(3, 3).requests_examples
def test_sequential_scheme():
    """SequentialScheme partitions examples into consecutive batches."""
    get_request_iterator = iterator_requester(SequentialScheme)
    # An integer example count enumerates indices 0..n-1.
    assert list(get_request_iterator(5, 3)) == [[0, 1, 2], [3, 4]]
    assert list(get_request_iterator(4, 2)) == [[0, 1], [2, 3]]
    # An explicit index list is batched in the order given.
    assert list(get_request_iterator([4, 3, 2, 1, 0], 3)) == [[4, 3, 2], [1, 0]]
    assert list(get_request_iterator([3, 2, 1, 0], 2)) == [[3, 2], [1, 0]]
    assert not SequentialScheme(3, 3).requests_examples
def test_shuffled_scheme_sorted_indices():
    """ShuffledScheme with sorted_indices=True returns sorted batches of a
    shuffled permutation.

    A twin RNG seeded identically predicts the expected shuffle; statement
    order matters because each call consumes state from the shared rng.
    """
    get_request_iterator = iterator_requester(ShuffledScheme)
    indices = list(range(7))
    rng = numpy.random.RandomState(3)
    test_rng = numpy.random.RandomState(3)
    test_rng.shuffle(indices)
    assert list(get_request_iterator(7, 3, rng=rng, sorted_indices=True)) == \
        [sorted(indices[:3]), sorted(indices[3:6]), sorted(indices[6:])]
    # Second draw from the same rng must differ from the first.
    assert list(get_request_iterator(7, 3, rng=rng, sorted_indices=True)) != \
        [sorted(indices[:3]), sorted(indices[3:6]), sorted(indices[6:])]
    indices = list(range(6))[::-1]
    expected = indices[:]
    rng = numpy.random.RandomState(3)
    test_rng = numpy.random.RandomState(3)
    test_rng.shuffle(expected)
    assert (list(get_request_iterator(indices, 3, rng=rng,
                                      sorted_indices=True)) ==
            [sorted(expected[:3]), sorted(expected[3:6])])
def test_shuffled_scheme_unsorted_indices():
    """ShuffledScheme with sorted_indices=False keeps shuffle order within
    each batch.

    Mirrors the sorted_indices test; relies on a twin RNG with the same
    seed, so call order against the shared rng is significant.
    """
    get_request_iterator = iterator_requester(ShuffledScheme)
    indices = list(range(7))
    rng = numpy.random.RandomState(3)
    test_rng = numpy.random.RandomState(3)
    test_rng.shuffle(indices)
    assert list(get_request_iterator(7, 3, rng=rng, sorted_indices=False)) == \
        [indices[:3], indices[3:6], indices[6:]]
    # Second draw from the same rng must differ from the first.
    assert list(get_request_iterator(7, 3, rng=rng, sorted_indices=False)) != \
        [indices[:3], indices[3:6], indices[6:]]
    indices = list(range(6))[::-1]
    expected = indices[:]
    rng = numpy.random.RandomState(3)
    test_rng = numpy.random.RandomState(3)
    test_rng.shuffle(expected)
    assert (list(get_request_iterator(indices, 3, rng=rng,
                                      sorted_indices=False)) ==
            [expected[:3], expected[3:6]])
def test_shuffled_scheme_requests_batches():
    """ShuffledScheme produces batch requests, not single examples."""
    assert not ShuffledScheme(3, 3).requests_examples
def test_shuffled_example_scheme():
    """ShuffledExampleScheme yields one shuffled permutation of the indices."""
    get_request_iterator = iterator_requester(ShuffledExampleScheme)
    indices = list(range(7))
    rng = numpy.random.RandomState(3)
    test_rng = numpy.random.RandomState(3)
    test_rng.shuffle(indices)
    assert list(get_request_iterator(7, rng=rng)) == indices
def test_shuffled_example_scheme_no_rng():
    """A default RNG is created when none is passed."""
    scheme = ShuffledExampleScheme(7)
    assert scheme.rng is not None
def test_shuffled_example_scheme_requests_examples():
    """ShuffledExampleScheme produces example requests."""
    assert ShuffledExampleScheme(3).requests_examples
def test_sequential_example_scheme():
    """SequentialExampleScheme yields indices one by one, in order given."""
    get_request_iterator = iterator_requester(SequentialExampleScheme)
    assert list(get_request_iterator(7)) == list(range(7))
    assert list(get_request_iterator(range(7)[::-1])) == list(range(7)[::-1])
def test_sequential_example_scheme_requests_examples():
    """SequentialExampleScheme produces example requests."""
    assert SequentialExampleScheme(3).requests_examples
def test_concatenated_scheme():
    """ConcatenatedScheme chains its sub-schemes' iterators in order."""
    sch = ConcatenatedScheme(schemes=[ConstantScheme(batch_size=10, times=5),
                                      ConstantScheme(batch_size=20, times=3),
                                      ConstantScheme(batch_size=30, times=1)])
    assert (list(sch.get_request_iterator()) ==
            ([10] * 5) + ([20] * 3) + [30])
def test_concatenated_scheme_raises_value_error_on_different_request_types():
    """Mixing batch and example sub-schemes is rejected at construction."""
    assert_raises(ValueError, ConcatenatedScheme,
                  [ConstantScheme(batch_size=10, times=5),
                   SequentialExampleScheme(examples=3)])
def test_concatenated_scheme_infers_request_type():
    """requests_examples is inherited from the (uniform) sub-schemes."""
    assert not ConcatenatedScheme(
        schemes=[ConstantScheme(batch_size=10, times=5),
                 ConstantScheme(batch_size=10, times=5)]).requests_examples
    assert ConcatenatedScheme(
        schemes=[SequentialExampleScheme(examples=10),
                 SequentialExampleScheme(examples=10)]).requests_examples
def test_cross_validation():
    """cross_validation yields (train, valid[, valid_size]) scheme folds.

    Checks strict-mode rejection of uneven splits, fold contents for both
    example and batch schemes, re-iterability of each fold's iterator, and
    exhaustion after the last fold.
    """
    # test raise when strict=True
    cross = cross_validation(SequentialExampleScheme, 10, 3)
    assert_raises(ValueError, next, cross)
    # test IndexScheme when strict=False
    cross = cross_validation(SequentialExampleScheme, 10, 3, False)
    (train, valid, valid_size) = next(cross)
    assert list(train.get_request_iterator()) == list(range(3, 10))
    assert list(valid.get_request_iterator()) == list(range(0, 3))
    # test that indices are not depleted
    assert list(train.get_request_iterator()) == list(range(3, 10))
    assert list(valid.get_request_iterator()) == list(range(0, 3))
    assert valid_size == 3
    (train, valid, valid_size) = next(cross)
    assert (list(train.get_request_iterator()) ==
            list(range(0, 3)) + list(range(6, 10)))
    assert list(valid.get_request_iterator()) == list(range(3, 6))
    # test that indices are not depleted
    assert (list(train.get_request_iterator()) ==
            list(range(0, 3)) + list(range(6, 10)))
    assert list(valid.get_request_iterator()) == list(range(3, 6))
    assert valid_size == 3
    (train, valid, valid_size) = next(cross)
    # Last fold absorbs the remainder (4 validation examples, not 3).
    assert list(train.get_request_iterator()) == list(range(0, 6))
    assert list(valid.get_request_iterator()) == list(range(6, 10))
    # test that indices are not depleted
    assert list(train.get_request_iterator()) == list(range(0, 6))
    assert list(valid.get_request_iterator()) == list(range(6, 10))
    assert valid_size == 4
    assert_raises(StopIteration, next, cross)
    # test BatchScheme
    cross = cross_validation(SequentialScheme, 8, 2, batch_size=2)
    (train, valid) = next(cross)
    assert list(train.get_request_iterator()) == [[4, 5], [6, 7]]
    assert list(valid.get_request_iterator()) == [[0, 1], [2, 3]]
    # test that indices are not depleted
    assert list(train.get_request_iterator()) == [[4, 5], [6, 7]]
    assert list(valid.get_request_iterator()) == [[0, 1], [2, 3]]
    (train, valid) = next(cross)
    assert list(train.get_request_iterator()) == [[0, 1], [2, 3]]
    assert list(valid.get_request_iterator()) == [[4, 5], [6, 7]]
    # test that indices are not depleted
    assert list(train.get_request_iterator()) == [[0, 1], [2, 3]]
    assert list(valid.get_request_iterator()) == [[4, 5], [6, 7]]
    assert_raises(StopIteration, next, cross)
|
mit
|
keen99/SickRage
|
lib/pyasn1/type/tagmap.py
|
200
|
1772
|
from pyasn1 import error
class TagMap:
    """Map from ASN.1 tag sets to types, with negative entries and a default.

    A tag set is "in" the map if it appears in the positive map, or if a
    default type is configured and the tag set is not explicitly excluded
    by the negative map.
    """
    def __init__(self, posMap=None, negMap=None, defType=None):
        # IDIOM FIX: the original used mutable default arguments
        # (posMap={}, negMap={}); harmless here because of the copies,
        # but replaced with the None-sentinel pattern.
        self.__posMap = ({} if posMap is None else posMap).copy()
        self.__negMap = ({} if negMap is None else negMap).copy()
        self.__defType = defType
    def __contains__(self, tagSet):
        return tagSet in self.__posMap or \
               self.__defType is not None and tagSet not in self.__negMap
    def __getitem__(self, tagSet):
        """Return the type for tagSet.

        Raises error.PyAsn1Error for explicitly excluded (negative) tag
        sets, and KeyError when unknown and no default type is set.
        """
        if tagSet in self.__posMap:
            return self.__posMap[tagSet]
        elif tagSet in self.__negMap:
            raise error.PyAsn1Error('Key in negative map')
        elif self.__defType is not None:
            return self.__defType
        else:
            # ROBUSTNESS: include the missing key in the exception
            # (was a bare KeyError()).
            raise KeyError(tagSet)
    def __repr__(self):
        s = '%r/%r' % (self.__posMap, self.__negMap)
        if self.__defType is not None:
            s = s + '/%r' % (self.__defType,)
        return s
    def clone(self, parentType, tagMap, uniq=False):
        """Return a new TagMap with *tagMap* merged in.

        Positive keys of tagMap are re-mapped to parentType; with
        uniq=True a duplicate positive key raises.  At most one of the
        two maps may carry a default type.
        """
        if self.__defType is not None and tagMap.getDef() is not None:
            raise error.PyAsn1Error('Duplicate default value at %s' % (self,))
        if tagMap.getDef() is not None:
            defType = tagMap.getDef()
        else:
            defType = self.__defType
        posMap = self.__posMap.copy()
        for k in tagMap.getPosMap():
            if uniq and k in posMap:
                raise error.PyAsn1Error('Duplicate positive key %s' % (k,))
            posMap[k] = parentType
        negMap = self.__negMap.copy()
        negMap.update(tagMap.getNegMap())
        return self.__class__(
            posMap, negMap, defType,
            )
    # Accessors return copies so callers cannot mutate internal state.
    def getPosMap(self): return self.__posMap.copy()
    def getNegMap(self): return self.__negMap.copy()
    def getDef(self): return self.__defType
|
gpl-3.0
|
dydek/django
|
django/views/decorators/vary.py
|
586
|
1200
|
from functools import wraps
from django.utils.cache import patch_vary_headers
from django.utils.decorators import available_attrs
def vary_on_headers(*headers):
    """
    A view decorator that adds the specified headers to the Vary header of the
    response. Usage:

        @vary_on_headers('Cookie', 'Accept-language')
        def index(request):
            ...

    Note that the header names are not case-sensitive.
    """
    def decorator(func):
        @wraps(func, assigned=available_attrs(func))
        def wrapper(*args, **kwargs):
            result = func(*args, **kwargs)
            patch_vary_headers(result, headers)
            return result
        return wrapper
    return decorator
def vary_on_cookie(func):
    """
    A view decorator that adds "Cookie" to the Vary header of a response. This
    indicates that a page's contents depends on cookies. Usage:

        @vary_on_cookie
        def index(request):
            ...
    """
    @wraps(func, assigned=available_attrs(func))
    def wrapper(*args, **kwargs):
        result = func(*args, **kwargs)
        patch_vary_headers(result, ('Cookie',))
        return result
    return wrapper
|
bsd-3-clause
|
albertomurillo/ansible
|
test/units/modules/network/nso/test_nso_action.py
|
40
|
7714
|
#
# Copyright (c) 2017 Cisco and/or its affiliates.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
import json
from units.compat.mock import patch
from ansible.modules.network.nso import nso_action
from . import nso_module
from .nso_module import MockResponse
from units.modules.utils import set_module_args
class TestNsoAction(nso_module.TestNsoModule):
    """Unit tests for the nso_action module.

    Every test primes a queue of MockResponse objects, routes open_url
    through nso_module.mock_call, runs the module, and finally asserts the
    queue was fully consumed.  The shared JSON-RPC boilerplate (login,
    version probe, transaction, schema fetch, logout, standard connection
    arguments) is factored into private helpers.
    """

    module = nso_action

    def _base_calls(self, path, schema_body):
        """Return the JSON-RPC exchanges shared by every test, up to and
        including the get_schema reply.

        ``schema_body`` is the raw JSON payload returned for get_schema.
        """
        return [
            MockResponse('login', {}, 200, '{}', {'set-cookie': 'id'}),
            MockResponse('get_system_setting', {'operation': 'version'}, 200, '{"result": "4.5.0"}'),
            MockResponse('new_trans', {'mode': 'read'}, 200, '{"result": {"th": 1}}'),
            MockResponse('get_schema', {'path': path}, 200, schema_body),
        ]

    def _logout_call(self):
        """Return the terminating logout exchange."""
        return MockResponse('logout', {}, 200, '{"result": {}}')

    def _run_module(self, open_url_mock, path, calls, **extra_args):
        """Wire the mocked transport to ``calls`` and stage the module
        arguments: the standard connection settings plus ``extra_args``."""
        open_url_mock.side_effect = lambda *args, **kwargs: nso_module.mock_call(calls, *args, **kwargs)
        module_args = {
            'username': 'user', 'password': 'password',
            'url': 'http://localhost:8080/jsonrpc',
            'path': path,
            'input': {},
            'validate_certs': False
        }
        module_args.update(extra_args)
        set_module_args(module_args)

    @patch('ansible.module_utils.network.nso.nso.open_url')
    def test_nso_action_missing(self, open_url_mock):
        # get_schema rejects the path -> module fails with invalid params.
        path = '/ncs:devices/device{ce0}/missing'
        calls = self._base_calls(
            path,
            '{"error": {"data": {"param": "path"}, "type": "rpc.method.invalid_params"}}')
        calls.append(self._logout_call())

        self._run_module(open_url_mock, path, calls)
        self.execute_module(failed=True, msg='NSO get_schema invalid params. path = /ncs:devices/device{ce0}/missing')
        self.assertEqual(0, len(calls))

    @patch('ansible.module_utils.network.nso.nso.open_url')
    def test_nso_action_not_action(self, open_url_mock):
        # The schema describes a plain leaf -> module refuses to run it.
        path = '/ncs:devices/device{ce0}/description'
        schema = nso_module.load_fixture('description_schema.json')
        calls = self._base_calls(path, '{"result": %s}' % (json.dumps(schema, )))
        calls.append(self._logout_call())

        self._run_module(open_url_mock, path, calls)
        self.execute_module(failed=True, msg='/ncs:devices/device{ce0}/description is not an action')
        self.assertEqual(0, len(calls))

    @patch('ansible.module_utils.network.nso.nso.open_url')
    def test_nso_action_ok(self, open_url_mock):
        # Valid action path -> run_action is invoked and its result returned.
        path = '/ncs:devices/device{ce0}/sync-from'
        output = {"result": True}
        schema = nso_module.load_fixture('sync_from_schema.json')
        calls = self._base_calls(path, '{"result": %s}' % (json.dumps(schema, )))
        calls.append(MockResponse('run_action', {'path': path, 'params': {}}, 200, '{"result": {"result": true}}'))
        calls.append(self._logout_call())

        self._run_module(open_url_mock, path, calls)
        self.execute_module(changed=True, output=output)
        self.assertEqual(0, len(calls))

    @patch('ansible.module_utils.network.nso.nso.open_url')
    def test_nso_action_validate_ok(self, open_url_mock):
        # output_required matches the action result -> success.
        path = '/test:action'
        output = {'version': [{'name': 'v1'}, {'name': 'v2'}]}
        schema = nso_module.load_fixture('complex_schema.json')
        calls = self._base_calls(path, '{"result": %s}' % (json.dumps(schema, )))
        calls.append(MockResponse('run_action', {'path': path, 'params': {}}, 200,
                                  '{"result": {"version": [{"name": "v1"}, {"name": "v2"}]}}'))
        calls.append(self._logout_call())

        self._run_module(open_url_mock, path, calls, output_required=output)
        self.execute_module(changed=True, output=output)
        self.assertEqual(0, len(calls))

    @patch('ansible.module_utils.network.nso.nso.open_url')
    def test_nso_action_validate_failed(self, open_url_mock):
        # output_required disagrees with the action result -> module fails.
        path = '/test:action'
        output_mismatch = {'version': [{'name': 'v1'}, {'name': 'v3'}]}
        schema = nso_module.load_fixture('complex_schema.json')
        calls = self._base_calls(path, '{"result": %s}' % (json.dumps(schema, )))
        calls.append(MockResponse('run_action', {'path': path, 'params': {}}, 200,
                                  '{"result": {"version": [{"name": "v1"}, {"name": "v2"}]}}'))
        calls.append(self._logout_call())

        self._run_module(open_url_mock, path, calls, output_required=output_mismatch)
        self.execute_module(failed=True, msg="version value mismatch. expected [{'name': 'v1'}, {'name': 'v3'}] got [{'name': 'v1'}, {'name': 'v2'}]")
        self.assertEqual(0, len(calls))
|
gpl-3.0
|
markgrovs/alfred-airmail-to-todoist
|
src/lib/requests/packages/chardet/utf8prober.py
|
2919
|
2652
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .mbcssm import UTF8SMModel
ONE_CHAR_PROB = 0.5


class UTF8Prober(CharSetProber):
    """Probes a byte stream for UTF-8 using the UTF-8 coding state machine."""

    def __init__(self):
        CharSetProber.__init__(self)
        # State machine driven by the UTF-8 model tables.
        self._mCodingSM = CodingStateMachine(UTF8SMModel)
        self.reset()

    def reset(self):
        CharSetProber.reset(self)
        self._mCodingSM.reset()
        # Number of complete multi-byte sequences observed so far.
        self._mNumOfMBChar = 0

    def get_charset_name(self):
        return "utf-8"

    def feed(self, aBuf):
        for byte in aBuf:
            state = self._mCodingSM.next_state(byte)
            if state == constants.eError:
                # Invalid sequence: rule this prober out.
                self._mState = constants.eNotMe
                break
            if state == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            if state == constants.eStart and self._mCodingSM.get_current_charlen() >= 2:
                # A full multi-byte character just completed.
                self._mNumOfMBChar += 1

        if self.get_state() == constants.eDetecting:
            if self.get_confidence() > constants.SHORTCUT_THRESHOLD:
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        unlike = 0.99
        if self._mNumOfMBChar >= 6:
            # Plenty of multi-byte characters: effectively certain.
            return unlike
        # Each observed multi-byte char halves the "not UTF-8" likelihood
        # (exactly equivalent to the sequential multiply, since 0.5 powers
        # are exact in binary floating point).
        return 1.0 - unlike * (ONE_CHAR_PROB ** self._mNumOfMBChar)
|
mit
|
ramanajee/phantomjs
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/port/efl.py
|
117
|
5906
|
# Copyright (C) 2011 ProFUSION Embedded Systems. All rights reserved.
# Copyright (C) 2011 Samsung Electronics. All rights reserved.
# Copyright (C) 2012 Intel Corporation
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""WebKit Efl implementation of the Port interface."""
import os
from webkitpy.layout_tests.models.test_configuration import TestConfiguration
from webkitpy.port.base import Port
from webkitpy.port.pulseaudio_sanitizer import PulseAudioSanitizer
from webkitpy.port.xvfbdriver import XvfbDriver
class EflPort(Port):
    """WebKit port implementation for the EFL platform."""

    port_name = 'efl'

    def __init__(self, *args, **kwargs):
        super(EflPort, self).__init__(*args, **kwargs)
        # All tools are launched inside the EFL jhbuild environment.
        self._jhbuild_wrapper_path = [self.path_from_webkit_base('Tools', 'jhbuild', 'jhbuild-wrapper'), '--efl', 'run']
        self.set_option_default('wrapper', ' '.join(self._jhbuild_wrapper_path))
        self.webprocess_cmd_prefix = self.get_option('webprocess_cmd_prefix')
        self._pulseaudio_sanitizer = PulseAudioSanitizer()

    def _port_flag_for_scripts(self):
        return "--efl"

    def setup_test_run(self):
        super(EflPort, self).setup_test_run()
        self._pulseaudio_sanitizer.unload_pulseaudio_module()

    def setup_environ_for_server(self, server_name=None):
        env = super(EflPort, self).setup_environ_for_server(server_name)
        # If DISPLAY environment variable is unset in the system
        # e.g. on build bot, remove DISPLAY variable from the dictionary
        if 'DISPLAY' not in os.environ:
            del env['DISPLAY']
        env['TEST_RUNNER_INJECTED_BUNDLE_FILENAME'] = self._build_path('lib', 'libTestRunnerInjectedBundle.so')
        env['TEST_RUNNER_PLUGIN_PATH'] = self._build_path('lib')
        # Silence GIO warnings about using the "memory" GSettings backend.
        env['GSETTINGS_BACKEND'] = 'memory'
        if self.webprocess_cmd_prefix:
            env['WEB_PROCESS_CMD_PREFIX'] = self.webprocess_cmd_prefix
        return env

    def default_timeout_ms(self):
        # Tests run considerably slower under gdb or valgrind.
        if self.get_option('webprocess_cmd_prefix'):
            return 350 * 1000
        return super(EflPort, self).default_timeout_ms()

    def clean_up_test_run(self):
        super(EflPort, self).clean_up_test_run()
        self._pulseaudio_sanitizer.restore_pulseaudio_module()

    def _generate_all_test_configurations(self):
        configurations = []
        for build_type in self.ALL_BUILD_TYPES:
            configurations.append(TestConfiguration(version=self._version, architecture='x86', build_type=build_type))
        return configurations

    def _driver_class(self):
        return XvfbDriver

    def _path_to_driver(self):
        return self._build_path('bin', self.driver_name())

    def _path_to_image_diff(self):
        return self._build_path('bin', 'ImageDiff')

    def _image_diff_command(self, *args, **kwargs):
        base_command = super(EflPort, self)._image_diff_command(*args, **kwargs)
        return self._jhbuild_wrapper_path + base_command

    def _path_to_webcore_library(self):
        # Prefer the static library when it exists, else the shared one.
        static_path = self._build_path('lib', 'libwebcore_efl.a')
        if self._filesystem.exists(static_path):
            return static_path
        return self._build_path('lib', 'libwebcore_efl.so')

    def _search_paths(self):
        if self.get_option('webkit_test_runner'):
            return [self.port_name + '-wk2', 'wk2', self.port_name]
        return [self.port_name + '-wk1', self.port_name]

    def default_baseline_search_path(self):
        return map(self._webkit_baseline_path, self._search_paths())

    def _port_specific_expectations_files(self):
        # FIXME: We should be able to use the default algorithm here.
        paths = [self._filesystem.join(self._webkit_baseline_path(p), 'TestExpectations') for p in self._search_paths()]
        paths.reverse()
        return paths

    def show_results_html_file(self, results_filename):
        # FIXME: We should find a way to share this implmentation with Gtk,
        # or teach run-launcher how to call run-safari and move this down to WebKitPort.
        run_launcher_args = ["file://%s" % results_filename]
        if self.get_option('webkit_test_runner'):
            run_launcher_args.append('-2')
        # FIXME: old-run-webkit-tests also added ["-graphicssystem", "raster", "-style", "windows"]
        # FIXME: old-run-webkit-tests converted results_filename path for cygwin.
        self._run_script("run-launcher", run_launcher_args)

    def check_sys_deps(self, needs_http):
        return super(EflPort, self).check_sys_deps(needs_http) and XvfbDriver.check_xvfb(self)
|
bsd-3-clause
|
mm1ke/portage
|
pym/portage/data.py
|
6
|
9785
|
# data.py -- Calculated/Discovered Data Values
# Copyright 1998-2014 Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
import os, pwd, grp, platform, sys
import portage
portage.proxy.lazyimport.lazyimport(globals(),
'portage.output:colorize',
'portage.util:writemsg',
'portage.util.path:first_existing',
'subprocess'
)
from portage.localization import _
# Platform discovery: ostype is e.g. "Linux", "Darwin", "FreeBSD".
ostype = platform.system()
userland = None
# DragonFly and the *BSDs get BSD userland semantics; everything else is
# assumed to ship GNU userland tools.
if ostype == "DragonFly" or ostype.endswith("BSD"):
    userland = "BSD"
else:
    userland = "GNU"

# os.lchown is not available on every platform.  On Darwin a silent no-op
# stub is substituted; elsewhere a stub that warns (invoked once here,
# immediately) that python must be rebuilt.  Whatever implementation wins
# is wrapped for unicode path handling.
lchown = getattr(os, "lchown", None)

if not lchown:
    if ostype == "Darwin":
        def lchown(*_args, **_kwargs):
            pass
    else:
        def lchown(*_args, **_kwargs):
            writemsg(colorize("BAD", "!!!") + _(
                " It seems that os.lchown does not"
                " exist. Please rebuild python.\n"), noiselevel=-1)
        lchown()

lchown = portage._unicode_func_wrapper(lchown)
def _target_eprefix():
    """
    Calculate the target EPREFIX, which may be different from
    portage.const.EPREFIX due to cross-prefix support.  Equivalent to
    portage.settings["EPREFIX"], but computed without the expense of
    instantiating portage.settings.
    @rtype: str
    @return: the target EPREFIX
    """
    prefix = os.environ.get("EPREFIX", portage.const.EPREFIX)
    if not prefix:
        # Empty prefix: nothing to normalize.
        return prefix
    return portage.util.normalize_path(prefix)
def _target_root():
    """
    Calculate the target ROOT.  Equivalent to portage.settings["ROOT"],
    but computed without the expense of instantiating portage.settings.
    @rtype: str
    @return: the target ROOT (always ends with a slash)
    """
    # An empty or unset ROOT both fall back to the filesystem root.
    root = os.environ.get("ROOT") or os.sep
    root = portage.util.normalize_path(root)
    return root.rstrip(os.sep) + os.sep
def portage_group_warning():
    """Emit a multi-line security warning about portage group membership."""
    warn_prefix = colorize("BAD", "*** WARNING *** ")
    message_lines = (
        "For security reasons, only system administrators should be",
        "allowed in the portage group. Untrusted users or processes",
        "can potentially exploit the portage group for attacks such as",
        "local privilege escalation.",
    )
    for line in message_lines:
        writemsg(warn_prefix, noiselevel=-1)
        writemsg(line, noiselevel=-1)
        writemsg("\n", noiselevel=-1)
    writemsg("\n", noiselevel=-1)
# Portage has 3 security levels that depend on the uid and gid of the main
# process and are assigned according to the following table:
#
# Privileges secpass uid gid
# normal 0 any any
# group 1 any portage_gid
# super 2 0 any
#
# If the "wheel" group does not exist then wheelgid falls back to 0.
# If the "portage" group does not exist then portage_uid falls back to wheelgid.
# If the current user is not root, but has write access to the
# EROOT directory (not due to the 0002 bit), then use "unprivileged"
# mode which sets secpass = 2 and uses the UID and GID of the EROOT
# directory to generate default PORTAGE_INST_GID, PORTAGE_INST_UID,
# PORTAGE_USERNAME, and PORTAGE_GRPNAME settings.
def _unprivileged_mode(eroot, eroot_st):
return os.getuid() != 0 and os.access(eroot, os.W_OK) and \
not eroot_st.st_mode & 0o0002
# Effective uid of this process; the secpass level derives from it.
uid = os.getuid()

# gid of the "wheel" group; falls back to 0 when no such group exists.
wheelgid = 0
try:
    wheelgid = grp.getgrnam("wheel")[2]
except KeyError:
    pass
# The portage_uid and portage_gid global constants, and others that
# depend on them are initialized lazily, in order to allow configuration
# via make.conf. Eventually, these constants may be deprecated in favor
# of config attributes, since it's conceivable that multiple
# configurations with different constants could be used simultaneously.
# Names present in this set have already been computed by _get_global().
_initialized_globals = set()
def _get_global(k):
    """
    Lazily compute, memoize (via globals() and _initialized_globals) and
    return the module-level constant named k: one of 'secpass',
    'portage_gid', 'portage_uid', 'userpriv_groups', '_portage_grpname'
    or '_portage_username'.  Raises AssertionError for any other name.
    """
    if k in _initialized_globals:
        return globals()[k]

    if k == 'secpass':

        unprivileged = False
        if hasattr(portage, 'settings'):
            unprivileged = "unprivileged" in portage.settings.features
        else:
            # The config class has equivalent code, but we also need to
            # do it here if _disable_legacy_globals() has been called.
            eroot_or_parent = first_existing(os.path.join(
                _target_root(), _target_eprefix().lstrip(os.sep)))
            try:
                eroot_st = os.stat(eroot_or_parent)
            except OSError:
                pass
            else:
                unprivileged = _unprivileged_mode(
                    eroot_or_parent, eroot_st)

        # secpass table: root and "unprivileged" mode get level 2,
        # membership in the portage group gets level 1.
        v = 0
        if uid == 0:
            v = 2
        elif unprivileged:
            v = 2
        elif _get_global('portage_gid') in os.getgroups():
            v = 1

    elif k in ('portage_gid', 'portage_uid'):
        #Discover the uid and gid of the portage user/group
        keyerror = False
        try:
            portage_uid = pwd.getpwnam(_get_global('_portage_username')).pw_uid
        except KeyError:
            keyerror = True
            portage_uid = 0
        try:
            portage_gid = grp.getgrnam(_get_global('_portage_grpname')).gr_gid
        except KeyError:
            keyerror = True
            portage_gid = 0

        # Suppress this error message if both PORTAGE_GRPNAME and
        # PORTAGE_USERNAME are set to "root", for things like
        # Android (see bug #454060).
        if keyerror and not (_get_global('_portage_username') == "root" and
            _get_global('_portage_grpname') == "root"):
            writemsg(colorize("BAD",
                _("portage: 'portage' user or group missing.")) + "\n", noiselevel=-1)
            writemsg(_(
                " For the defaults, line 1 goes into passwd, "
                "and 2 into group.\n"), noiselevel=-1)
            writemsg(colorize("GOOD",
                " portage:x:250:250:portage:/var/tmp/portage:/bin/false") \
                + "\n", noiselevel=-1)
            writemsg(colorize("GOOD", " portage::250:portage") + "\n",
                noiselevel=-1)
            portage_group_warning()

        # Both constants are initialized together, then the requested one
        # is returned early (the shared tail below is skipped).
        globals()['portage_gid'] = portage_gid
        _initialized_globals.add('portage_gid')
        globals()['portage_uid'] = portage_uid
        _initialized_globals.add('portage_uid')

        if k == 'portage_gid':
            return portage_gid
        elif k == 'portage_uid':
            return portage_uid
        else:
            raise AssertionError('unknown name: %s' % k)

    elif k == 'userpriv_groups':
        v = [_get_global('portage_gid')]
        if secpass >= 2:
            # Get a list of group IDs for the portage user. Do not use
            # grp.getgrall() since it is known to trigger spurious
            # SIGPIPE problems with nss_ldap.
            cmd = ["id", "-G", _portage_username]

            if sys.hexversion < 0x3020000 and sys.hexversion >= 0x3000000:
                # Python 3.1 _execvp throws TypeError for non-absolute executable
                # path passed as bytes (see https://bugs.python.org/issue8513).
                fullname = portage.process.find_binary(cmd[0])
                if fullname is None:
                    # "id" not found: memoize the portage_gid-only list.
                    globals()[k] = v
                    _initialized_globals.add(k)
                    return v
                cmd[0] = fullname

            encoding = portage._encodings['content']
            cmd = [portage._unicode_encode(x,
                encoding=encoding, errors='strict') for x in cmd]
            proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT)
            myoutput = proc.communicate()[0]
            status = proc.wait()
            if os.WIFEXITED(status) and os.WEXITSTATUS(status) == os.EX_OK:
                for x in portage._unicode_decode(myoutput,
                    encoding=encoding, errors='strict').split():
                    try:
                        v.append(int(x))
                    except ValueError:
                        pass
                v = sorted(set(v))

    # Avoid instantiating portage.settings when the desired
    # variable is set in os.environ.
    elif k in ('_portage_grpname', '_portage_username'):
        v = None
        if k == '_portage_grpname':
            env_key = 'PORTAGE_GRPNAME'
        else:
            env_key = 'PORTAGE_USERNAME'

        if env_key in os.environ:
            v = os.environ[env_key]
        elif hasattr(portage, 'settings'):
            v = portage.settings.get(env_key)
        else:
            # The config class has equivalent code, but we also need to
            # do it here if _disable_legacy_globals() has been called.
            eroot_or_parent = first_existing(os.path.join(
                _target_root(), _target_eprefix().lstrip(os.sep)))
            try:
                eroot_st = os.stat(eroot_or_parent)
            except OSError:
                pass
            else:
                # In unprivileged mode, default to the names of the
                # owner/group of the EROOT directory itself.
                if _unprivileged_mode(eroot_or_parent, eroot_st):
                    if k == '_portage_grpname':
                        try:
                            grp_struct = grp.getgrgid(eroot_st.st_gid)
                        except KeyError:
                            pass
                        else:
                            v = grp_struct.gr_name
                    else:
                        try:
                            pwd_struct = pwd.getpwuid(eroot_st.st_uid)
                        except KeyError:
                            pass
                        else:
                            v = pwd_struct.pw_name

        if v is None:
            v = 'portage'
    else:
        raise AssertionError('unknown name: %s' % k)

    # Memoize the computed value for all branches that did not return early.
    globals()[k] = v
    _initialized_globals.add(k)
    return v
class _GlobalProxy(portage.proxy.objectproxy.ObjectProxy):
    """
    Lazy proxy for one of the module-level constants: all access is
    forwarded to the real value, which is computed on first use by
    _get_global().
    """

    __slots__ = ('_name',)

    def __init__(self, name):
        portage.proxy.objectproxy.ObjectProxy.__init__(self)
        # Bypass the proxy's own attribute forwarding to store the slot.
        object.__setattr__(self, '_name', name)

    def _get_target(self):
        return _get_global(object.__getattribute__(self, '_name'))
# Install lazy proxies for all of the lazily-initialized globals, so that
# merely importing this module does not trigger their computation.
for k in ('portage_gid', 'portage_uid', 'secpass', 'userpriv_groups',
    '_portage_grpname', '_portage_username'):
    globals()[k] = _GlobalProxy(k)
del k
def _init(settings):
    """
    Use config variables like PORTAGE_GRPNAME and PORTAGE_USERNAME to
    initialize global variables. This allows settings to come from make.conf
    instead of requiring them to be set in the calling environment.
    """
    if '_portage_grpname' not in _initialized_globals and \
        '_portage_username' not in _initialized_globals:

        # Prevents "TypeError: expected string" errors
        # from grp.getgrnam() with PyPy
        force_native = platform.python_implementation() == 'PyPy'

        for global_name, settings_key in (
            ('_portage_grpname', 'PORTAGE_GRPNAME'),
            ('_portage_username', 'PORTAGE_USERNAME')):
            value = settings.get(settings_key, 'portage')
            if force_native:
                value = portage._native_string(value)
            globals()[global_name] = value
            _initialized_globals.add(global_name)

    if 'secpass' not in _initialized_globals:
        # Same secpass table as _get_global: root and "unprivileged"
        # mode give level 2, portage group membership gives level 1.
        level = 0
        if uid == 0:
            level = 2
        elif "unprivileged" in settings.features:
            level = 2
        elif portage_gid in os.getgroups():
            level = 1
        globals()['secpass'] = level
        _initialized_globals.add('secpass')
|
gpl-2.0
|
zhouzhenghui/python-for-android
|
python3-alpha/python3-src/Lib/idlelib/FileList.py
|
55
|
3814
|
import os
from tkinter import *
import tkinter.messagebox as tkMessageBox
class FileList:
    """Registry of open EditorWindow instances, keyed by canonical filename.

    self.dict maps a normalized filename key to its EditorWindow, while
    self.inversedict maps each EditorWindow back to its key (None for
    windows without a saved file).  When the last registered window
    closes, the Tk mainloop is stopped.
    """

    # N.B. this import overridden in PyShellFileList.
    from idlelib.EditorWindow import EditorWindow

    def __init__(self, root):
        self.root = root
        self.dict = {}          # filename key -> EditorWindow
        self.inversedict = {}   # EditorWindow -> filename key (or None)
        self.vars = {} # For EditorWindow.getrawvar (shared Tcl variables)

    def open(self, filename, action=None):
        """Open filename in an editor window and return the window.

        If the file is already open, its existing window is raised and
        returned.  If action is given, it is called with the canonical
        filename instead of creating a window.  Returns None for
        directories and for windows whose load failed.
        """
        assert filename
        filename = self.canonize(filename)
        if os.path.isdir(filename):
            # This can happen when bad filename is passed on command line:
            tkMessageBox.showerror(
                "File Error",
                "%r is a directory." % (filename,),
                master=self.root)
            return None
        key = os.path.normcase(filename)
        if key in self.dict:
            edit = self.dict[key]
            edit.top.wakeup()
            return edit
        if action:
            # Don't create window, perform 'action', e.g. open in same window
            return action(filename)
        else:
            edit = self.EditorWindow(self, filename, key)
            if edit.good_load:
                return edit
            else:
                # Load failed: tear the half-built window down again.
                edit._close()
                return None

    def gotofileline(self, filename, lineno=None):
        """Open filename and, when lineno is given, jump to that line."""
        edit = self.open(filename)
        if edit is not None and lineno is not None:
            edit.gotoline(lineno)

    def new(self, filename=None):
        """Create a new editor window, optionally preloaded with filename."""
        return self.EditorWindow(self, filename)

    def close_all_callback(self, *args, **kwds):
        """Close every window; stop early if the user cancels one close."""
        for edit in list(self.inversedict):
            reply = edit.close()
            if reply == "cancel":
                break
        return "break"

    def unregister_maybe_terminate(self, edit):
        """Forget edit and quit the mainloop if it was the last window."""
        try:
            key = self.inversedict[edit]
        except KeyError:
            print("Don't know this EditorWindow object. (close)")
            return
        if key:
            del self.dict[key]
        del self.inversedict[edit]
        if not self.inversedict:
            self.root.quit()

    def filename_changed_edit(self, edit):
        """Re-key edit after its filename changed (e.g. save-as).

        Handles windows that lost their file (key becomes None) and name
        collisions with an already-open window, which is detached from
        its key before edit takes it over.
        """
        edit.saved_change_hook()
        try:
            key = self.inversedict[edit]
        except KeyError:
            print("Don't know this EditorWindow object. (rename)")
            return
        filename = edit.io.filename
        if not filename:
            # Window no longer has a file: drop its old key, if any.
            if key:
                del self.dict[key]
            self.inversedict[edit] = None
            return
        filename = self.canonize(filename)
        newkey = os.path.normcase(filename)
        if newkey == key:
            return
        if newkey in self.dict:
            # Another window already edits this file; orphan that window.
            conflict = self.dict[newkey]
            self.inversedict[conflict] = None
            tkMessageBox.showerror(
                "Name Conflict",
                "You now have multiple edit windows open for %r" % (filename,),
                master=self.root)
        self.dict[newkey] = edit
        self.inversedict[edit] = newkey
        if key:
            try:
                del self.dict[key]
            except KeyError:
                pass

    def canonize(self, filename):
        """Return an absolute, normalized version of filename."""
        if not os.path.isabs(filename):
            try:
                pwd = os.getcwd()
            except os.error:
                pass
            else:
                filename = os.path.join(pwd, filename)
        return os.path.normpath(filename)
def _test():
    # Manual smoke test: open any filenames given on the command line (or
    # one fresh untitled window) and run the Tk event loop until every
    # window has been closed.
    from idlelib.EditorWindow import fixwordbreaks
    import sys
    root = Tk()
    fixwordbreaks(root)
    root.withdraw()
    flist = FileList(root)
    names = sys.argv[1:]
    if names:
        for name in names:
            flist.open(name)
    else:
        flist.new()
    if flist.inversedict:
        root.mainloop()

if __name__ == '__main__':
    _test()
|
apache-2.0
|
ddico/server-tools
|
base_module_doc_rst/wizard/tech_rst_guide.py
|
23
|
17280
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm, fields
from openerp import netsvc
import base64
import tempfile
import tarfile
import httplib
import os
class RstDoc(object):
    """Renders an OpenERP module's metadata as a reStructuredText page.

    `module` is an ir.module.module browse record; `objects` is the list
    of model/field descriptions gathered by the wizard.  write() produces
    the full rst text for one module.
    """

    def __init__(self, module, objects):
        # Pre-digest the browse record into a flat dict consumed by the
        # _write_* template methods.
        self.dico = {
            'name': module.name,
            'shortdesc': module.shortdesc,
            'latest_version': module.latest_version,
            'website': module.website,
            'description': self._handle_text(
                module.description.strip() or 'None'
            ),
            'report_list': self._handle_list_items(module.reports_by_module),
            'menu_list': self._handle_list_items(module.menus_by_module),
            'view_list': self._handle_list_items(module.views_by_module),
            'depends': module.dependencies_id,
            'author': module.author,
        }
        self.objects = objects
        self.module = module

    def _handle_list_items(self, list_item_as_string):
        # Split a newline-separated listing into items, escaping "*" so it
        # is not taken as rst emphasis/bullet markup.
        list_item_as_string = list_item_as_string.strip()
        if list_item_as_string:
            return [
                item.replace("*", r"\*") for
                item in
                list_item_as_string.split('\n')
            ]
        else:
            return []

    def _handle_text(self, txt):
        # Prefix every line so the text can sit inside an rst literal block.
        lst = [' %s' % line for line in txt.split('\n')]
        return '\n'.join(lst)

    def _get_download_links(self):
        # Build the bullet list of per-version zip download links; only
        # versions whose URL answers HTTP 200 to a HEAD request are kept.
        def _is_connection_status_good(link):
            server = "openerp.com"
            status_good = False
            try:
                conn = httplib.HTTPConnection(server)
                conn.request("HEAD", link)
                res = conn.getresponse()
                if res.status in (200, ):
                    status_good = True
            except (Exception, ), e:
                # Network failure: log and treat the link as unavailable.
                logger = netsvc.Logger()
                msg = """
                error connecting to server '%s' with link '%s'.
                Error message: %s
                """ % (server, link, str(e))
                logger.notifyChannel(
                    "base_module_doc_rst", netsvc.LOG_ERROR, msg
                )
                status_good = False
            return status_good
        versions = ('7.0', '8.0', 'master')
        download_links = []
        for ver in versions:
            link = 'https://apps.odoo.com/loempia/download/%s/%s.zip' % (
                ver, self.dico['name']
            )
            if _is_connection_status_good(link):
                download_links.append(" * `%s <%s>`_" % (ver, link))
        if download_links:
            res = '\n'.join(download_links)
        else:
            res = "(No download links available)"
        return res

    def _write_header(self):
        # Render the page header: sphinx module directive, rating widget,
        # title, metadata field list, description block and download links.
        dico = self.dico
        title = "%s (*%s*)" % (dico['shortdesc'], dico['name'])
        title_underline = "=" * len(title)
        dico['title'] = title
        dico['title_underline'] = title_underline
        dico['download_links'] = self._get_download_links()
        sl = [
            "",
            ".. module:: %(name)s",
            " :synopsis: %(shortdesc)s",
            " :noindex:",
            ".. ",
            "",
            ".. raw:: html",
            "",
            " <br />",
            """
            <link rel="stylesheet"
            href="../_static/hide_objects_in_sidebar.css"
            type="text/css" />
            """,
            "",
            "",
            ".. raw:: html",
            "",
            """
            <div class="js-kit-rating"
            title="" permalink="" standalone="yes" path="/%s"></div>
            """ % (dico['name'],),
            """ <script src="http://js-kit.com/ratings.js"></script>""",
            "",
            "%(title)s",
            "%(title_underline)s",
            ":Module: %(name)s",
            ":Name: %(shortdesc)s",
            ":Version: %(latest_version)s",
            ":Author: %(author)s",
            ":Directory: %(name)s",
            ":Web: %(website)s",
            "",
            "Description",
            "-----------",
            "",
            "::",
            "",
            "%(description)s",
            "",
            "Download links",
            "--------------",
            "",
            "You can download this module as a zip file in following version:",
            "",
            "%(download_links)s",
            "",
            ""]
        return '\n'.join(sl) % (dico)

    def _write_reports(self):
        # "Reports" section: one bullet per non-empty report entry.
        sl = ["",
              "Reports",
              "-------"]
        reports = self.dico['report_list']
        if reports:
            for report in reports:
                if report:
                    sl.append("")
                    sl.append(" * %s" % report)
        else:
            sl.extend(["", "None", ""])
        sl.append("")
        return '\n'.join(sl)

    def _write_menus(self):
        # "Menus" section.
        sl = ["",
              "Menus",
              "-------",
              ""]
        menus = self.dico['menu_list']
        if menus:
            for menu in menus:
                if menu:
                    sl.append(" * %s" % menu)
        else:
            sl.extend(["", "None", ""])
        sl.append("")
        return '\n'.join(sl)

    def _write_views(self):
        # "Views" section.
        sl = ["",
              "Views",
              "-----",
              ""]
        views = self.dico['view_list']
        if views:
            for view in views:
                if view:
                    sl.append(" * %s" % view)
        else:
            sl.extend(["", "None", ""])
        sl.append("")
        return '\n'.join(sl)

    def _write_depends(self):
        # "Dependencies" section: one sphinx :mod: reference per dependency.
        sl = ["",
              "Dependencies",
              "------------",
              ""]
        depends = self.dico['depends']
        if depends:
            for dependency in depends:
                sl.append(" * :mod:`%s`" % (dependency.name))
        else:
            sl.extend(["", "None", ""])
        sl.append("")
        return '\n'.join(sl)

    def _write_objects(self):
        # "Objects" section: one sub-section per model, with a field list
        # describing each field's label, type, flags and help text.
        def write_field(field_def):
            # field_def is expected to be a (name, attributes) tuple;
            # anything else is logged and skipped.
            if not isinstance(field_def, tuple):
                logger = netsvc.Logger()
                msg = "Error on Object %s: field_def: %s [type: %s]" % (
                    obj_name.encode('utf8'),
                    field_def.encode('utf8'),
                    type(field_def)
                )
                logger.notifyChannel(
                    "base_module_doc_rst", netsvc.LOG_ERROR, msg
                )
                return ""
            field_name = field_def[0]
            field_dict = field_def[1]
            field_required = field_dict.get('required', '') and ', required'
            field_readonly = field_dict.get('readonly', '') and ', readonly'
            field_help_s = field_dict.get('help', '')
            if field_help_s:
                field_help_s = "*%s*" % (field_help_s)
                field_help = '\n'.join(
                    [
                        ' %s' % line.strip() for
                        line in
                        field_help_s.split('\n')
                    ]
                )
            else:
                field_help = ''
            sl = [
                "",
                ":%s: %s, %s%s%s" % (field_name,
                                     field_dict.get('string', 'Unknown'),
                                     field_dict['type'],
                                     field_required,
                                     field_readonly),
                "",
                field_help,
            ]
            return '\n'.join(sl)
        sl = ["",
              "",
              "Objects",
              "-------"]
        if self.objects:
            for obj in self.objects:
                obj_name = obj['object'].name
                obj_model = obj['object'].model
                title = "Object: %s (%s)" % (obj_name, obj_model)
                slo = [
                    "",
                    title,
                    '#' * len(title),
                    "",
                ]
                for field in obj['fields']:
                    slf = [
                        "",
                        write_field(field),
                        "",
                    ]
                    slo.extend(slf)
                sl.extend(slo)
        else:
            sl.extend(["", "None", ""])
        # Decode the byte strings produced above so the result is unicode.
        return u'\n'.join([a.decode('utf8') for a in sl])

    def _write_relationship_graph(self, module_name=False):
        # Embed the pre-rendered <module>_module.png relationship figure.
        sl = ["",
              "Relationship Graph",
              "------------------",
              "",
              ".. figure:: %s_module.png" % (module_name, ),
              " :scale: 50",
              " :align: center",
              ""]
        sl.append("")
        return '\n'.join(sl)

    def write(self, module_name=False):
        """Return the complete rst document; the relationship-graph section
        is appended only when module_name is supplied."""
        s = ''
        s += self._write_header()
        s += self._write_depends()
        s += self._write_reports()
        s += self._write_menus()
        s += self._write_views()
        s += self._write_objects()
        if module_name:
            s += self._write_relationship_graph(module_name)
        return s
class WizardTechGuideRst(orm.TransientModel):
    """Wizard that builds a gzipped tarball of reST technical documentation:
    one ``<module>.rst`` per selected module, an ``index.rst`` table of
    contents, and (when available) a ``<module>_module.png`` relationship
    graph per module. The tarball is exposed base64-encoded through the
    ``rst_file`` binary field."""
    _name = "tech.guide.rst"
    _columns = {
        'rst_file': fields.binary('File', required=True, readonly=True),
    }

    def _generate(self, cr, uid, context):
        """Build the documentation tarball for ``context['active_ids']``.

        Returns the tarball content base64-encoded (used as the default
        value of ``rst_file``).
        """
        module_model = self.pool.get('ir.module.module')
        module_ids = context['active_ids']
        module_index = []
        # create a temporary gzipped tarfile:
        # NOTE(review): tempfile.mktemp is race-prone; kept to preserve
        # the original behaviour (the file only lives for this call).
        tgz_tmp_filename = tempfile.mktemp('_rst_module_doc.tgz')
        try:
            tarf = tarfile.open(tgz_tmp_filename, 'w:gz')
            modules = module_model.browse(cr, uid, module_ids)
            for module in modules:
                index_dict = {
                    'name': module.name,
                    'shortdesc': module.shortdesc,
                }
                module_index.append(index_dict)
                objects = self._get_objects(cr, uid, module)
                module.test_views = self._get_views(
                    cr, uid, module.id, context=context
                )
                rstdoc = RstDoc(module, objects)
                # Append Relationship Graph on rst: prefer the graph stored
                # on the module record, else ask the module model for one.
                graph_mod = False
                module_name = False
                if module.file_graph:
                    graph_mod = base64.decodestring(module.file_graph)
                else:
                    module_data = module_model.get_relation_graph(
                        cr, uid, module.name, context=context
                    )
                    if module_data['module_file']:
                        graph_mod = base64.decodestring(
                            module_data['module_file']
                        )
                if graph_mod:
                    module_name = module.name
                    try:
                        tmp_file_graph = tempfile.NamedTemporaryFile()
                        tmp_file_graph.write(graph_mod)
                        tmp_file_graph.file.flush()
                        tarf.add(
                            tmp_file_graph.name,
                            arcname=module.name + '_module.png'
                        )
                    finally:
                        tmp_file_graph.close()
                out = rstdoc.write(module_name)
                try:
                    tmp_file = tempfile.NamedTemporaryFile()
                    tmp_file.write(out.encode('utf8'))
                    tmp_file.file.flush()  # write content to file
                    tarf.add(tmp_file.name, arcname=module.name + '.rst')
                finally:
                    tmp_file.close()
            # write index file:
            tmp_file = tempfile.NamedTemporaryFile()
            out = self._create_index(module_index)
            tmp_file.write(out.encode('utf8'))
            tmp_file.file.flush()
            tarf.add(tmp_file.name, arcname='index.rst')
        finally:
            tarf.close()
        f = open(tgz_tmp_filename, 'rb')
        out = f.read()
        f.close()
        if os.path.exists(tgz_tmp_filename):
            try:
                os.unlink(tgz_tmp_filename)
            # was "except Exception, e" -- Python-2-only syntax; the "as"
            # form (PEP 3110) works on Python 2.6+ as well.
            except Exception as e:
                # Best-effort cleanup: only warn when the temp file could
                # not be removed.
                logger = netsvc.Logger()
                msg = "Temporary file %s could not be deleted. (%s)" % (
                    tgz_tmp_filename, e
                )
                logger.notifyChannel("warning", netsvc.LOG_WARNING, msg)
        return base64.encodestring(out)

    def _get_views(self, cr, uid, module_id, context=None):
        """Collect menus, reports and views declared by the module.

        Returns ``{module_id: {'menus_by_module': [...],
        'reports_by_module': [...], 'views_by_module': [...]}}``.
        """
        module_module_obj = self.pool.get('ir.module.module')
        model_data_obj = self.pool.get('ir.model.data')
        view_obj = self.pool.get('ir.ui.view')
        report_obj = self.pool.get('ir.actions.report.xml')
        menu_obj = self.pool.get('ir.ui.menu')
        res = {}
        mlist = module_module_obj.browse(cr, uid, [module_id], context=context)
        mnames = {}
        for m in mlist:
            mnames[m.name] = m.id
            res[m.id] = {
                'menus_by_module': [],
                'reports_by_module': [],
                'views_by_module': []
            }
        view_id = model_data_obj.search(
            cr,
            uid,
            [
                ('module', 'in', mnames.keys()),
                ('model', 'in', (
                    'ir.ui.view', 'ir.actions.report.xml', 'ir.ui.menu'
                ))
            ]
        )
        for data_id in model_data_obj.browse(cr, uid, view_id, context):
            # We use try except, because views or menus may not exist
            try:
                key = data_id['model']
                if key == 'ir.ui.view':
                    v = view_obj.browse(cr, uid, data_id.res_id)
                    v_dict = {
                        'name': v.name,
                        'inherit': v.inherit_id,
                        'type': v.type}
                    res[mnames[data_id.module]]['views_by_module'].append(
                        v_dict
                    )
                elif key == 'ir.actions.report.xml':
                    res[mnames[data_id.module]]['reports_by_module'].append(
                        report_obj.browse(cr, uid, data_id.res_id).name
                    )
                elif key == 'ir.ui.menu':
                    res[mnames[data_id.module]]['menus_by_module'].append(
                        menu_obj.browse(cr, uid, data_id.res_id).complete_name
                    )
            except (KeyError, ):
                pass
        return res

    def _create_index(self, module_index):
        """Render ``index.rst``: a toctree listing every documented module."""
        sl = ["",
              ".. _module-technical-guide-link:",
              "",
              "Module Technical Guide: Introspection report on objects",
              "=======================================================",
              "",
              ".. toctree::",
              "    :maxdepth: 1",
              "",
              ]
        for mod in module_index:
            sl.append("    %s" % mod['name'])
        sl.append("")
        return '\n'.join(sl)

    def _get_objects(self, cr, uid, module):
        """Return ``[{'object': ir.model record, 'fields': [...]}, ...]``
        for every model defined by *module*."""
        res = []
        objects = self._object_find(cr, uid, module)
        for obj in objects:
            fields = self._fields_find(cr, uid, obj.model)
            dico = {
                'object': obj,
                'fields': fields
            }
            res.append(dico)
        return res

    def _object_find(self, cr, uid, module):
        """Browse the ir.model records registered by *module* via
        ir.model.data."""
        ir_model_data = self.pool.get('ir.model.data')
        ids2 = ir_model_data.search(
            cr, uid, [('module', '=', module.name), ('model', '=', 'ir.model')]
        )
        ids = []
        for mod in ir_model_data.browse(cr, uid, ids2):
            ids.append(mod.res_id)
        return self.pool.get('ir.model').browse(cr, uid, ids)

    def _fields_find(self, cr, uid, obj):
        """Return ``fields_get()`` items for model *obj*, or "" (logged)
        when the model is not registered in the pool."""
        modobj = self.pool.get(obj)
        if modobj:
            res = modobj.fields_get(cr, uid).items()
            return res
        else:
            logger = netsvc.Logger()
            msg = "Object %s not found" % (obj)
            logger.notifyChannel("base_module_doc_rst", netsvc.LOG_ERROR, msg)
            return ""

    _defaults = {
        'rst_file': _generate,
    }
|
agpl-3.0
|
steventimberman/masterDebater
|
venv/lib/python2.7/site-packages/pbr/hooks/files.py
|
98
|
3681
|
# Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import sys
from pbr import find_package
from pbr.hooks import base
def get_manpath():
    """Return the install-relative directory for man pages.

    Debian derivatives (and some other platforms) symlink
    /usr/local/man to /usr/local/share/man, which trips an install bug
    expecting every node of the relative data dir to be a real
    directory -- so prefer plain 'man' whenever sys.prefix contains one.
    """
    if os.path.exists(os.path.join(sys.prefix, 'man')):
        return 'man'
    return 'share/man'


def get_man_section(section):
    """Return the man directory for *section*, e.g. '5' -> '<manpath>/man5'."""
    return os.path.join(get_manpath(), 'man%s' % section)
class FilesConfig(base.BaseConfig):
    """Setup hook that post-processes the ``[files]`` config section:
    expands trailing-``*`` globs in ``data_files`` and appends man-page
    entries declared via the ``manpages`` option."""

    section = 'files'

    def __init__(self, config, name):
        super(FilesConfig, self).__init__(config)
        # name: the distribution/package name, used as the default for
        # the 'packages' option in hook().
        self.name = name
        self.data_files = self.config.get('data_files', '')

    def save(self):
        # Write the (possibly rewritten) data_files back before saving.
        self.config['data_files'] = self.data_files
        super(FilesConfig, self).save()

    def expand_globs(self):
        """Expand 'target = source/*' data_files lines into explicit
        per-directory file lists by walking the source tree."""
        finished = []
        for line in self.data_files.split("\n"):
            if line.rstrip().endswith('*') and '=' in line:
                (target, source_glob) = line.split('=')
                # Strip the trailing '*' to get the directory prefix.
                source_prefix = source_glob.strip()[:-1]
                target = target.strip()
                if not target.endswith(os.path.sep):
                    target += os.path.sep
                for (dirpath, dirnames, fnames) in os.walk(source_prefix):
                    # Map each walked dir onto the target prefix, then list
                    # its files indented beneath it (setup.cfg format).
                    finished.append(
                        "%s = " % dirpath.replace(source_prefix, target))
                    finished.extend(
                        ["  %s" % os.path.join(dirpath, f) for f in fnames])
            else:
                finished.append(line)
        self.data_files = "\n".join(finished)

    def add_man_path(self, man_path):
        # Start a new data_files target line for a man section directory.
        self.data_files = "%s\n%s =" % (self.data_files, man_path)

    def add_man_page(self, man_page):
        # Append one man page under the most recently added man path.
        self.data_files = "%s\n  %s" % (self.data_files, man_page)

    def get_man_sections(self):
        """Group the configured man pages by section number (last char of
        the filename), e.g. {'1': ['foo.1'], '5': ['bar.5']}."""
        man_sections = dict()
        manpages = self.pbr_config['manpages']
        for manpage in manpages.split():
            section_number = manpage.strip()[-1]
            section = man_sections.get(section_number, list())
            section.append(manpage.strip())
            man_sections[section_number] = section
        return man_sections

    def hook(self):
        """Entry point: expand package dirs, globs and man pages."""
        packages = self.config.get('packages', self.name).strip()
        expanded = []
        for pkg in packages.split("\n"):
            if os.path.isdir(pkg.strip()):
                # Replace each package dir with its full package list.
                expanded.append(find_package.smart_find_packages(pkg.strip()))
        self.config['packages'] = "\n".join(expanded)
        self.expand_globs()
        if 'manpages' in self.pbr_config:
            man_sections = self.get_man_sections()
            for (section, pages) in man_sections.items():
                manpath = get_man_section(section)
                self.add_man_path(manpath)
                for page in pages:
                    self.add_man_page(page)
|
mit
|
imaculate/scikit-learn
|
benchmarks/bench_mnist.py
|
38
|
6799
|
"""
=======================
MNIST dataset benchmark
=======================
Benchmark on the MNIST dataset. The dataset comprises 70,000 samples
and 784 features. Here, we consider the task of predicting
10 classes - digits from 0 to 9 from their raw images. By contrast to the
covertype dataset, the feature space is homogenous.
Example of output :
[..]
Classification performance:
===========================
Classifier train-time test-time error-rate
------------------------------------------------------------
MLP_adam 53.46s 0.11s 0.0224
Nystroem-SVM 112.97s 0.92s 0.0228
MultilayerPerceptron 24.33s 0.14s 0.0287
ExtraTrees 42.99s 0.57s 0.0294
RandomForest 42.70s 0.49s 0.0318
SampledRBF-SVM 135.81s 0.56s 0.0486
LinearRegression-SAG 16.67s 0.06s 0.0824
CART 20.69s 0.02s 0.1219
dummy 0.00s 0.01s 0.8973
"""
from __future__ import division, print_function
# Author: Issam H. Laradji
# Arnaud Joly <arnaud.v.joly@gmail.com>
# License: BSD 3 clause
import os
from time import time
import argparse
import numpy as np
from sklearn.datasets import fetch_mldata
from sklearn.datasets import get_data_home
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.dummy import DummyClassifier
from sklearn.externals.joblib import Memory
from sklearn.kernel_approximation import Nystroem
from sklearn.kernel_approximation import RBFSampler
from sklearn.metrics import zero_one_loss
from sklearn.pipeline import make_pipeline
from sklearn.svm import LinearSVC
from sklearn.tree import DecisionTreeClassifier
from sklearn.utils import check_array
from sklearn.linear_model import LogisticRegression
from sklearn.neural_network import MLPClassifier
# Memoize the data extraction and memory map the resulting
# train / test splits in readonly mode
# (cache lives under the scikit-learn data home directory).
memory = Memory(os.path.join(get_data_home(), 'mnist_benchmark_data'),
                mmap_mode='r')
@memory.cache
def load_data(dtype=np.float32, order='F'):
    """Load the data, then cache and memmap the train/test split"""
    ######################################################################
    # Load dataset (downloads from mldata.org on first call; cached by
    # the module-level joblib Memory afterwards).
    print("Loading dataset...")
    data = fetch_mldata('MNIST original')
    X = check_array(data['data'], dtype=dtype, order=order)
    y = data["target"]

    # Normalize features: raw pixels are 0..255, scale into [0, 1].
    X = X / 255

    # Create train-test split (as [Joachims, 2006]): the canonical MNIST
    # split of 60k train / 10k test samples, in order.
    print("Creating train-test split...")
    n_train = 60000
    X_train = X[:n_train]
    y_train = y[:n_train]
    X_test = X[n_train:]
    y_test = y[n_train:]

    return X_train, X_test, y_train, y_test
# Name -> unfitted estimator registry; keys are the values accepted by the
# --classifiers CLI option and shown in the results table.
# NOTE(review): 'LinearRegression-SAG' actually wraps LogisticRegression,
# and 'algorithm=' is the pre-0.18 MLPClassifier spelling of 'solver=' --
# confirm against the installed scikit-learn version.
ESTIMATORS = {
    "dummy": DummyClassifier(),
    'CART': DecisionTreeClassifier(),
    'ExtraTrees': ExtraTreesClassifier(n_estimators=100),
    'RandomForest': RandomForestClassifier(n_estimators=100),
    'Nystroem-SVM': make_pipeline(
        Nystroem(gamma=0.015, n_components=1000), LinearSVC(C=100)),
    'SampledRBF-SVM': make_pipeline(
        RBFSampler(gamma=0.015, n_components=1000), LinearSVC(C=100)),
    'LinearRegression-SAG': LogisticRegression(solver='sag', tol=1e-1, C=1e4),
    'MultilayerPerceptron': MLPClassifier(
        hidden_layer_sizes=(100, 100), max_iter=400, alpha=1e-4,
        algorithm='sgd', learning_rate_init=0.2, momentum=0.9, verbose=1,
        tol=1e-4, random_state=1),
    'MLP-adam': MLPClassifier(
        hidden_layer_sizes=(100, 100), max_iter=400, alpha=1e-4,
        algorithm='adam', learning_rate_init=0.001, verbose=1,
        tol=1e-4, random_state=1)
}
if __name__ == "__main__":
    # CLI: choose classifiers, worker count, memory layout and RNG seed.
    parser = argparse.ArgumentParser()
    parser.add_argument('--classifiers', nargs="+",
                        choices=ESTIMATORS, type=str,
                        default=['ExtraTrees', 'Nystroem-SVM'],
                        help="list of classifiers to benchmark.")
    parser.add_argument('--n-jobs', nargs="?", default=1, type=int,
                        help="Number of concurrently running workers for "
                             "models that support parallelism.")
    parser.add_argument('--order', nargs="?", default="C", type=str,
                        choices=["F", "C"],
                        help="Allow to choose between fortran and C ordered "
                             "data")
    parser.add_argument('--random-seed', nargs="?", default=0, type=int,
                        help="Common seed used by random number generator.")
    args = vars(parser.parse_args())

    print(__doc__)

    X_train, X_test, y_train, y_test = load_data(order=args["order"])

    print("")
    print("Dataset statistics:")
    print("===================")
    print("%s %d" % ("number of features:".ljust(25), X_train.shape[1]))
    print("%s %d" % ("number of classes:".ljust(25), np.unique(y_train).size))
    print("%s %s" % ("data type:".ljust(25), X_train.dtype))
    print("%s %d (size=%dMB)" % ("number of train samples:".ljust(25),
                                 X_train.shape[0], int(X_train.nbytes / 1e6)))
    print("%s %d (size=%dMB)" % ("number of test samples:".ljust(25),
                                 X_test.shape[0], int(X_test.nbytes / 1e6)))

    print()
    print("Training Classifiers")
    print("====================")
    error, train_time, test_time = {}, {}, {}
    for name in sorted(args["classifiers"]):
        print("Training %s ... " % name, end="")
        estimator = ESTIMATORS[name]
        estimator_params = estimator.get_params()

        # Propagate the common seed to every *random_state param
        # (including nested pipeline steps, e.g. 'nystroem__random_state').
        estimator.set_params(**{p: args["random_seed"]
                                for p in estimator_params
                                if p.endswith("random_state")})

        if "n_jobs" in estimator_params:
            estimator.set_params(n_jobs=args["n_jobs"])

        time_start = time()
        estimator.fit(X_train, y_train)
        train_time[name] = time() - time_start

        time_start = time()
        y_pred = estimator.predict(X_test)
        test_time[name] = time() - time_start

        error[name] = zero_one_loss(y_test, y_pred)

        print("done")

    print()
    print("Classification performance:")
    print("===========================")
    print("{0: <24} {1: >10} {2: >11} {3: >12}"
          "".format("Classifier  ", "train-time", "test-time", "error-rate"))
    print("-" * 60)
    # Report best (lowest error) first.
    for name in sorted(args["classifiers"], key=error.get):
        print("{0: <23} {1: >10.2f}s {2: >10.2f}s {3: >12.4f}"
              "".format(name, train_time[name], test_time[name], error[name]))

    print()
|
bsd-3-clause
|
kennethlyn/parallella-lcd-linux
|
tools/perf/scripts/python/netdev-times.py
|
1544
|
15191
|
# Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
# Module-level state shared by the perf callbacks and handlers below.
all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
                 # and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
                  # skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
                       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed

# options (set by trace_begin from sys.argv)
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;

# indices of event_info tuple
EINFO_IDX_NAME=   0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU=    2
EINFO_IDX_TIME=   3
EINFO_IDX_PID=    4
EINFO_IDX_COMM=   5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
    """Return the elapsed time from *src* to *dst* in milliseconds."""
    delta_nsec = dst - src
    return delta_nsec / 1000000.0
# Display a process of transmitting a packet
# (Python 2 print statements: this script runs under perf's embedded
# Python 2 interpreter).
def print_transmit(hunk):
    # Honour the "dev=" option: skip hunks for other devices.
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
        nsecs_secs(hunk['queue_t']),
        nsecs_nsecs(hunk['queue_t'])/1000,
        diff_msec(hunk['queue_t'], hunk['xmit_t']),
        diff_msec(hunk['xmit_t'], hunk['free_t']))

# Format for displaying rx packet processing
PF_IRQ_ENTRY= "  irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY="     softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= "         napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= "         |"
PF_WJOINT= "         |            |"
PF_NET_RECV= "         |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= "         |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= "         |      skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= "         |      kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= "         |      consume_skb(+%.3fmsec)"
# Display a process of received packets and interrputs associated with
# a NET_RX softirq
def print_receive(hunk):
    show_hunk = 0
    irq_list = hunk['irq_list']
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be showed: with "dev=" only hunks whose
    # irq name mentions the device are printed; otherwise print all.
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return

    # Header: absolute timestamp of the first irq entry, and the cpu.
    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    # One sub-tree per hardware irq, with its netif_rx events.
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
            irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                    irq_event['skbaddr'])
                print PF_JOINT
    # Softirq entry, then the NET_RX event list (napi polls and
    # netif_receive_skb, with per-skb consumption details).
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                event['len'])
            if 'comm' in event.keys():
                # skb was copied to userspace.
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                    event['pid'], event['comm'])
            elif 'handle' in event.keys():
                # skb was freed in-kernel.
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t,
                        event['comm_t']),
                        event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t,
                        event['comm_t'])
            print PF_JOINT
def trace_begin():
    """Parse script options from sys.argv into the module-level flags
    (show_tx, show_rx, dev, debug); called once by perf at startup."""
    global show_tx
    global show_rx
    global dev
    global debug
    for arg in sys.argv[1:]:
        if arg == 'tx':
            show_tx = 1
        elif arg == 'rx':
            show_rx = 1
        elif arg.find('dev=', 0, 4) >= 0:
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1
    if show_tx == 0 and show_rx == 0:
        # Neither chart requested explicitly: show both.
        show_tx = 1
        show_rx = 1
def trace_end():
    """Called once by perf after all events: dispatch, then report."""
    # order all events in time (Python 2 cmp-style sort)
    all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
                        b[EINFO_IDX_TIME]))
    # process all events: route each captured tuple to its handler.
    for i in range(len(all_event_list)):
        event_info = all_event_list[i]
        name = event_info[EINFO_IDX_NAME]
        if name == 'irq__softirq_exit':
            handle_irq_softirq_exit(event_info)
        elif name == 'irq__softirq_entry':
            handle_irq_softirq_entry(event_info)
        elif name == 'irq__softirq_raise':
            handle_irq_softirq_raise(event_info)
        elif name == 'irq__irq_handler_entry':
            handle_irq_handler_entry(event_info)
        elif name == 'irq__irq_handler_exit':
            handle_irq_handler_exit(event_info)
        elif name == 'napi__napi_poll':
            handle_napi_poll(event_info)
        elif name == 'net__netif_receive_skb':
            handle_netif_receive_skb(event_info)
        elif name == 'net__netif_rx':
            handle_netif_rx(event_info)
        elif name == 'skb__skb_copy_datagram_iovec':
            handle_skb_copy_datagram_iovec(event_info)
        elif name == 'net__net_dev_queue':
            handle_net_dev_queue(event_info)
        elif name == 'net__net_dev_xmit':
            handle_net_dev_xmit(event_info)
        elif name == 'skb__kfree_skb':
            handle_kfree_skb(event_info)
        elif name == 'skb__consume_skb':
            handle_consume_skb(event_info)
    # display receive hunks
    if show_rx:
        for i in range(len(receive_hunk_list)):
            print_receive(receive_hunk_list[i])
    # display transmit hunks
    if show_tx:
        print "   dev    len      Qdisc        " \
            "       netdevice             free"
        for i in range(len(tx_free_list)):
            print_transmit(tx_free_list[i])
    if debug:
        # buffer accounting: entries still pending + overflow drops.
        print "debug buffer status"
        print "----------------------------"
        print "xmit Qdisc:remain:%d overflow:%d" % \
            (len(tx_queue_list), of_count_tx_queue_list)
        print "xmit netdevice:remain:%d overflow:%d" % \
            (len(tx_xmit_list), of_count_tx_xmit_list)
        print "receive:remain:%d overflow:%d" % \
            (len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
# Each callback only snapshots its arguments as a tuple into
# all_event_list; real processing happens later in trace_end(), once the
# events can be globally time-ordered.
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, callchain, vec):
    # Only NET_RX softirqs are of interest to this script.
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, callchain, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, callchain, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
                           callchain, irq, irq_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  irq, irq_name)
    all_event_list.append(event_info)

def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, callchain, irq, ret):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)

def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, callchain, napi, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  napi, dev_name)
    all_event_list.append(event_info)

def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, callchain, skbaddr,
                           skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, callchain, skbaddr,
                  skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm, callchain,
                       skbaddr, skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm, callchain,
                      skbaddr, skblen, rc, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc ,dev_name)
    all_event_list.append(event_info)

def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm, callchain,
                   skbaddr, protocol, location):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, protocol, location)
    all_event_list.append(event_info)

def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, callchain, skbaddr):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr)
    all_event_list.append(event_info)

def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm, callchain,
                                 skbaddr, skblen):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen)
    all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
    """Push a new irq record on the per-cpu irq stack."""
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    if cpu not in irq_dic.keys():
        irq_dic[cpu] = []
    irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
    irq_dic[cpu].append(irq_record)

def handle_irq_handler_exit(event_info):
    """Close the top irq record; keep it only if it raised NET_RX."""
    (name, context, cpu, time, pid, comm, irq, ret) = event_info
    if cpu not in irq_dic.keys():
        return
    irq_record = irq_dic[cpu].pop()
    if irq != irq_record['irq']:
        # mismatched entry/exit -- drop the record.
        return
    irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include NET_RX softirq, drop.
    if 'event_list' in irq_record.keys():
        irq_dic[cpu].append(irq_record)

def handle_irq_softirq_raise(event_info):
    """Record a NET_RX softirq raise inside the current irq record."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

def handle_irq_softirq_entry(event_info):
    """Open a fresh NET_RX softirq context for this cpu."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
    """Close the NET_RX softirq context and merge it with its irqs into
    one 'receive hunk' for later display."""
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    if irq_list == [] or event_list == 0:
        # incomplete hunk (missing irq side or softirq side) -- drop.
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information realted to a NET_RX softirq
    receive_hunk_list.append(rec_data)

def handle_napi_poll(event_info):
    """Append a napi_poll event to the open NET_RX context, if any."""
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        event_list = net_rx_dic[cpu]['event_list']
        rec_data = {'event_name':'napi_poll',
                    'dev':dev_name, 'event_t':time}
        event_list.append(rec_data)

def handle_netif_rx(event_info):
    """Attach a netif_rx event to the irq record currently on top of the
    per-cpu irq stack."""
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
        'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
    """Record a received skb in the NET_RX context and in rx_skb_list
    (bounded FIFO used to match later copy/free events)."""
    global of_count_rx_skb_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        rx_skb_list.insert(0, rec_data)
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1

def handle_net_dev_queue(event_info):
    """Record a packet entering the Qdisc (dev_queue_xmit)."""
    global of_count_tx_queue_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    tx_queue_list.insert(0, skb)
    if len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1

def handle_net_dev_xmit(event_info):
    """Move a successfully transmitted packet from the queue list to the
    xmit list, stamping its xmit time."""
    global of_count_tx_xmit_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return

def handle_kfree_skb(event_info):
    """Match a freed skb against the tx queue, tx xmit and rx lists, in
    that order, and record how/when it was released."""
    (name, context, cpu, time, pid, comm,
        skbaddr, protocol, location) = event_info
    # freed while still queued: dropped before transmission.
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    # freed after transmit: completes a tx hunk.
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    # freed on the receive side (e.g. dropped by the stack).
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                        'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return

def handle_consume_skb(event_info):
    """A transmitted skb was consumed normally: complete its tx hunk."""
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return

def handle_skb_copy_datagram_iovec(event_info):
    """A received skb was copied to userspace: record the consumer."""
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                        'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
|
gpl-2.0
|
nightflyer73/plugin.video.3bmeteo
|
default.py
|
1
|
1616
|
# -*- coding: utf-8 -*-
import os
import sys
import xbmc
import xbmcgui
import xbmcplugin
import xbmcaddon
import urllib
import urllib2
import urlparse
from xml.dom import minidom
# plugin constants
__plugin__ = "plugin.video.3bmeteo"
__author__ = "Nightflyer"
Addon = xbmcaddon.Addon(id=__plugin__)
# plugin handle
handle = int(sys.argv[1])
# utility functions
def parameters_string_to_dict(parameters):
    ''' Convert parameters encoded in a URL to a dict. '''
    # parameters[1:] strips the leading '?' of the plugin query string.
    paramDict = dict(urlparse.parse_qsl(parameters[1:]))
    return paramDict

def addLinkItem(url, li):
    # Register a playable (non-folder) item with the plugin directory.
    return xbmcplugin.addDirectoryItem(handle=handle, url=url,
        listitem=li, isFolder=False)
# UI builder functions
def show_root_menu():
    ''' Show the plugin root menu '''
    # Spoof the 3bmeteo Android app's User-Agent so the feed endpoint
    # serves the mobile video XML.
    userAgent = "Dalvik/1.6.0 (Linux; U; Android 4.2.2; GT-I9105P Build/JDQ39)"
    url = "http://api.3bmeteo.com/mobile/video_previsionali_feed"
    headers = {'User-Agent': userAgent}
    req = urllib2.Request(url, None, headers)
    xmldata = urllib2.urlopen(req).read()
    dom = minidom.parseString(xmldata)

    # Parse video feed: one playable list item per <video> element.
    for videoNode in dom.getElementsByTagName('video'):
        link = videoNode.getElementsByTagName('url')[0].firstChild.nodeValue
        imageUrl = videoNode.getElementsByTagName('thumbnail')[0].firstChild.nodeValue
        title = videoNode.getElementsByTagName('titolo')[0].firstChild.nodeValue
        liStyle = xbmcgui.ListItem(title, thumbnailImage=imageUrl)
        addLinkItem(link, liStyle)
    xbmcplugin.endOfDirectory(handle=handle, succeeded=True)

# Entry point: this plugin has a single flat menu, so always render it.
show_root_menu()
|
gpl-3.0
|
dharmabumstead/ansible
|
lib/ansible/modules/web_infrastructure/apache2_module.py
|
16
|
8135
|
#!/usr/bin/python
# coding: utf-8 -*-
# (c) 2013-2014, Christian Berendt <berendt@b1-systems.de>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: apache2_module
version_added: 1.6
author:
- Christian Berendt (@berendt)
- Ralf Hertel (@n0trax)
- Robin Roth (@robinro)
short_description: Enables/disables a module of the Apache2 webserver.
description:
- Enables or disables a specified module of the Apache2 webserver.
options:
name:
description:
- Name of the module to enable/disable as given to C(a2enmod/a2dismod).
required: true
identifier:
description:
- Identifier of the module as listed by C(apache2ctl -M).
This is optional and usually determined automatically by the common convention of
appending C(_module) to I(name) as well as custom exception for popular modules.
required: False
version_added: "2.5"
force:
description:
- Force disabling of default modules and override Debian warnings.
required: false
type: bool
default: False
version_added: "2.1"
state:
description:
- Desired state of the module.
choices: ['present', 'absent']
default: present
ignore_configcheck:
description:
- Ignore configuration checks about inconsistent module configuration. Especially for mpm_* modules.
type: bool
default: False
version_added: "2.3"
requirements: ["a2enmod","a2dismod"]
'''
EXAMPLES = '''
# enables the Apache2 module "wsgi"
- apache2_module:
state: present
name: wsgi
# disables the Apache2 module "wsgi"
- apache2_module:
state: absent
name: wsgi
# disable default modules for Debian
- apache2_module:
state: absent
name: autoindex
force: True
# disable mpm_worker and ignore warnings about missing mpm module
- apache2_module:
state: absent
name: mpm_worker
ignore_configcheck: True
# enable dump_io module, which is identified as dumpio_module inside apache2
- apache2_module:
state: present
name: dump_io
identifier: dumpio_module
'''
RETURN = '''
result:
description: message about action taken
returned: always
type: string
warnings:
description: list of warning messages
returned: when needed
type: list
rc:
description: return code of underlying command
returned: failed
type: int
stdout:
description: stdout of underlying command
returned: failed
type: string
stderr:
description: stderr of underlying command
returned: failed
type: string
'''
import re
def _run_threaded(module):
    """Return True when the running Apache build uses a threaded MPM.

    Runs ``<control binary> -V`` and looks for a 'threaded: yes' line in
    its output.
    """
    ctl = _get_ctl_binary(module)
    rc, out, err = module.run_command("%s -V" % ctl)
    return bool(re.search(r'threaded:[ ]*yes', out))
def _get_ctl_binary(module):
for command in ['apache2ctl', 'apachectl']:
ctl_binary = module.get_bin_path(command)
if ctl_binary is not None:
return ctl_binary
module.fail_json(
msg="Neither of apache2ctl nor apachctl found."
" At least one apache control binary is necessary."
)
def _module_is_enabled(module):
    """Return True when the configured identifier appears in `apachectl -M` output.

    When the config check fails, behavior depends on ignore_configcheck:
    append a warning and report the module as disabled, or fail hard.
    """
    control_binary = _get_ctl_binary(module)
    result, stdout, stderr = module.run_command("%s -M" % control_binary)
    if result != 0:
        error_msg = "Error executing %s: %s" % (control_binary, stderr)
        if module.params['ignore_configcheck']:
            # AH00534 together with an mpm_* module name means no MPM is
            # loaded at all; emit a more specific warning in that case.
            if 'AH00534' in stderr and 'mpm_' in module.params['name']:
                module.warnings.append(
                    "No MPM module loaded! apache2 reload AND other module actions"
                    " will fail if no MPM module is loaded immediately."
                )
            else:
                module.warnings.append(error_msg)
            return False
        else:
            # fail_json() terminates the module run; no return needed here.
            module.fail_json(msg=error_msg)
    # `-M` output indents identifiers with a space; prefixing the search
    # string avoids matching a suffix of a longer identifier.
    searchstring = ' ' + module.params['identifier']
    return searchstring in stdout
def create_apache_identifier(name):
    """Map an a2enmod-style module name to its ``apache2ctl -M`` identifier.

    The common convention appends ``_module`` to the name; a few popular
    modules deviate and are handled through explicit workarounds.
    """
    # Exact-substring workarounds: a2enmod spelling -> apache2ctl -M name.
    literal_fixups = [
        ('shib2', 'mod_shib'),
        ('evasive', 'evasive20_module'),
    ]
    # Regex workarounds extracting a sub-part of the name, e.g. 'php5.6' -> 'php5'.
    regex_fixups = [
        ('php', r'^(php\d)\.'),
    ]
    for spelling, identifier in literal_fixups:
        if spelling in name:
            return identifier
    for needle, pattern in regex_fixups:
        if needle not in name:
            continue
        match = re.search(pattern, name)
        if match is not None:
            return match.group(1) + '_module'
    # Fall back to the common naming convention.
    return name + '_module'
def _set_state(module, state):
    """Bring the module to the desired state ('present'/'absent').

    Honors check mode and the 'force' flag (a2dismod -f). Always terminates
    through module.exit_json()/fail_json(); never returns normally.
    """
    name = module.params['name']
    force = module.params['force']
    want_enabled = state == 'present'
    state_string = {'present': 'enabled', 'absent': 'disabled'}[state]
    a2mod_binary_name = {'present': 'a2enmod', 'absent': 'a2dismod'}[state]
    success_msg = "Module %s %s" % (name, state_string)
    if _module_is_enabled(module) != want_enabled:
        if module.check_mode:
            module.exit_json(changed=True,
                             result=success_msg,
                             warnings=module.warnings)
        a2mod_binary = module.get_bin_path(a2mod_binary_name)
        if a2mod_binary is None:
            # Bug fix: the original formatted the (now None) resolved path
            # into this message; report the command name instead.
            module.fail_json(msg="%s not found. Perhaps this system does not use %s to manage apache" % (a2mod_binary_name, a2mod_binary_name))
        if not want_enabled and force:
            # force exists only for a2dismod on debian
            a2mod_binary += ' -f'
        result, stdout, stderr = module.run_command("%s %s" % (a2mod_binary, name))
        if _module_is_enabled(module) == want_enabled:
            module.exit_json(changed=True,
                             result=success_msg,
                             warnings=module.warnings)
        else:
            msg = (
                'Failed to set module {name} to {state}:\n'
                '{stdout}\n'
                'Maybe the module identifier ({identifier}) was guessed incorrectly.'
                'Consider setting the "identifier" option.'
            ).format(
                name=name,
                state=state_string,
                stdout=stdout,
                identifier=module.params['identifier']
            )
            module.fail_json(msg=msg,
                             rc=result,
                             stdout=stdout,
                             stderr=stderr)
    else:
        module.exit_json(changed=False,
                         result=success_msg,
                         warnings=module.warnings)
def main():
    """Entry point: declare the module interface and dispatch to _set_state()."""
    module = AnsibleModule(
        argument_spec=dict(
            name=dict(required=True),
            identifier=dict(required=False, type='str'),
            force=dict(required=False, type='bool', default=False),
            state=dict(default='present', choices=['absent', 'present']),
            ignore_configcheck=dict(required=False, type='bool', default=False),
        ),
        supports_check_mode=True,
    )
    # Warnings collected here are reported with every exit_json()/fail_json().
    module.warnings = []
    name = module.params['name']
    # 'cgi' cannot be toggled automatically when the MPM is threaded.
    if name == 'cgi' and _run_threaded(module):
        module.fail_json(msg="Your MPM seems to be threaded. No automatic actions on module %s possible." % name)
    # Derive the `apache2ctl -M` identifier unless the caller supplied one.
    if not module.params['identifier']:
        module.params['identifier'] = create_apache_identifier(module.params['name'])
    if module.params['state'] in ['present', 'absent']:
        _set_state(module, module.params['state'])
# import module snippets
from ansible.module_utils.basic import AnsibleModule
if __name__ == '__main__':
main()
|
gpl-3.0
|
tiefpunkt/thingstore
|
thingstore/views.py
|
1
|
7148
|
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render, get_object_or_404, render_to_response
from django.utils.timezone import now
from datetime import timedelta
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login, logout
from django.core.context_processors import csrf
from django.core.urlresolvers import reverse
from django.contrib.auth.decorators import login_required
from django.forms.models import inlineformset_factory
from django.conf import settings
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from thingstore.models import Thing, Value, APIKey, Metric
from thingstore.forms import ThingForm
""" Index page. Contains lists of users and things """
def index(request):
    """Render the index page with the five latest things and users."""
    things = Thing.objects.order_by('name')[:5]
    recent_users = User.objects.order_by('-last_login')[:5]
    return render(request, 'thingstore/index.html',
                  {'latest_things': things, 'users': recent_users})
""" About page. Static """
def about(request):
    """Render the static about page."""
    return render(request, 'thingstore/about.html', {})
""" Thing detail page """
def thing(request, thing_id):
    """Thing detail page: each metric with its values from the last 12 hours."""
    timeframe_hours = 12
    # Create Querysets
    thing = get_object_or_404(Thing, pk=thing_id)
    metrics = thing.metrics.all()
    values = Value.objects.filter(metric__in = metrics, timestamp__gte = now()-timedelta(hours=timeframe_hours))
    # Group value dicts by metric id: {metric_id: [value_dict, ...]}
    relation_dict = {}
    for value in values:
        value_dict = value.__dict__
        value_dict['js_time'] = value.js_time
        relation_dict.setdefault(value.metric_id, []).append(value_dict)
    # Create dict of metrics, add values from ^ dict
    metrics_list = []
    for metric in metrics:
        # NOTE(review): metric.__dict__ is mutated in place here; the extra
        # keys live only as long as this request, so that is harmless.
        metric_dict = metric.__dict__
        metric_dict['current_value'] = metric.current_value
        if metric.id in relation_dict:
            metric_dict['value_dict'] = relation_dict[metric.id]
            # add latest invisible value to have a line out of nowhere into the left side of the graph
            invisible_value = Value.objects.filter(metric = metric, timestamp__lt = now()-timedelta(hours=timeframe_hours))[:1]
            if len(invisible_value) == 1:
                value_dict = invisible_value[0].__dict__
                value_dict['js_time'] = invisible_value[0].js_time
                metric_dict['value_dict'].append(value_dict)
        metrics_list.append(metric_dict)
    return render(request, 'thingstore/thing.html',
        {
            'thing': thing,
            'metrics': metrics_list,
            'timeframe_hours': timeframe_hours
        }
    )
""" User detail page """
def user(request, username):
    """Render a user's detail page; 404s on unknown username (case-insensitive)."""
    selected = get_object_or_404(User, username__iexact=username)
    return render(request, 'thingstore/user.html', {'selected_user': selected})
def login_view(request):
    """Show the login form and authenticate POSTed credentials."""
    username = password = ''
    context = {}
    context.update(csrf(request))
    if request.POST:
        username = request.POST.get('username')
        password = request.POST.get('password')
        user = authenticate(username=username, password=password)
        if user is None:
            context['alert'] = "Your username and/or password were incorrect."
        elif not user.is_active:
            context['alert'] = "Your account is not active, please contact the site admin."
        else:
            login(request, user)
            return HttpResponseRedirect(reverse('thingstore.views.index'))
    # Pre-fill the username field after a failed attempt.
    context['username'] = username
    return render_to_response('thingstore/login.html',context)
def logout_view(request):
    """Log the current user out, then redirect to the index page."""
    logout(request)
    return HttpResponseRedirect(reverse('thingstore.views.index'))
@login_required
def settings_personal(request):
    """Personal settings page: change password or email address.

    Both actions re-check the user's current password before applying.
    """
    parameters = {}
    if request.POST:
        action = request.POST.get('action')
        user = request.user
        if action == "change_password":
            curr_password = request.POST.get('curr_password')
            password = request.POST.get('new_password')
            password_retype = request.POST.get('new_password_retype')
            if not user.check_password(curr_password):
                parameters["error"] = "Please enter your correct current password"
            # Replaced the Python-2-only '<>' operator with '!='.
            elif password != password_retype:
                parameters["error"] = "Please type the same password twice."
            elif len(password) < settings.PASSWORD_MIN_LENGTH:
                parameters["error"] = "Your password has to be at least " + str(settings.PASSWORD_MIN_LENGTH) + " characters long."
            else:
                #TODO: Password requirements
                user.set_password(password)
                user.save()
                parameters["message"] = "Password successfully changed."
        elif action == "change_email":
            curr_password = request.POST.get('curr_password')
            email = request.POST.get('email')
            if not user.check_password(curr_password):
                parameters["error"] = "Please enter your correct current password"
            else:
                try:
                    validate_email(email)
                except ValidationError as e:
                    parameters["error"] = "Please enter a valid email address"
                else:
                    user.email = email
                    user.save()
                    parameters["message"] = "Email address successfully changed."
    parameters["tab"] = "personal"
    return render(request, 'thingstore/settings_personal.html', parameters)
@login_required
def settings_apikeys(request):
    """List the current user's API keys."""
    context = {
        "tab": "apikeys",
        "apikeys": request.user.apikeys.all(),
    }
    return render(request, 'thingstore/settings_apikeys.html', context)
@login_required
def settings_apikeys_add(request):
    """Create a new API key for the current user and redirect back to the list."""
    new_key = APIKey.create(request.user)
    new_key.save()
    return HttpResponseRedirect(reverse('thingstore.views.settings_apikeys'))
@login_required
def settings_apikeys_del(request, apikey_id):
    """Delete one of the current user's API keys.

    Uses get_object_or_404 (consistent with the other views) so an unknown
    key id yields a 404 instead of an unhandled DoesNotExist (HTTP 500).
    Keys owned by other users are silently skipped.
    """
    apikey = get_object_or_404(APIKey, pk=apikey_id)
    if apikey.user != request.user:
        # TODO: Error message
        return HttpResponseRedirect(reverse('thingstore.views.settings_apikeys'))
    apikey.delete()
    #TODO: Success message
    return HttpResponseRedirect(reverse('thingstore.views.settings_apikeys'))
""" Thing editor """
# TODO: Error handling
@login_required
def thing_editor(request, thing_id = None):
    """Create (no thing_id) or edit (thing_id given) a Thing and its metrics.

    Only the owner may edit an existing thing; other users are redirected
    back to the thing's detail page.
    """
    # TODO: Move to forms.py
    MetricFormSet = inlineformset_factory(Thing, Metric)
    thing = None
    if request.method == "POST":
        if thing_id:
            thing = get_object_or_404(Thing, pk=thing_id)
            # Replaced the Python-2-only '<>' operator with '!='.
            if thing.owner != request.user:
                # TODO: Error Message
                return HttpResponseRedirect(thing.get_absolute_url())
            thing_form = ThingForm(request.POST, instance=thing)
        else:
            thing_form = ThingForm(request.POST)
        if thing_form.is_valid():
            thing_from_form = thing_form.save(commit = False)
            thing_from_form.owner = request.user
            formset = MetricFormSet(request.POST, instance=thing_from_form)
            if formset.is_valid():
                thing_from_form.save()
                formset.save()
                return HttpResponseRedirect(thing_from_form.get_absolute_url())
        # NOTE(review): when thing_form is invalid on POST, 'formset' is
        # never bound and the render below raises NameError -- pre-existing
        # behavior, left unchanged here.
    else:
        if thing_id:
            thing = get_object_or_404(Thing, pk=thing_id)
            if thing.owner != request.user:
                # TODO: Error Message
                return HttpResponseRedirect(thing.get_absolute_url())
            thing_form = ThingForm(instance=thing)
            formset = MetricFormSet(instance=thing)
        else:
            thing_form = ThingForm()
            formset = MetricFormSet()
    return render(request, 'thingstore/thing_edit.html',
        {
            'thing': thing,
            'thing_form': thing_form,
            'metric_formset': formset
        }
    )
|
mit
|
staudt/everest-legacy
|
lib/deps/httplib2/multipart.py
|
1
|
4134
|
# #!/usr/bin/env python
# -*- coding: utf-8 -*-
# <bolacha - http library for python, with cookies and upload support>
# Copyright (C) <2010> Gabriel Falcão <gabriel@nacaolivre.org>
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import types
from uuid import uuid4
from urllib import quote_plus, urlencode
from glob import glob
from os.path import basename
from mimetypes import guess_type
BOUNDARY = uuid4().hex
class file_part:
    """Pairs a file-like object with extra per-part HTTP headers."""

    def __init__(self, file_item, headers):
        self.file_item = file_item
        self.headers = headers
def is_file(obj):
    """Return True when *obj* is a file_part wrapper (i.e. a file upload)."""
    return isinstance(obj, file_part)
def to_str(s, encoding='utf-8', strings_only=False, errors='strict'):
    """Coerce *s* to a byte string (adapted from Django's smart_str, Python 2).

    With strings_only=True, None and ints pass through unchanged.
    """
    if strings_only and isinstance(s, (types.NoneType, int)):
        return s
    if not isinstance(s, basestring):
        try:
            return str(s)
        except UnicodeEncodeError:
            if isinstance(s, Exception):
                # An Exception subclass containing non-ASCII data that doesn't
                # know how to print itself properly. We shouldn't raise a
                # further exception.
                # Bug fix: the original called the undefined name 'smart_str'
                # here (a leftover from the Django source); recurse into
                # to_str instead.
                return ' '.join([to_str(arg, encoding, strings_only,
                                        errors) for arg in s])
            return unicode(s).encode(encoding, errors)
    elif isinstance(s, unicode):
        return s.encode(encoding, errors)
    elif s and encoding != 'utf-8':
        return s.decode('utf-8', errors).encode(encoding, errors)
    else:
        return s
def expand_items(dictionary):
    """Flatten a dict into (key, value) pairs, fanning out sequence values.

    {'key': ('v1', 'v2')} becomes [('key', 'v1'), ('key', 'v2')].
    """
    pairs = []
    for key, value in dictionary.items():
        if not isinstance(value, (list, tuple)):
            pairs.append((key, value))
        else:
            pairs.extend((key, item) for item in value)
    return pairs
def encode_multipart(boundary, data):
    """Build a multipart/form-data body for *data* using *boundary*.

    file_part values are rendered by encode_file(); every other value is
    sent as a plain form field. Returns the full CRLF-joined body string.
    """
    lines = []
    for key, value in expand_items(data):
        if is_file(value):
            lines.extend(encode_file(boundary, key, value))
        else:
            # The original repeated the is_file() test inside this branch,
            # where it could never be true; the dead duplicate was removed.
            lines.extend([
                '--' + boundary,
                'Content-Disposition: form-data; name="%s"' % to_str(key),
                '',
                to_str(value)
            ])
    lines.extend([
        '--' + boundary + '--',
        '',
    ])
    return '\r\n'.join(lines)
def guess_mime(path):
    """Guess the MIME type for *path*, defaulting to application/octet-stream."""
    mime_type, _encoding = guess_type(path)
    return mime_type or 'application/octet-stream'
def encode_file(boundary, key, file):
    """Encode one file_part as the list of lines of a multipart section."""
    section = [
        '--' + boundary,
        'Content-Disposition: form-data; name="%s"; filename="%s"'
        % (to_str(key), to_str(basename(file.file_item.name))),
        'Content-Type: %s' % guess_mime(file.file_item.name),
    ]
    # Extra per-part headers supplied on the file_part wrapper.
    for header, value in file.headers.items():
        section.append(to_str(header) + ': ' + to_str(value))
    section.append('')
    section.append(to_str(file.file_item.read()))
    return section
|
mit
|
igurrutxaga/tvalacarta
|
python/main-classic/channels/fpt.py
|
3
|
4035
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# tvalacarta - XBMC Plugin
# Fútbol Para Todos
# http://blog.tvalacarta.info/plugin-xbmc/tvalacarta/
#
# Autor: Juan Pablo Candioti (@JPCandioti)
# Desarrollo basado sobre otros canales de tvalacarta
#------------------------------------------------------------
import urlparse,re
import urllib
from core import logger
from core import scrapertools
from core.item import Item
DEBUG = True
CHANNELNAME = "fpt"
MAIN_URL = "http://www.futbolparatodos.com.ar"
def isGeneric():
    """Tell tvalacarta that this channel uses the generic UI flow."""
    return True
def mainlist(item):
    """Build the channel's main menu from the SECCIONES list on the site."""
    logger.info("[" + CHANNELNAME + "] mainlist")
    # Download the front page and cut out the sections list.
    item.url = MAIN_URL
    data = scrapertools.cachePage(item.url)
    data = scrapertools.find_single_match(data,"<h3>SECCIONES</h3>(.*?)</ul>")
    patron = '<li><img[^<]+<a href="([^"]+)">([^<]+)</a></li>'
    itemlist = []
    for scrapedurl, scrapedtitle in re.compile(patron, re.DOTALL).findall(data):
        itemlist.append(Item(channel=CHANNELNAME, action="videos",
                             title=scrapertools.htmlclean(scrapedtitle),
                             thumbnail="", plot="",
                             url=urlparse.urljoin(item.url, scrapedurl)))
    return itemlist
def videos(item):
    """List the videos of one section, plus a pagination item when available."""
    logger.info("[" + CHANNELNAME + "] videos")
    # Download the section page.
    data = scrapertools.cachePage(item.url)
    if (DEBUG): logger.info(data)
    # Section slug, used below to build the "next page" AJAX URL.
    # NOTE(review): the trailing lazy '(.*?)' can match the empty string --
    # verify against scrapertools.get_match semantics.
    tipo = scrapertools.get_match(item.url, '/seccion/(.*?)')
    if tipo.endswith("/"):
        tipo = tipo[0:-1]
    # Marker id of the last page element; empty when there is no next page.
    try:
        pagina_siguiente = scrapertools.get_match(data, '<div\s+id="?(\d+)"?\s+class="ultimo removerultimo".*?>')
    except:
        pagina_siguiente = ""
    if (DEBUG): logger.info("pagina_siguiente=" + pagina_siguiente)
    # Extract id, image, title and description of each video.
    # The raw string below is a sample of the HTML being matched.
    '''
    <li class=golitem>
    <div style="position: relative;">
    <a href="http://www.youtube.com/embed/evylJotWAkU?autoplay=1" class="lbpModal cboxElement" title="El amor para toda la vida – Godoy Cruz – Fútbol Para Todos">
    <img src="http://img.youtube.com/vi/evylJotWAkU/0.jpg" width=210 alt="El amor para toda la vida – Godoy Cruz – Fútbol Para Todos"/>
    <img src="http://img.futbolparatodos.com.ar/wp-content/uploads/transparent-play-player2.png" width=210 style="position:absolute; z-index: 1; left: -1px;" alt=play class=transpa>
    </a>
    </div>
    <div class=golitemdetalle>14/03/2013</br>
    <a href="http://www.futbolparatodos.com.ar/2013/03/14/el-amor-para-toda-la-vida-godoy-cruz-futbol-para-todos/">El amor para toda la vida – Godoy Cruz – Fútbol Para Todos</a>
    </br></div></li>
    '''
    patron = '<li\s*class[^<]+'
    patron += '<div\s*style="position: relative[^<]+'
    patron += '<a\s*href="http://www.youtube.com/embed/(.{11})\?autoplay=1"[^<]+'
    patron += '<img\s*.*?src="([^"]+)"[^<]+'
    patron += '<img[^<]+'
    patron += '</a[^<]+'
    patron += '</div[^<]+'
    patron += '<div\s*class=golitemdetalle>[^<]+</br[^<]+'
    patron += '<a[^>]+>(.*?)</a></br>(.*?)</div></li>'
    matches = re.compile(patron, re.DOTALL).findall(data)
    itemlist = []
    for id_video, ithumbnail, ititle, iplot in matches:
        # Strip HTML entities from the title before logging/cleaning.
        ititle2 = re.sub(r"&#?\w+;", "", ititle)
        logger.info("[" + CHANNELNAME + "] title=" + ititle2)
        # Append the video item to the listing.
        itemlist.append( Item(channel=CHANNELNAME, title=scrapertools.htmlclean(ititle2), action="play", server="youtube", url="http://www.youtube.com/watch?v="+id_video, thumbnail=ithumbnail, plot=iplot, folder=False) )
    # If a next page exists, append a pagination item.
    if pagina_siguiente != "":
        itemlist.append( Item(channel=CHANNELNAME, title=">> Página siguiente", action="videos", url=MAIN_URL+"/wp-content/themes/fpt2/jquery_cargar_videos.php?tipo="+tipo+"&desde="+str(int(pagina_siguiente)+1), folder=True) )
    return itemlist
|
gpl-3.0
|
kubernetes-client/python
|
kubernetes/client/models/v1_limit_range.py
|
1
|
6534
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: release-1.18
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes.client.configuration import Configuration
class V1LimitRange(object):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.
    """

    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    openapi_types = {
        'api_version': 'str',
        'kind': 'str',
        'metadata': 'V1ObjectMeta',
        'spec': 'V1LimitRangeSpec'
    }

    attribute_map = {
        'api_version': 'apiVersion',
        'kind': 'kind',
        'metadata': 'metadata',
        'spec': 'spec'
    }

    def __init__(self, api_version=None, kind=None, metadata=None, spec=None, local_vars_configuration=None):  # noqa: E501
        """V1LimitRange - a model defined in OpenAPI"""  # noqa: E501
        if local_vars_configuration is None:
            local_vars_configuration = Configuration()
        self.local_vars_configuration = local_vars_configuration

        self._api_version = None
        self._kind = None
        self._metadata = None
        self._spec = None
        self.discriminator = None

        # Only assign through the property setters when a value was given,
        # so unset fields stay None.
        if api_version is not None:
            self.api_version = api_version
        if kind is not None:
            self.kind = kind
        if metadata is not None:
            self.metadata = metadata
        if spec is not None:
            self.spec = spec

    @property
    def api_version(self):
        """Gets the api_version of this V1LimitRange.  # noqa: E501

        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources  # noqa: E501

        :return: The api_version of this V1LimitRange.  # noqa: E501
        :rtype: str
        """
        return self._api_version

    @api_version.setter
    def api_version(self, api_version):
        """Sets the api_version of this V1LimitRange.

        APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources  # noqa: E501

        :param api_version: The api_version of this V1LimitRange.  # noqa: E501
        :type: str
        """

        self._api_version = api_version

    @property
    def kind(self):
        """Gets the kind of this V1LimitRange.  # noqa: E501

        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds  # noqa: E501

        :return: The kind of this V1LimitRange.  # noqa: E501
        :rtype: str
        """
        return self._kind

    @kind.setter
    def kind(self, kind):
        """Sets the kind of this V1LimitRange.

        Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds  # noqa: E501

        :param kind: The kind of this V1LimitRange.  # noqa: E501
        :type: str
        """

        self._kind = kind

    @property
    def metadata(self):
        """Gets the metadata of this V1LimitRange.  # noqa: E501

        :return: The metadata of this V1LimitRange.  # noqa: E501
        :rtype: V1ObjectMeta
        """
        return self._metadata

    @metadata.setter
    def metadata(self, metadata):
        """Sets the metadata of this V1LimitRange.

        :param metadata: The metadata of this V1LimitRange.  # noqa: E501
        :type: V1ObjectMeta
        """

        self._metadata = metadata

    @property
    def spec(self):
        """Gets the spec of this V1LimitRange.  # noqa: E501

        :return: The spec of this V1LimitRange.  # noqa: E501
        :rtype: V1LimitRangeSpec
        """
        return self._spec

    @spec.setter
    def spec(self, spec):
        """Sets the spec of this V1LimitRange.

        :param spec: The spec of this V1LimitRange.  # noqa: E501
        :type: V1LimitRangeSpec
        """

        self._spec = spec

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        # Recursively convert nested models (anything with to_dict), lists,
        # and dicts; plain values are copied through.
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, V1LimitRange):
            return False

        return self.to_dict() == other.to_dict()

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        if not isinstance(other, V1LimitRange):
            return True

        return self.to_dict() != other.to_dict()
|
apache-2.0
|
ovilab/atomify
|
libs/lammps/tools/python/neb_combine.py
|
51
|
2170
|
#!/usr/bin/env python
# make new dump file by combining snapshots from multiple NEB replica dumps
# Syntax: neb_combine.py -switch arg(s) -switch arg(s) ...
# -o outfile = new dump file
# each snapshot has NEB atoms from all replicas
# -r dump1 dump2 ... = replica dump files of NEB atoms
# can be in any order
# -b dumpfile = background atoms (optional)
# first snapshot in this file used as static non-NEB atoms
import sys,os
path = os.environ["LAMMPS_PYTHON_TOOLS"]
sys.path.append(path)
from dump import dump
# parse args
# -o outfile (required), -b background dump (optional),
# -r one or more replica dumps (required); parsing stops at the first
# unrecognized token. (Python 2 script: note the print statement below.)
outfile = ""
backfile = ""
rfiles = []

argv = sys.argv
iarg = 1
narg = len(argv)
while iarg < narg:
  if argv[iarg] == "-o":
    outfile = argv[iarg+1]
    iarg += 2
  elif argv[iarg] == "-b":
    backfile = argv[iarg+1]
    iarg += 2
  elif argv[iarg] == "-r":
    # consume file names until the next '-switch' or end of argv
    ilast = iarg + 1
    while ilast < narg and argv[ilast][0] != '-': ilast += 1
    rfiles = argv[iarg+1:ilast]
    iarg = ilast
  else: break

# leftover args or missing required switches -> usage message
if iarg < narg or not outfile or not rfiles:
  print "Syntax: neb_combine.py -o outfile -b backfile -r dump1 dump2 ..."
  sys.exit()

# start from a fresh output file
if os.path.exists(outfile): os.remove(outfile)
# ntotal = total atoms in each snapshot
# reset IDs of atoms in each NEB dump file
ntotal = 0
d = []
for file in rfiles:
  one = dump(file)
  nnew = one.snaps[0].nselect
  # renumber this replica's atoms to follow the previous replicas'
  idvec = range(ntotal+1,ntotal+nnew+1)
  one.setv("id",idvec)
  ntotal += nnew
  d.append(one)

# nback = additional atoms in each snapshot
# reset IDs of atoms in background file
if backfile:
  back = dump(backfile)
  # only the first snapshot of the background file is used (static atoms)
  t = back.time()
  back.tselect.one(t[0])
  nback = back.snaps[0].nselect
  idvec = range(ntotal+1,ntotal+nback+1)
  back.setv("id",idvec)
else: nback = 0
ntotal += nback

# write out each snapshot
# natoms = ntotal, by overwriting nselect
# add background atoms if requested
times = d[0].time()
for time in times:
  d[0].tselect.one(time)
  i = d[0].findtime(time)
  # temporarily report the combined atom count in the first replica's
  # snapshot header (write(...,1,1) emits the header), then restore it
  hold = d[0].snaps[i].nselect
  d[0].snaps[i].nselect = ntotal
  d[0].write(outfile,1,1)
  d[0].snaps[i].nselect = hold
  # append the remaining replicas (and the background) without headers
  for one in d[1:]:
    one.tselect.one(time)
    one.write(outfile,0,1)
  if backfile: back.write(outfile,0,1)
|
gpl-3.0
|
johan--/asteroids
|
game/load.py
|
2
|
2097
|
# -*- coding: utf-8 *-*
import pyglet
from . import util
from . import resources
from . import physicalobject
from . import ship as ship_
import random
def asteroid(player_pos=(-1000,-1000), screensize=(800,600), score=0, *args, **kwargs):
    """Spawn an asteroid at a random position at least 300 px from the player.

    Top speed scales with the current score; sprite image and spin are random.
    """
    pos = util.random_pos(screensize)
    while util.distance(pos, player_pos) < 300:
        pos = util.random_pos(screensize)
    top_speed = 90 + score * 3
    velocity = [random.randrange(-top_speed, top_speed) for _ in (0, 1)]
    spin = random.randrange(-250, 250)
    image = random.sample(resources.asteroid_images, 1)[0]
    return physicalobject.PhysicalObject(vel=velocity, rotation_speed=spin,
                                         x=pos[0], y=pos[1], img=image,
                                         screensize=screensize, *args, **kwargs)
def ship(screensize=(800,600), *args, **kwargs):
    """Create the player's ship centered on screen, pointing up (rotation -90)."""
    return ship_.Ship(img=resources.ship_image, rotation=-90,
                      x=screensize[0]/2, y=screensize[1]/2,
                      thrust_image=resources.ship_thrust,
                      screensize=screensize, *args, **kwargs)
def debris(screensize=(800,600), *args, **kwargs):
    """Create two fullscreen rock images that drift rightward as background."""
    image = resources.debris_image
    # Anchor at the right edge so the two frames tile seamlessly.
    image.anchor_x = image.width
    image.anchor_y = 0
    frames = []
    for i in (0, 1):
        start_x = -i * screensize[0]
        frames.append(physicalobject.ScaledMovingSprite(
            img=image, vel=(30, 0), x=start_x,
            screensize=screensize, *args, **kwargs))
    return frames
class ClickableSprite(pyglet.sprite.Sprite):
    """Sprite that hides itself and fires a callback when clicked."""

    def __init__(self, hook_function, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.hook_function = hook_function

    def on_mouse_press(self, x, y, button, modifiers):
        if not self.visible:
            return
        # Hit-test against a box centered on (self.x, self.y).
        # NOTE(review): assumes the image anchor is the sprite's center --
        # verify against how resources set anchor_x/anchor_y.
        left = self.x - self.width / 2.0
        bottom = self.y - self.height / 2.0
        if (left <= x <= left + self.width) and (bottom <= y <= bottom + self.height):
            self.visible = False
            self.hook_function()
|
bsd-3-clause
|
alishakiba/kaggle-ndsb
|
configurations/bagging_17_cr4_ds.py
|
6
|
7849
|
import numpy as np
import theano
import theano.tensor as T
import lasagne as nn
import data
import load
import nn_plankton
import dihedral
import dihedral_fast
import tmp_dnn
import tta
validation_split_path = "splits/bagging_split_17.pkl"
patch_sizes = [(95, 95), (47, 47)]
augmentation_params = {
'zoom_range': (1 / 1.6, 1.6),
'rotation_range': (0, 360),
'shear_range': (-20, 20),
'translation_range': (-10, 10),
'do_flip': True,
'allow_stretch': 1.3,
}
batch_size = 128 // 4
chunk_size = 32768 // 4
num_chunks_train = 840
momentum = 0.9
learning_rate_schedule = {
0: 0.003,
700: 0.0003,
800: 0.00003,
}
validate_every = 20
save_every = 20
def estimate_scale(img):
    """Return the image's longest side relative to the 85 px reference size."""
    longest_side = np.maximum(img.shape[0], img.shape[1])
    return longest_side / 85.0
scale_factors = [estimate_scale, 5.0] # combine size-based rescaling + fixed rescaling
augmentation_transforms_test = tta.build_quasirandom_transforms(70, **{
'zoom_range': (1 / 1.4, 1.4),
'rotation_range': (0, 360),
'shear_range': (-10, 10),
'translation_range': (-8, 8),
'do_flip': True,
'allow_stretch': 1.2,
})
data_loader = load.ZmuvMultiscaleDataLoader(scale_factors=scale_factors, num_chunks_train=num_chunks_train,
patch_sizes=patch_sizes, chunk_size=chunk_size, augmentation_params=augmentation_params,
augmentation_transforms_test=augmentation_transforms_test, validation_split_path=validation_split_path)
# Conv2DLayer = nn.layers.cuda_convnet.Conv2DCCLayer
# MaxPool2DLayer = nn.layers.cuda_convnet.MaxPool2DCCLayer
Conv2DLayer = tmp_dnn.Conv2DDNNLayer
MaxPool2DLayer = tmp_dnn.MaxPool2DDNNLayer
def build_model():
    """Build the two-column multiscale convnet.

    Returns ([l0_variable, l0_fixed], l_out): the two InputLayers
    (variable-scale 95x95 patch and fixed-scale 47x47 patch) and the
    softmax output layer over data.num_classes classes. Both columns
    use cyclic slicing/rolling (dihedral symmetry) and maxout
    (FeaturePoolLayer, ds=2) on the dense layers.
    """
    # variable scale part
    l0_variable = nn.layers.InputLayer((batch_size, 1, patch_sizes[0][0], patch_sizes[0][1]))
    l0c = dihedral.CyclicSliceLayer(l0_variable)
    l1a = Conv2DLayer(l0c, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l1b = Conv2DLayer(l1a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
    l1r = dihedral_fast.CyclicConvRollLayer(l1)
    l2a = Conv2DLayer(l1r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l2b = Conv2DLayer(l2a, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
    l2r = dihedral_fast.CyclicConvRollLayer(l2)
    l3a = Conv2DLayer(l2r, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3b = Conv2DLayer(l3a, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3c = Conv2DLayer(l3b, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
    l3r = dihedral_fast.CyclicConvRollLayer(l3)
    l4a = Conv2DLayer(l3r, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4b = Conv2DLayer(l4a, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4c = Conv2DLayer(l4b, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4 = MaxPool2DLayer(l4c, ds=(3, 3), strides=(2, 2))
    l4r = dihedral_fast.CyclicConvRollLayer(l4)
    l4f = nn.layers.flatten(l4r)
    l5 = nn.layers.DenseLayer(nn.layers.dropout(l4f, p=0.5), num_units=1024, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l5fp = nn.layers.FeaturePoolLayer(l5, ds=2)
    l5m = dihedral.CyclicPoolLayer(l5fp, pool_function=nn_plankton.rms)
    l6 = nn.layers.DenseLayer(nn.layers.dropout(l5m, p=0.5), num_units=1024, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l6fp = nn.layers.FeaturePoolLayer(l6, ds=2)
    l_variable = l6fp
    # fixed scale part
    l0_fixed = nn.layers.InputLayer((batch_size, 1, patch_sizes[1][0], patch_sizes[1][1]))
    l0c = dihedral.CyclicSliceLayer(l0_fixed)
    l1a = Conv2DLayer(l0c, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l1b = Conv2DLayer(l1a, num_filters=8, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
    l1r = dihedral_fast.CyclicConvRollLayer(l1)
    l2a = Conv2DLayer(l1r, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l2b = Conv2DLayer(l2a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
    l2r = dihedral_fast.CyclicConvRollLayer(l2)
    l3a = Conv2DLayer(l2r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3b = Conv2DLayer(l3a, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3c = Conv2DLayer(l3b, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
    l3r = dihedral_fast.CyclicConvRollLayer(l3)
    l3f = nn.layers.flatten(l3r)
    l4 = nn.layers.DenseLayer(nn.layers.dropout(l3f, p=0.5), num_units=512, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l4fp = nn.layers.FeaturePoolLayer(l4, ds=2)
    l4m = dihedral.CyclicPoolLayer(l4fp, pool_function=nn_plankton.rms)
    l5 = nn.layers.DenseLayer(nn.layers.dropout(l4m, p=0.5), num_units=512, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu)
    l5fp = nn.layers.FeaturePoolLayer(l5, ds=2)
    l_fixed = l5fp
    # merge the parts
    l_merged = nn.layers.concat([l_variable, l_fixed])
    l7 = nn.layers.DenseLayer(nn.layers.dropout(l_merged, p=0.5), num_units=data.num_classes, nonlinearity=T.nnet.softmax, W=nn_plankton.Orthogonal(1.0))
    return [l0_variable, l0_fixed], l7
def build_objective(l_ins, l_out):
    """L2-regularized log-loss objective over all non-bias parameters.

    The regularization term is built once from the network graph; the
    inner loss closure adds it (scaled by lambda_reg) to the log loss.
    """
    lambda_reg = 0.0005
    reg_term = sum(T.sum(p ** 2)
                   for p in nn.layers.get_all_non_bias_params(l_out))

    def loss(y, t):
        return nn_plankton.log_loss(y, t) + lambda_reg * reg_term

    return nn.objectives.Objective(l_out, loss_function=loss)
|
mit
|
h42i/trafficlight
|
main.py
|
1
|
2880
|
import network
import time
from umqtt.simple import MQTTClient
from lights import *
from traffic import *
# Config
wifi_ssid = "HaSi-Kein-Internet-Legacy"
wifi_psk = "bugsbunny"
mqtt_server = "mqtt.hasi"
mqtt_client_name = "traffic_light"
mqtt_topic = "hasi/lights/traffic_light"
# State
lights_on = True
mqtt_client = None
snmp_traffic = None
light = None
wlan = None
# Set everything up
def setup():
    """One-time initialisation of Wi-Fi, MQTT, SNMP reader and LEDs.

    Populates the module-level globals (wlan, mqtt_client, snmp_traffic,
    light). Blocks until the WLAN association succeeds. Also re-invoked
    by loop() whenever the connection drops.
    """
    global wifi_ssid
    global wifi_psk
    global mqtt_topic
    global mqtt_server
    global mqtt_client
    global mqtt_client_name
    global snmp_traffic
    global light
    global wlan
    # Setup Network
    wlan = network.WLAN(network.STA_IF)
    wlan.active(True)
    if not wlan.isconnected():
        print('connecting to network...')
        wlan.connect(wifi_ssid, wifi_psk)
        # Busy-wait until the access point accepts us.
        while not wlan.isconnected():
            pass
    print('network config:', wlan.ifconfig())
    # Setup MQTT
    mqtt_client = MQTTClient(mqtt_client_name, mqtt_server)
    mqtt_client.set_callback(mqtt_callback)
    mqtt_client.connect()
    mqtt_client.subscribe(bytes(mqtt_topic, "utf-8"))
    # Setup remaining stuff
    snmp_traffic = Traffic()
    light = Lights()
# MQTT-Callback; Triggered by c.check_msg()
def mqtt_callback(topic, msg):
    """MQTT message handler, triggered via mqtt_client.check_msg().

    Flips the module-level `lights_on` flag when the payload decodes
    to "on" or "off"; any other payload is ignored.
    """
    global lights_on
    command = str(msg, "utf-8")
    if command == "on":
        lights_on = True
    elif command == "off":
        lights_on = False
# Test routine
def test_lights():
    """Power-on self-test: cycle through all LED states, then blink twice.

    Leaves the lamp in the low-load state. Timing (0.2-0.5 s steps) is
    purely cosmetic so a human can watch the sequence.
    """
    global light
    print('testing...')
    light.set_all_color((0, 0, 0, 0))
    time.sleep(0.5)
    light.set_low_load()
    time.sleep(0.2)
    light.set_middle_load()
    time.sleep(0.2)
    light.set_high_load()
    time.sleep(0.2)
    # Two full white flashes.
    light.set_all_color((255, 255, 255, 31))
    time.sleep(0.2)
    light.set_all_color((0, 0, 0, 0))
    time.sleep(0.2)
    light.set_all_color((255, 255, 255, 31))
    time.sleep(0.2)
    light.set_all_color((0, 0, 0, 0))
    light.set_low_load()
    print('testing complete')
# Work work work...
def loop():
    """Main loop: poll MQTT once per second and map traffic onto the lamp.

    Never returns. If the WLAN drops, re-runs setup() to reconnect.
    When `lights_on` is False the lamp is blanked; otherwise the SNMP
    traffic reading selects low/middle/high/chaos load colours.
    """
    global mqtt_client
    global lights_on
    global light
    global snmp_traffic
    global wlan
    while True:
        if wlan.isconnected():
            #check for mqtt-messages
            time.sleep(1)
            mqtt_client.check_msg()
            if not lights_on:
                light.set_all_color((0, 0, 0, 0))
            else:
                traffic = snmp_traffic.get_traffic()
                # 0 / None readings (e.g. SNMP failure) leave the lamp unchanged.
                # Thresholds' unit depends on Traffic.get_traffic() — TODO confirm.
                if traffic != 0 and traffic != None:
                    if traffic < 2:
                        light.set_low_load()
                    elif traffic < 10:
                        light.set_middle_load()
                    elif traffic < 14:
                        light.set_high_load()
                    else:
                        light.set_chaos_load()
        else:
            setup()
# Boot sequence: connect, run the visible lamp self-test, then serve forever.
setup()
test_lights()
loop()
# NOTE(review): unreachable — loop() never returns, so the MQTT client is
# never cleanly disconnected.
mqtt_client.disconnect()
|
mit
|
pombreda/tipfy
|
tests/ext_mako_test.py
|
4
|
2712
|
# -*- coding: utf-8 -*-
"""
Tests for tipfyext.mako
"""
import os
import sys
import unittest
from tipfy import RequestHandler, Request, Response, Tipfy
from tipfy.app import local
from tipfyext.mako import Mako, MakoMixin
import test_utils
current_dir = os.path.abspath(os.path.dirname(__file__))
templates_dir = os.path.join(current_dir, 'resources', 'mako_templates')
class TestMako(test_utils.BaseTestCase):
    """Tests for tipfyext.mako rendering via Mako and MakoMixin.

    The four original tests duplicated both the app/request/Mako fixture
    and a throwaway MakoMixin handler class; both are factored into the
    helpers below. Test names and assertions are unchanged.
    """

    def _make_env(self):
        """Return (app, request, mako) configured with the test templates dir.

        Mako(app) is constructed for its registration side effect as well
        as for direct use.
        """
        app = Tipfy(config={'tipfyext.mako': {'templates_dir': templates_dir}})
        request = Request.from_values()
        return app, request, Mako(app)

    def _make_mixin_handler(self, app, request):
        """Return a minimal RequestHandler+MakoMixin instance."""
        class MyHandler(RequestHandler, MakoMixin):
            def __init__(self, app, request):
                self.app = app
                self.request = request
                self.context = {}
        return MyHandler(app, request)

    def test_render_template(self):
        app, request, mako = self._make_env()
        handler = RequestHandler(app, request)
        message = 'Hello, World!'
        res = mako.render_template(handler, 'template1.html', message=message)
        self.assertEqual(res, message + '\n')

    def test_render_response(self):
        app, request, mako = self._make_env()
        handler = RequestHandler(app, request)
        message = 'Hello, World!'
        response = mako.render_response(handler, 'template1.html', message=message)
        self.assertEqual(isinstance(response, Response), True)
        self.assertEqual(response.mimetype, 'text/html')
        self.assertEqual(response.data, message + '\n')

    def test_mako_mixin_render_template(self):
        app, request, mako = self._make_env()
        handler = self._make_mixin_handler(app, request)
        message = 'Hello, World!'
        response = handler.render_template('template1.html', message=message)
        self.assertEqual(response, message + '\n')

    def test_mako_mixin_render_response(self):
        app, request, mako = self._make_env()
        handler = self._make_mixin_handler(app, request)
        message = 'Hello, World!'
        response = handler.render_response('template1.html', message=message)
        self.assertEqual(isinstance(response, Response), True)
        self.assertEqual(response.mimetype, 'text/html')
        self.assertEqual(response.data, message + '\n')
# Allow running this test module directly.
if __name__ == '__main__':
    test_utils.main()
|
bsd-3-clause
|
cnsoft/kbengine-cocos2dx
|
kbe/res/scripts/common/Lib/unittest/util.py
|
794
|
4157
|
"""Various utility functions."""
from collections import namedtuple, OrderedDict
__unittest = True
_MAX_LENGTH = 80


def safe_repr(obj, short=False):
    """Return repr(obj) without ever raising.

    Falls back to object.__repr__ when repr() itself raises. When
    `short` is true, results of _MAX_LENGTH characters or more are
    truncated with a ' [truncated]...' suffix.
    """
    try:
        result = repr(obj)
    except Exception:
        result = object.__repr__(obj)
    if short and len(result) >= _MAX_LENGTH:
        result = result[:_MAX_LENGTH] + ' [truncated]...'
    return result
def strclass(cls):
    """Return the fully qualified 'module.ClassName' string for cls."""
    return '{0}.{1}'.format(cls.__module__, cls.__name__)
def sorted_list_difference(expected, actual):
    """Finds elements in only one or the other of two, sorted input lists.
    Returns a two-element tuple of lists. The first list contains those
    elements in the "expected" list but not in the "actual" list, and the
    second contains those elements in the "actual" list but not in the
    "expected" list. Duplicate elements in either input list are ignored.
    """
    # Classic merge-style walk over two sorted lists. Indexing (rather
    # than iterators) lets the inner duplicate-skipping loops run the
    # index past the end, with the resulting IndexError terminating the
    # whole walk and flushing the remaining tail of the other list.
    i = j = 0
    missing = []
    unexpected = []
    while True:
        try:
            e = expected[i]
            a = actual[j]
            if e < a:
                # e only in expected; skip its duplicates.
                missing.append(e)
                i += 1
                while expected[i] == e:
                    i += 1
            elif e > a:
                # a only in actual; skip its duplicates.
                unexpected.append(a)
                j += 1
                while actual[j] == a:
                    j += 1
            else:
                # Present in both; skip duplicates on BOTH sides. The
                # finally ensures j advances even if expected[i] raises.
                i += 1
                try:
                    while expected[i] == e:
                        i += 1
                finally:
                    j += 1
                    while actual[j] == a:
                        j += 1
        except IndexError:
            # One side exhausted: everything left on the other side is
            # missing/unexpected respectively.
            missing.extend(expected[i:])
            unexpected.extend(actual[j:])
            break
    return missing, unexpected
def unorderable_list_difference(expected, actual):
    """Same behavior as sorted_list_difference but for lists of
    unorderable items (like dicts).

    As it does a linear search per item it has O(n*n) performance.
    Both input lists are consumed/mutated: expected is emptied and
    matched items are removed from actual.
    """
    missing = []
    while expected:
        candidate = expected.pop()
        if candidate in actual:
            actual.remove(candidate)
        else:
            missing.append(candidate)
    # anything left in actual is unexpected
    return missing, actual
def three_way_cmp(x, y):
    """Return -1 if x < y, 0 if x == y and 1 if x > y"""
    if x < y:
        return -1
    return 1 if x > y else 0
# (actual count, expected count, element) triple reported by the diff helpers.
_Mismatch = namedtuple('Mismatch', 'actual expected value')
def _count_diff_all_purpose(actual, expected):
    'Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ'
    # elements need not be hashable
    # O(n*m) algorithm: each counted element is overwritten with the NULL
    # sentinel in both working lists so it is never counted twice.
    s, t = list(actual), list(expected)
    m, n = len(s), len(t)
    NULL = object()
    result = []
    # First pass: every distinct element of `actual`, counted in both lists.
    for i, elem in enumerate(s):
        if elem is NULL:
            continue
        cnt_s = cnt_t = 0
        for j in range(i, m):
            if s[j] == elem:
                cnt_s += 1
                s[j] = NULL
        for j, other_elem in enumerate(t):
            if other_elem == elem:
                cnt_t += 1
                t[j] = NULL
        if cnt_s != cnt_t:
            diff = _Mismatch(cnt_s, cnt_t, elem)
            result.append(diff)
    # Second pass: elements remaining in `expected` never appeared in
    # `actual`, so their actual-count is 0 and they always mismatch.
    for i, elem in enumerate(t):
        if elem is NULL:
            continue
        cnt_t = 0
        for j in range(i, n):
            if t[j] == elem:
                cnt_t += 1
                t[j] = NULL
        diff = _Mismatch(0, cnt_t, elem)
        result.append(diff)
    return result
def _ordered_count(iterable):
'Return dict of element counts, in the order they were first seen'
c = OrderedDict()
for elem in iterable:
c[elem] = c.get(elem, 0) + 1
return c
def _count_diff_hashable(actual, expected):
    'Returns list of (cnt_act, cnt_exp, elem) triples where the counts differ'
    # elements must be hashable
    counts_act = _ordered_count(actual)
    counts_exp = _ordered_count(expected)
    result = []
    # Elements seen in `actual`, compared against their expected counts.
    for elem, cnt_a in counts_act.items():
        cnt_e = counts_exp.get(elem, 0)
        if cnt_a != cnt_e:
            result.append(_Mismatch(cnt_a, cnt_e, elem))
    # Elements only in `expected` (actual count is 0).
    for elem, cnt_e in counts_exp.items():
        if elem not in counts_act:
            result.append(_Mismatch(0, cnt_e, elem))
    return result
|
lgpl-3.0
|
arcticshores/kivy
|
examples/canvas/tesselate.py
|
30
|
4477
|
'''
Tesselate Demonstration
=======================
This demonstrates the experimental library for tesselating polygons. You
should see a hollow square with some buttons below it. You can click and
drag to create additional shapes, watching the number of vertexes and elements
at the top of the screen. The 'debug' button toggles showing the mesh in
different colors.
'''
from kivy.app import App
from kivy.graphics import Mesh, Color
from kivy.graphics.tesselator import Tesselator, WINDING_ODD, TYPE_POLYGONS
from kivy.uix.widget import Widget
from kivy.uix.floatlayout import FloatLayout
from kivy.lang import Builder
from kivy.logger import Logger
Builder.load_string("""
<ShapeBuilder>:
BoxLayout:
size_hint_y: None
height: "48dp"
spacing: "2dp"
padding: "2dp"
ToggleButton:
text: "Debug"
id: debug
on_release: root.build()
Button:
text: "New shape"
on_release: root.push_shape()
Button:
text: "Build"
on_release: root.build()
Button:
text: "Reset"
on_release: root.reset()
BoxLayout:
size_hint_y: None
height: "48dp"
top: root.top
spacing: "2dp"
padding: "2dp"
Label:
id: status
text: "Status"
""")
class ShapeBuilder(FloatLayout):
    """Interactive polygon-tesselation demo widget.

    Collects contours from touch input, tesselates all of them and
    renders the resulting meshes on canvas.after — filled in normal
    mode, or as per-mesh coloured wireframes in debug mode.
    """

    def __init__(self, **kwargs):
        super(ShapeBuilder, self).__init__(**kwargs)
        # Outer square + inner square contour = the 'hollow square' shape.
        self.shapes = [
            [100, 100, 300, 100, 300, 300, 100, 300],
            [150, 150, 250, 150, 250, 250, 150, 250]
        ]  # the 'hollow square' shape
        self.shape = []  # contour currently being drawn by the user
        self.build()

    def on_touch_down(self, touch):
        if super(ShapeBuilder, self).on_touch_down(touch):
            return True
        Logger.info('tesselate: on_touch_down (%5.2f, %5.2f)' % touch.pos)
        self.shape.extend(touch.pos)
        self.build()
        return True

    def on_touch_move(self, touch):
        if super(ShapeBuilder, self).on_touch_move(touch):
            return True
        Logger.info('tesselate: on_touch_move (%5.2f, %5.2f)' % touch.pos)
        self.shape.extend(touch.pos)
        self.build()
        return True

    def on_touch_up(self, touch):
        if super(ShapeBuilder, self).on_touch_up(touch):
            return True
        Logger.info('tesselate: on_touch_up (%5.2f, %5.2f)' % touch.pos)
        self.push_shape()
        self.build()

    def push_shape(self):
        """Commit the in-progress contour and start a new one."""
        self.shapes.append(self.shape)
        self.shape = []

    def build(self):
        """(Re)tesselate every contour and redraw canvas.after."""
        tess = Tesselator()
        count = 0
        # Contours need at least 3 values to be meaningful to the tesselator.
        for shape in self.shapes:
            if len(shape) >= 3:
                tess.add_contour(shape)
                count += 1
        if self.shape and len(self.shape) >= 3:
            tess.add_contour(self.shape)
            count += 1
        if not count:
            return
        ret = tess.tesselate(WINDING_ODD, TYPE_POLYGONS)
        Logger.info('tesselate: build: tess.tesselate returns {}'.format(ret))
        self.canvas.after.clear()
        debug = self.ids.debug.state == "down"
        if debug:
            with self.canvas.after:
                c = 0
                for vertices, indices in tess.meshes:
                    # Distinct hue per mesh so the triangulation is visible.
                    Color(c, 1, 1, mode="hsv")
                    c += 0.3
                    # Rebuild the index list as line segments fanning out
                    # from vertex 0. BUG FIX: len(vertices) / 4 is a float
                    # under Python 3 and range() requires an int — use
                    # floor division (vertices are x, y, u, v quadruples).
                    indices = [0]
                    for i in range(1, len(vertices) // 4):
                        if i > 0:
                            indices.append(i)
                        indices.append(i)
                        indices.append(0)
                        indices.append(i)
                    indices.pop(-1)
                    Mesh(vertices=vertices, indices=indices, mode="lines")
        else:
            with self.canvas.after:
                Color(1, 1, 1, 1)
                for vertices, indices in tess.meshes:
                    Mesh(vertices=vertices, indices=indices,
                         mode="triangle_fan")
        self.ids.status.text = "Shapes: {} - Vertex: {} - Elements: {}".format(
            count, tess.vertex_count, tess.element_count)

    def reset(self):
        """Drop all contours and clear the drawing."""
        self.shapes = []
        self.shape = []
        self.ids.status.text = "Shapes: {} - Vertex: {} - Elements: {}".format(
            0, 0, 0)
        self.canvas.after.clear()
class TessApp(App):
    """Minimal App wrapper; the root widget is the ShapeBuilder demo."""
    def build(self):
        return ShapeBuilder()
# Run immediately on import/execution (no __main__ guard, as in the original).
TessApp().run()
|
mit
|
fangdingjun/shadowsocks
|
shadowsocks/crypto/rc4_md5.py
|
1042
|
1339
|
#!/usr/bin/env python
#
# Copyright 2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import hashlib
from shadowsocks.crypto import openssl
__all__ = ['ciphers']
def create_cipher(alg, key, iv, op, key_as_bytes=0, d=None, salt=None,
                  i=1, padding=1):
    """Build an RC4 cipher keyed with MD5(key || iv), i.e. "rc4-md5".

    Only key, iv and op are used; the remaining parameters exist for
    signature compatibility with the other cipher factories.
    """
    digest = hashlib.md5()
    digest.update(key)
    digest.update(iv)
    return openssl.OpenSSLCrypto(b'rc4', digest.digest(), b'', op)
# Cipher registry: name -> (key length, iv length, factory).
ciphers = {
    'rc4-md5': (16, 16, create_cipher),
}
def test():
    """Round-trip self-test via the shared cipher test harness."""
    from shadowsocks.crypto import util
    cipher = create_cipher('rc4-md5', b'k' * 32, b'i' * 16, 1)
    decipher = create_cipher('rc4-md5', b'k' * 32, b'i' * 16, 0)
    util.run_cipher(cipher, decipher)
if __name__ == '__main__':
    test()
|
apache-2.0
|
badloop/SickRage
|
lib/tmdb_api/tmdb_api.py
|
57
|
29696
|
"""
tmdbsimple.py is a wrapper for The Movie Database API.
Refer to the official API documentation for more information.
http://docs.themoviedb.apiary.io/
Created by Celia Oakley on 2013-10-31.
"""
import json
import requests
class TMDB:
def __init__(self, api_key, version=3):
TMDB.api_key = str(api_key)
TMDB.url = 'https://api.themoviedb.org' + '/' + str(version)
@staticmethod
def _request(method, path, params={}, json_body={}):
url = TMDB.url + '/' + path + '?api_key=' + TMDB.api_key
if method == 'GET':
headers = {'Accept': 'application/json'}
content = requests.get(url, params=params, headers=headers).content
elif method == 'POST':
for key in params.keys():
url += '&' + key + '=' + params[key]
headers = {'Content-Type': 'application/json', \
'Accept': 'application/json'}
content = requests.post(url, data=json.dumps(json_body), \
headers=headers).content
elif method == 'DELETE':
for key in params.keys():
url += '&' + key + '=' + params[key]
headers = {'Content-Type': 'application/json', \
'Accept': 'application/json'}
content = requests.delete(url, data=json.dumps(json_body), \
headers=headers).content
else:
raise Exception('method: ' + method + ' not supported.')
response = json.loads(content.decode('utf-8'))
return response
#
# Set attributes to dictionary values.
# - e.g.
# >>> tmdb = TMDB()
# >>> movie = tmdb.Movie(103332)
# >>> response = movie.info()
# >>> movie.title # instead of response['title']
#
@staticmethod
def _set_attrs_to_values(object, response={}):
for key in response.keys():
setattr(object, key, response[key])
#
# Configuration
# http://docs.themoviedb.apiary.io/#configuration
#
class Configuration:
def __init__(self):
pass
def info(self):
path = 'configuration'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
#
# Account
# http://docs.themoviedb.apiary.io/#account
#
class Account:
def __init__(self, session_id):
self.session_id = session_id
# need to call this first to set account id
def info(self):
path = 'account'
params = {'session_id': self.session_id}
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, language
def lists(self, params={}):
path = 'account' + '/' + str(self.session_id) + '/lists'
params['session_id'] = self.session_id
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, sort_by, sort_order, language
def favorite_movies(self, params={}):
path = 'account' + '/' + str(self.session_id) + '/favorite_movies'
params['session_id'] = self.session_id
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# required JSON body: movie_id, favorite
def favorite(self, json_body):
path = 'account' + '/' + str(json_body['movie_id']) + '/favorite'
params = {'session_id': self.session_id}
response = TMDB._request('POST', path, params, json_body)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, sort_by, sort_order, language
def rated_movies(self, params={}):
path = 'account' + '/' + str(self.session_id) + '/rated_movies'
params['session_id'] = self.session_id
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, sort_by, sort_order, language
def movie_watchlist(self, params={}):
path = 'account' + '/' + str(self.session_id) + '/movie_watchlist'
params['session_id'] = self.session_id
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# required JSON body: movie_id, movie_watchlist
def movie_watchlist_post(self, json_body):
path = 'account' + '/' + str(json_body['movie_id']) + \
'/movie_watchlist'
params = {'session_id': self.session_id}
response = TMDB._request('POST', path, params, json_body)
TMDB._set_attrs_to_values(self, response)
return response
#
# Authentication
# http://docs.themoviedb.apiary.io/#authentication
#
# Note: to use authentication to access a user account, see:
# https://www.themoviedb.org/documentation/api/sessions
#
class Authentication:
def __init__(self):
pass
def token_new(self):
path = 'authentication/token/new'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
# required parameters: request_token
def session_new(self, params):
path = 'authentication/session/new'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
def guest_session_new(self):
path = 'authentication/guest_session/new'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
#
# Changes
# http://docs.themoviedb.apiary.io/#changes
#
class Changes:
def __init__(self):
pass
# optional parameters: page, start_date, end_date
def movie(self, params={}):
path = 'movie/changes'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, start_date, end_date
def person(self, params={}):
path = 'person/changes'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Collections
# http://docs.themoviedb.apiary.io/#collections
#
class Collections:
def __init__(self, id):
self.id = id
# optional parameter: language
def info(self, params={}):
path = 'collection' + '/' + str(self.id)
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: language, include_image_language
def images(self, params={}):
path = 'collection' + '/' + str(self.id) + '/images'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Companies
# http://docs.themoviedb.apiary.io/#companies
#
class Companies:
def __init__(self, id=0):
self.id = id
def info(self):
path = 'company' + '/' + str(self.id)
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, language
def movies(self, params={}):
path = 'company' + '/' + str(self.id) + '/movies'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Credits
# http://docs.themoviedb.apiary.io/#credits
#
class Credits:
def __init__(self, credit_id):
self.credit_id = credit_id
# optional parameters: language
def info(self, params={}):
path = 'credit' + '/' + str(self.credit_id)
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Discover
# http://docs.themoviedb.apiary.io/#discover
#
class Discover:
def __init__(self):
pass
# optional parameters: page, language, sort_by, include_adult, year,
# primary_release_year, vote_count.gte, vote_average.gte, with_genres,
# release_date.gte, release_date.lte, certification_country,
# certification.lte, with_companies
def movie(self, params):
path = 'discover/movie'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, language, sort_by, first_air_date_year,
# vote_count.gte, vote_average.gte, with_genres, with_networks,
# first_air_date.gte, first_air_date.lte
def tv(self, params):
path = 'discover/tv'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Find
# http://docs.themoviedb.apiary.io/#find
#
class Find:
def __init__(self, id=0):
self.id = id
# required parameters: external_source
def info(self, params={}):
path = 'find' + '/' + str(self.id)
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Genres
# http://docs.themoviedb.apiary.io/#genres
#
class Genres:
def __init__(self, id=0):
self.id = id
# optional parameters: language
def list(self, params={}):
path = 'genre/list'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, language, include_all_movies, include_adult
def movies(self, params={}):
path = 'genre' + '/' + str(self.id) + '/movies'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Jobs
# http://docs.themoviedb.apiary.io/#jobs
#
class Jobs:
def __init__(self):
pass
def list(self):
path = 'job/list'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
#
# Keywords
# http://docs.themoviedb.apiary.io/#keywords
#
class Keywords:
def __init__(self, id):
self.id = id
def info(self):
path = 'keyword' + '/' + str(self.id)
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, language
def movies(self, params={}):
path = 'keyword' + '/' + str(self.id) + '/movies'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Lists
# http://docs.themoviedb.apiary.io/#lists
#
class Lists:
def __init__(self, id=0, session_id=0):
self.id = id
self.session_id = session_id
def info(self):
path = 'list' + '/' + str(self.id)
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
# required parameters: movie_id
def item_status(self, params):
path = 'list' + '/' + str(self.id) + '/item_status'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# required JSON body: name, description
# optional JSON body: language
def create_list(self, json_body):
path = 'list'
params = {'session_id': self.session_id}
response = TMDB._request('POST', path, params, json_body)
TMDB._set_attrs_to_values(self, response)
return response
# required JSON body: media_id
def add_item(self, json_body):
path = 'list' + '/' + str(self.id) + '/add_item'
params = {'session_id': self.session_id}
response = TMDB._request('POST', path, params, json_body)
TMDB._set_attrs_to_values(self, response)
return response
# required JSON body: media_id
def remove_item(self, json_body):
path = 'list' + '/' + str(self.id) + '/remove_item'
params = {'session_id': self.session_id}
response = TMDB._request('POST', path, params, json_body)
TMDB._set_attrs_to_values(self, response)
return response
def delete_list(self):
path = 'list' + '/' + str(self.id)
params = {'session_id': self.session_id}
response = TMDB._request('DELETE', path, params)
TMDB._set_attrs_to_values(self, response)
return response
#
# Movies
# http://docs.themoviedb.apiary.io/#movies
#
class Movies:
""" """
def __init__(self, id=0):
self.id = id
# optional parameters: language
def info(self, params={}):
path = 'movie' + '/' + str(self.id)
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: country
def alternative_titles(self, params={}):
path = 'movie' + '/' + str(self.id) + '/alternative_titles'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
def credits(self):
path = 'movie' + '/' + str(self.id) + '/credits'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: language, include_image_language
def images(self, params={}):
path = 'movie' + '/' + str(self.id) + '/images'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
def keywords(self):
path = 'movie' + '/' + str(self.id) + '/keywords'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
def releases(self):
path = 'movie' + '/' + str(self.id) + '/releases'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
def trailers(self):
path = 'movie' + '/' + str(self.id) + '/trailers'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
def translations(self):
path = 'movie' + '/' + str(self.id) + '/translations'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, language
def similar_movies(self, params={}):
path = 'movie' + '/' + str(self.id) + '/similar_movies'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, language
def reviews(self, params={}):
path = 'movie' + '/' + str(self.id) + '/reviews'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: page, language
def lists(self, params={}):
path = 'movie' + '/' + str(self.id) + '/lists'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
# optional parameters: start_date, end_date
def changes(self, params={}):
path = 'movie' + '/' + str(self.id) + '/changes'
response = TMDB._request('GET', path, params)
TMDB._set_attrs_to_values(self, response)
return response
def latest(self):
path = 'movie/latest'
response = TMDB._request('GET', path)
TMDB._set_attrs_to_values(self, response)
return response
def upcoming(self, params=None):
    """Fetch the list of upcoming movies.

    :param params: optional query parameters (page, language).
    """
    # Fresh dict per call — avoids the shared mutable-default pitfall.
    if params is None:
        params = {}
    response = TMDB._request('GET', 'movie/upcoming', params)
    TMDB._set_attrs_to_values(self, response)
    return response
def now_playing(self, params=None):
    """Fetch the list of movies currently playing in theatres.

    :param params: optional query parameters (page, language).
    """
    # Fresh dict per call — avoids the shared mutable-default pitfall.
    if params is None:
        params = {}
    response = TMDB._request('GET', 'movie/now_playing', params)
    TMDB._set_attrs_to_values(self, response)
    return response
def popular(self, params=None):
    """Fetch the list of popular movies.

    :param params: optional query parameters (page, language).
    """
    # Fresh dict per call — avoids the shared mutable-default pitfall.
    if params is None:
        params = {}
    response = TMDB._request('GET', 'movie/popular', params)
    TMDB._set_attrs_to_values(self, response)
    return response
def top_rated(self, params=None):
    """Fetch the list of top rated movies.

    :param params: optional query parameters (page, language).
    """
    # Fresh dict per call — avoids the shared mutable-default pitfall.
    if params is None:
        params = {}
    path = 'movie/top_rated'
    # Consistency fix: the old code computed ``path`` but then rebuilt the
    # same URL inline in the request call; use the computed path like every
    # sibling method does (resulting URL is unchanged).
    response = TMDB._request('GET', path, params)
    TMDB._set_attrs_to_values(self, response)
    return response
def account_states(self, params):
    """Fetch this movie's account states (rating/watchlist/favorite).

    Required parameters: session_id.
    """
    response = TMDB._request(
        'GET', 'movie/{0}/account_states'.format(self.id), params)
    TMDB._set_attrs_to_values(self, response)
    return response
def rating(self, params, json_body):
    """Submit a rating for this movie.

    Required parameters: session_id or guest_session_id.
    Required JSON body: value.
    """
    endpoint = 'movie/{0}/rating'.format(self.id)
    response = TMDB._request('POST', endpoint, params, json_body)
    TMDB._set_attrs_to_values(self, response)
    return response
#
# Networks
# http://docs.themoviedb.apiary.io/#networks
#
class Networks:
    """TV network endpoints. http://docs.themoviedb.apiary.io/#networks"""

    def __init__(self, id):
        self.id = id

    def info(self):
        """Fetch the basic details for this network."""
        response = TMDB._request('GET', 'network/{0}'.format(self.id))
        TMDB._set_attrs_to_values(self, response)
        return response
#
# People
# http://docs.themoviedb.apiary.io/#people
#
class People:
    """Person endpoints. http://docs.themoviedb.apiary.io/#people"""

    def __init__(self, id=0):
        self.id = id

    def _get(self, path, params=None):
        """GET ``path``, copy the response attrs onto self, return it.

        When ``params`` is None the request is issued without a params
        argument, exactly matching the parameterless endpoints.
        """
        if params is None:
            response = TMDB._request('GET', path)
        else:
            response = TMDB._request('GET', path, params)
        TMDB._set_attrs_to_values(self, response)
        return response

    def info(self):
        """Fetch the general information for this person."""
        return self._get('person/{0}'.format(self.id))

    def movie_credits(self, params=None):
        """Fetch this person's movie credits. Optional params: language."""
        # ``params=None`` (not ``{}``) avoids the shared mutable-default
        # pitfall; a fresh dict is created per call.
        return self._get('person/{0}/movie_credits'.format(self.id),
                         {} if params is None else params)

    def tv_credits(self, params=None):
        """Fetch this person's TV credits. Optional params: language."""
        return self._get('person/{0}/tv_credits'.format(self.id),
                         {} if params is None else params)

    def combined_credits(self, params=None):
        """Fetch combined movie+TV credits. Optional params: language."""
        return self._get('person/{0}/combined_credits'.format(self.id),
                         {} if params is None else params)

    def images(self):
        """Fetch the profile images for this person."""
        return self._get('person/{0}/images'.format(self.id))

    def changes(self, params=None):
        """Fetch recorded changes. Optional params: start_date, end_date."""
        return self._get('person/{0}/changes'.format(self.id),
                         {} if params is None else params)

    def popular(self, params=None):
        """Fetch the list of popular people. Optional params: page."""
        return self._get('person/popular', {} if params is None else params)

    def latest(self):
        """Fetch the most recently created person."""
        return self._get('person/latest')
#
# Reviews
# http://docs.themoviedb.apiary.io/#reviews
#
class Reviews:
    """Review endpoints. http://docs.themoviedb.apiary.io/#reviews"""

    def __init__(self, id):
        self.id = id

    def info(self):
        """Fetch the full details of this review."""
        response = TMDB._request('GET', 'review/{0}'.format(self.id))
        TMDB._set_attrs_to_values(self, response)
        return response
#
# Search
# http://docs.themoviedb.apiary.io/#search
#
class Search:
    """Search endpoints. http://docs.themoviedb.apiary.io/#search"""

    def __init__(self):
        pass

    def _search(self, kind, params):
        # Shared GET helper for all search endpoints.
        response = TMDB._request('GET', 'search/' + kind, params)
        TMDB._set_attrs_to_values(self, response)
        return response

    def movie(self, params):
        """Search movies. Required: query. Optional: page, language,
        include_adult, year, primary_release_year, search_type."""
        return self._search('movie', params)

    def collection(self, params):
        """Search collections. Required: query. Optional: page, language."""
        return self._search('collection', params)

    def tv(self, params):
        """Search TV shows. Required: query. Optional: page, language,
        first_air_date_year, search_type."""
        return self._search('tv', params)

    def person(self, params):
        """Search people. Required: query. Optional: page, include_adult,
        search_type."""
        return self._search('person', params)

    def list(self, params):
        """Search lists. Required: query. Optional: page, include_adult."""
        return self._search('list', params)

    def company(self, params):
        """Search companies. Required: query. Optional: page."""
        return self._search('company', params)

    def keyword(self, params):
        """Search keywords. Required: query. Optional: page."""
        return self._search('keyword', params)
#
# TV
# http://docs.themoviedb.apiary.io/#tv
#
class TV:
    """TV series endpoints. http://docs.themoviedb.apiary.io/#tv"""

    def __init__(self, id=0):
        self.id = id

    def _get(self, path, params=None):
        """GET ``path``, copy the response attrs onto self, return it."""
        if params is None:
            response = TMDB._request('GET', path)
        else:
            response = TMDB._request('GET', path, params)
        TMDB._set_attrs_to_values(self, response)
        return response

    def info(self, params=None):
        """Fetch the primary information about this series.
        Optional params: language."""
        # ``params=None`` (not ``{}``) avoids the shared mutable-default
        # pitfall; a fresh dict is created per call.
        return self._get('tv/{0}'.format(self.id),
                         {} if params is None else params)

    def credits(self, params=None):
        """Fetch cast and crew. Optional params: language."""
        return self._get('tv/{0}/credits'.format(self.id),
                         {} if params is None else params)

    def external_ids(self, params=None):
        """Fetch external ids (IMDb etc.). Optional params: language."""
        return self._get('tv/{0}/external_ids'.format(self.id),
                         {} if params is None else params)

    def images(self, params=None):
        """Fetch images. Optional params: language, include_image_language."""
        return self._get('tv/{0}/images'.format(self.id),
                         {} if params is None else params)

    def translations(self):
        """Fetch the translations available for this series."""
        return self._get('tv/{0}/translations'.format(self.id))

    def top_rated(self, params=None):
        """Fetch top rated TV shows. Optional params: page, language."""
        return self._get('tv/top_rated', {} if params is None else params)

    def popular(self, params=None):
        """Fetch popular TV shows. Optional params: page, language."""
        return self._get('tv/popular', {} if params is None else params)
#
# TV Seasons
# http://docs.themoviedb.apiary.io/#tvseasons
#
class TV_Seasons:
    """TV season endpoints. http://docs.themoviedb.apiary.io/#tvseasons"""

    def __init__(self, id, season_number):
        self.id = id
        self.season_number = season_number

    def _path(self, suffix=''):
        # Base URL for this season, optionally extended with a sub-resource.
        return 'tv/{0}/season/{1}{2}'.format(self.id, self.season_number,
                                             suffix)

    def _get(self, path, params=None):
        """GET ``path``, copy the response attrs onto self, return it."""
        if params is None:
            response = TMDB._request('GET', path)
        else:
            response = TMDB._request('GET', path, params)
        TMDB._set_attrs_to_values(self, response)
        return response

    def info(self, params=None):
        """Fetch the primary information. Optional params: language."""
        # ``params=None`` avoids the shared mutable-default pitfall.
        return self._get(self._path(), {} if params is None else params)

    def credits(self):
        """Fetch cast and crew for this season."""
        return self._get(self._path('/credits'))

    def external_ids(self, params=None):
        """Fetch external ids. Optional params: language."""
        return self._get(self._path('/external_ids'),
                         {} if params is None else params)

    def images(self, params=None):
        """Fetch the posters for this season. Optional params: language."""
        return self._get(self._path('/images'),
                         {} if params is None else params)
#
# TV Episodes
# http://docs.themoviedb.apiary.io/#tvepisodes
#
class TV_Episodes:
    """TV episode endpoints. http://docs.themoviedb.apiary.io/#tvepisodes"""

    def __init__(self, id, season_number, episode_number):
        self.id = id
        self.season_number = season_number
        self.episode_number = episode_number

    def _path(self, suffix=''):
        # Base URL for this episode, optionally extended with a sub-resource.
        return 'tv/{0}/season/{1}/episode/{2}{3}'.format(
            self.id, self.season_number, self.episode_number, suffix)

    def _get(self, path, params):
        """GET ``path`` with ``params``, copy attrs onto self, return it."""
        response = TMDB._request('GET', path, params)
        TMDB._set_attrs_to_values(self, response)
        return response

    def info(self, params=None):
        """Fetch the primary information. Optional params: language."""
        # ``params=None`` (not ``{}``) avoids the shared mutable-default
        # pitfall; a fresh dict is created per call.
        return self._get(self._path(), {} if params is None else params)

    def credits(self, params=None):
        """Fetch cast and crew for this episode."""
        return self._get(self._path('/credits'),
                         {} if params is None else params)

    def external_ids(self, params=None):
        """Fetch external ids. Optional params: language."""
        return self._get(self._path('/external_ids'),
                         {} if params is None else params)

    def images(self, params=None):
        """Fetch the stills for this episode. Optional params: language."""
        return self._get(self._path('/images'),
                         {} if params is None else params)
|
gpl-3.0
|
gregsymons/jenkins-job-builder
|
jenkins_jobs/modules/properties.py
|
8
|
22619
|
# Copyright 2012 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
The Properties module supplies a wide range of options that are
implemented as Jenkins job properties.
**Component**: properties
:Macro: property
:Entry Point: jenkins_jobs.properties
Example::
job:
name: test_job
properties:
- github:
url: https://github.com/openstack-infra/jenkins-job-builder/
"""
import xml.etree.ElementTree as XML
import jenkins_jobs.modules.base
from jenkins_jobs.errors import InvalidAttributeError, JenkinsJobsException
import logging
def builds_chain_fingerprinter(parser, xml_parent, data):
    """yaml: builds-chain-fingerprinter
    Builds chain fingerprinter.
    Requires the Jenkins :jenkins-wiki:`Builds chain fingerprinter Plugin
    <Builds+chain+fingerprinter>`.
    :arg bool per-builds-chain: enable builds hierarchy fingerprinting
        (default False)
    :arg bool per-job-chain: enable jobs hierarchy fingerprinting
        (default False)
    Example:
    .. literalinclude:: /../../tests/properties/fixtures/fingerprinter.yaml
    """
    prop = XML.SubElement(xml_parent,
                          'org.jenkinsci.plugins.'
                          'buildschainfingerprinter.'
                          'AutomaticFingerprintJobProperty')
    # Both flags default to False and are serialized as lowercase strings.
    for tag, key in (('isPerBuildsChainEnabled', 'per-builds-chain'),
                     ('isPerJobsChainEnabled', 'per-job-chain')):
        XML.SubElement(prop, tag).text = str(data.get(key, False)).lower()
def ownership(parser, xml_parent, data):
    """yaml: ownership
    Plugin provides explicit ownership for jobs and slave nodes.
    Requires the Jenkins :jenkins-wiki:`Ownership Plugin <Ownership+Plugin>`.
    :arg bool enabled: whether ownership enabled (default : true)
    :arg str owner: the owner of job
    :arg list co-owners: list of job co-owners
    Example:
    .. literalinclude:: /../../tests/properties/fixtures/ownership.yaml
    """
    plugin_node = XML.SubElement(
        xml_parent,
        'com.synopsys.arc.'
        'jenkins.plugins.ownership.jobs.JobOwnerJobProperty')
    node = XML.SubElement(plugin_node, 'ownership')
    XML.SubElement(node, 'ownershipEnabled').text = str(
        data.get('enabled', True)).lower()
    XML.SubElement(node, 'primaryOwnerId').text = data.get('owner')
    coowners = XML.SubElement(node, 'coownersIds')
    for person in data.get('co-owners', []):
        XML.SubElement(coowners, 'string').text = person
def promoted_build(parser, xml_parent, data):
    """yaml: promoted-build
    Marks a build for promotion. A promotion process with an identical
    name must be created via the web interface in the job in order for the job
    promotion to persist. Promotion processes themselves cannot be configured
    by jenkins-jobs due to the separate storage of plugin configuration files.
    Requires the Jenkins :jenkins-wiki:`Promoted Builds Plugin
    <Promoted+Builds+Plugin>`.
    :arg list names: the promoted build names
    Example::
      properties:
        - promoted-build:
            names:
              - "Release to QA"
              - "Jane Must Approve"
    """
    prop = XML.SubElement(xml_parent, 'hudson.plugins.promoted__builds.'
                                      'JobPropertyImpl')
    process_names = data.get('names', [])
    # Only emit the container element when at least one name is configured.
    if process_names:
        holder = XML.SubElement(prop, 'activeProcessNames')
        for process_name in process_names:
            XML.SubElement(holder, 'string').text = str(process_name)
def github(parser, xml_parent, data):
    """yaml: github
    Sets the GitHub URL for the project.
    :arg str url: the GitHub URL
    Example::
      properties:
        - github:
            url: https://github.com/openstack-infra/jenkins-job-builder/
    """
    node = XML.SubElement(xml_parent,
                          'com.coravy.hudson.plugins.github.'
                          'GithubProjectProperty')
    # 'url' is required; a missing key raises KeyError by design.
    XML.SubElement(node, 'projectUrl').text = data['url']
def least_load(parser, xml_parent, data):
    """yaml: least-load
    Enables the Least Load Plugin.
    Requires the Jenkins :jenkins-wiki:`Least Load Plugin <Least+Load+Plugin>`.
    :arg bool disabled: whether or not leastload is disabled (default True)
    Example:
    .. literalinclude:: /../../tests/properties/fixtures/least-load002.yaml
    """
    node = XML.SubElement(xml_parent,
                          'org.bstick12.jenkinsci.plugins.leastload.'
                          'LeastLoadDisabledProperty')
    disabled = data.get('disabled', True)
    XML.SubElement(node, 'leastLoadDisabled').text = str(disabled).lower()
def throttle(parser, xml_parent, data):
    """yaml: throttle
    Throttles the number of builds for this job.
    Requires the Jenkins :jenkins-wiki:`Throttle Concurrent Builds Plugin
    <Throttle+Concurrent+Builds+Plugin>`.
    :arg int max-per-node: max concurrent builds per node (default 0)
    :arg int max-total: max concurrent builds (default 0)
    :arg bool enabled: whether throttling is enabled (default True)
    :arg str option: throttle `project` or `category`
    :arg list categories: multiproject throttle categories
    Example::
      properties:
        - throttle:
            max-total: 4
            categories:
              - cat1
              - cat2
    """
    node = XML.SubElement(xml_parent,
                          'hudson.plugins.throttleconcurrents.'
                          'ThrottleJobProperty')
    XML.SubElement(node, 'maxConcurrentPerNode').text = str(
        data.get('max-per-node', '0'))
    XML.SubElement(node, 'maxConcurrentTotal').text = str(
        data.get('max-total', '0'))
    XML.SubElement(node, 'throttleEnabled').text = (
        'true' if data.get('enabled', True) else 'false')
    categories = data.get('categories', [])
    # The <categories> container is only emitted when categories exist.
    if categories:
        holder = XML.SubElement(node, 'categories')
        for category in categories:
            XML.SubElement(holder, 'string').text = str(category)
    XML.SubElement(node, 'throttleOption').text = data.get('option')
    XML.SubElement(node, 'configVersion').text = '1'
def sidebar(parser, xml_parent, data):
    """yaml: sidebar
    Allows you to add links in the sidebar.
    Requires the Jenkins :jenkins-wiki:`Sidebar-Link Plugin
    <Sidebar-Link+Plugin>`.
    :arg str url: url to link to (optional)
    :arg str text: text for the link (optional)
    :arg str icon: path to icon (optional)
    Example:
    .. literalinclude:: /../../tests/properties/fixtures/sidebar02.yaml
    """
    container = xml_parent.find('hudson.plugins.sidebar__link.ProjectLinks')
    # Re-use the plugin element created by an earlier sidebar entry so all
    # links end up under a single <links> list.
    if container is not None:
        links = container.find('links')
    else:
        container = XML.SubElement(
            xml_parent, 'hudson.plugins.sidebar__link.ProjectLinks')
        links = XML.SubElement(container, 'links')
    action = XML.SubElement(links,
                            'hudson.plugins.sidebar__link.LinkAction')
    for tag in ('url', 'text', 'icon'):
        XML.SubElement(action, tag).text = str(data.get(tag, ''))
def inject(parser, xml_parent, data):
    """yaml: inject
    Allows you to inject environment variables into the build.
    Requires the Jenkins :jenkins-wiki:`Env Inject Plugin <EnvInject+Plugin>`.
    :arg str properties-file: file to read with properties (optional)
    :arg str properties-content: key=value properties (optional)
    :arg str script-file: file with script to run (optional)
    :arg str script-content: script to run (optional)
    :arg str groovy-content: groovy script to run (optional)
    :arg bool load-from-master: load files from master (default false)
    :arg bool enabled: injection enabled (default true)
    :arg bool keep-system-variables: keep system variables (default true)
    :arg bool keep-build-variables: keep build variable (default true)
    :arg bool override-build-parameters: override build parameters
        (default false)
    Example:
    .. literalinclude:: /../../tests/properties/fixtures/inject001.yaml
       :language: yaml
    """
    inject = XML.SubElement(xml_parent,
                            'EnvInjectJobProperty')
    info = XML.SubElement(inject, 'info')
    # The text-valued settings are emitted only when non-blank, via the
    # shared helper from jenkins_jobs.modules.base.
    jenkins_jobs.modules.base.add_nonblank_xml_subelement(
        info, 'propertiesFilePath', data.get('properties-file'))
    jenkins_jobs.modules.base.add_nonblank_xml_subelement(
        info, 'propertiesContent', data.get('properties-content'))
    jenkins_jobs.modules.base.add_nonblank_xml_subelement(
        info, 'scriptFilePath', data.get('script-file'))
    jenkins_jobs.modules.base.add_nonblank_xml_subelement(
        info, 'scriptContent', data.get('script-content'))
    jenkins_jobs.modules.base.add_nonblank_xml_subelement(
        info, 'groovyScriptContent', data.get('groovy-content'))
    # The boolean settings are always emitted, serialized lowercase.
    XML.SubElement(info, 'loadFilesFromMaster').text = str(
        data.get('load-from-master', False)).lower()
    XML.SubElement(inject, 'on').text = str(
        data.get('enabled', True)).lower()
    XML.SubElement(inject, 'keepJenkinsSystemVariables').text = str(
        data.get('keep-system-variables', True)).lower()
    XML.SubElement(inject, 'keepBuildVariables').text = str(
        data.get('keep-build-variables', True)).lower()
    XML.SubElement(inject, 'overrideBuildParameters').text = str(
        data.get('override-build-parameters', False)).lower()
def authenticated_build(parser, xml_parent, data):
    """yaml: authenticated-build
    Specifies an authorization matrix where only authenticated users
    may trigger a build.
    DEPRECATED
    Example::
      properties:
        - authenticated-build
    """
    # TODO: generalize this
    if not data:
        return
    matrix = XML.SubElement(xml_parent,
                            'hudson.security.'
                            'AuthorizationMatrixProperty')
    XML.SubElement(matrix, 'permission').text = \
        'hudson.model.Item.Build:authenticated'
def authorization(parser, xml_parent, data):
    """yaml: authorization
    Specifies an authorization matrix
    The available rights are:
      job-delete, job-configure, job-read, job-extended-read, job-discover,
      job-build, job-workspace, job-cancel, run-delete, run-update, scm-tag
    Example::
      properties:
        - authorization:
            admin:
              - job-delete
              - job-configure
              - job-read
              - job-discover
              - job-build
              - job-workspace
              - job-cancel
              - run-delete
              - run-update
              - scm-tag
            anonymous:
              - job-discover
              - job-read
              - job-extended-read
    """
    # Map each YAML right name to the Jenkins permission identifier.
    known = {
        'job-delete': 'hudson.model.Item.Delete',
        'job-configure': 'hudson.model.Item.Configure',
        'job-read': 'hudson.model.Item.Read',
        'job-extended-read': 'hudson.model.Item.ExtendedRead',
        'job-discover': 'hudson.model.Item.Discover',
        'job-build': 'hudson.model.Item.Build',
        'job-workspace': 'hudson.model.Item.Workspace',
        'job-cancel': 'hudson.model.Item.Cancel',
        'run-delete': 'hudson.model.Run.Delete',
        'run-update': 'hudson.model.Run.Update',
        'scm-tag': 'hudson.scm.SCM.Tag'
    }
    if not data:
        return
    matrix = XML.SubElement(xml_parent,
                            'hudson.security.AuthorizationMatrixProperty')
    for username, perms in data.items():
        for perm in perms:
            # Unknown right names raise KeyError, surfacing config typos.
            XML.SubElement(matrix, 'permission').text = \
                "{0}:{1}".format(known[perm], username)
def extended_choice(parser, xml_parent, data):
    """yaml: extended-choice
    Use of this config option is deprecated. You should use the
    `extended-choice` option in the parameter section of the job configuration
    instead.
    """
    logger = logging.getLogger("%s:extended_choice" % __name__)
    logger.warn('Use of the extended-choice property is deprecated. You '
                'should use the extended-choice option in the parameter '
                'section instead.')
    # Forward the data to the parameter-module implementation: create the
    # ParametersDefinitionProperty container and dispatch 'extended-choice'
    # through the component registry as if it had been declared there.
    definition = XML.SubElement(xml_parent,
                                'hudson.model.ParametersDefinitionProperty')
    definitions = XML.SubElement(definition, 'parameterDefinitions')
    parser.registry.dispatch('parameter', parser, definitions,
                             {'extended-choice': data})
def priority_sorter(parser, xml_parent, data):
    """yaml: priority-sorter
    Allows simple ordering of builds, using a configurable job priority.
    Requires the Jenkins :jenkins-wiki:`Priority Sorter Plugin
    <Priority+Sorter+Plugin>`.
    :arg int priority: Priority of the job. Higher value means higher
        priority, with 100 as the standard priority. (required)
    Example::
      properties:
        - priority-sorter:
            priority: 150
    """
    node = XML.SubElement(xml_parent,
                          'hudson.queueSorter.'
                          'PrioritySorterJobProperty')
    # 'priority' is required; a missing key raises KeyError by design.
    XML.SubElement(node, 'priority').text = str(data['priority'])
def build_blocker(parser, xml_parent, data):
    """yaml: build-blocker
    This plugin keeps the actual job in the queue
    if at least one name of currently running jobs
    is matching with one of the given regular expressions.
    Requires the Jenkins :jenkins-wiki:`Build Blocker Plugin
    <Build+Blocker+Plugin>`.
    :arg bool use-build-blocker: Enable or disable build blocker
        (default true)
    :arg list blocking-jobs: One regular expression per line
        to select blocking jobs by their names. (required)
    :arg str block-level: block build globally ('GLOBAL') or per node ('NODE')
        (default 'GLOBAL')
    :arg str queue-scanning: scan build queue for all builds ('ALL') or only
        buildable builds ('BUILDABLE') (default 'DISABLED'))
    Example:
    .. literalinclude:: \
    /../../tests/properties/fixtures/build-blocker01.yaml
    """
    blocker = XML.SubElement(xml_parent,
                             'hudson.plugins.'
                             'buildblocker.BuildBlockerProperty')
    if data is None or 'blocking-jobs' not in data:
        raise JenkinsJobsException('blocking-jobs field is missing')
    elif not data.get('blocking-jobs'):
        # Bug fix: previously only an explicit None value was rejected here,
        # so an empty list slipped through and produced a blank
        # <blockingJobs/> element despite the "must not be empty" message.
        raise JenkinsJobsException('blocking-jobs list must not be empty')
    XML.SubElement(blocker, 'useBuildBlocker').text = str(
        data.get('use-build-blocker', True)).lower()
    # One regular expression per line, each entry newline-terminated —
    # the text format the Jenkins plugin expects.
    XML.SubElement(blocker, 'blockingJobs').text = ''.join(
        value + '\n' for value in data['blocking-jobs'])
    block_level_list = ('GLOBAL', 'NODE')
    block_level = data.get('block-level', 'GLOBAL')
    if block_level not in block_level_list:
        raise InvalidAttributeError('block-level',
                                    block_level,
                                    block_level_list)
    XML.SubElement(blocker, 'blockLevel').text = block_level
    queue_scanning_list = ('DISABLED', 'ALL', 'BUILDABLE')
    queue_scanning = data.get('queue-scanning', 'DISABLED')
    if queue_scanning not in queue_scanning_list:
        raise InvalidAttributeError('queue-scanning',
                                    queue_scanning,
                                    queue_scanning_list)
    XML.SubElement(blocker, 'scanQueueFor').text = queue_scanning
def copyartifact(parser, xml_parent, data):
    """yaml: copyartifact
    Specify a list of projects that have access to copy the artifacts of
    this project.
    Requires the Jenkins :jenkins-wiki:`Copy Artifact plugin
    <Copy+Artifact+Plugin>`.
    :arg string projects: comma separated list of projects that can copy
        artifacts of this project. Wild card character '*' is available.
    Example:
    .. literalinclude:: \
    /../../tests/properties/fixtures/copyartifact.yaml
    """
    node = XML.SubElement(xml_parent,
                          'hudson.plugins.'
                          'copyartifact.'
                          'CopyArtifactPermissionProperty',
                          plugin='copyartifact')
    projects = data.get('projects', None) if data else None
    if not projects:
        raise JenkinsJobsException("projects string must exist and "
                                   "not be empty")
    holder = XML.SubElement(node, 'projectNameList')
    XML.SubElement(holder, 'string').text = projects
def batch_tasks(parser, xml_parent, data):
    """yaml: batch-tasks
    Batch tasks can be tasks for events like releases, integration, archiving,
    etc. In this way, anyone in the project team can execute them in a way that
    leaves a record.
    A batch task consists of a shell script and a name. When you execute
    a build, the shell script gets run on the workspace, just like a build.
    Batch tasks and builds "lock" the workspace, so when one of those
    activities is in progress, all the others will block in the queue.
    Requires the Jenkins :jenkins-wiki:`Batch Task Plugin <Batch+Task+Plugin>`.
    :arg list batch-tasks: Batch tasks.
        :Task: * **name** (`str`) Task name.
               * **script** (`str`) Task script.
    Example:
    .. literalinclude:: /../../tests/properties/fixtures/batch-task.yaml
    """
    prop = XML.SubElement(xml_parent,
                          'hudson.plugins.batch__task.BatchTaskProperty')
    holder = XML.SubElement(prop, 'tasks')
    for spec in data:
        # Each entry must provide both 'name' and 'script'.
        node = XML.SubElement(holder,
                              'hudson.plugins.batch__task.BatchTask')
        XML.SubElement(node, 'name').text = spec['name']
        XML.SubElement(node, 'script').text = spec['script']
def heavy_job(parser, xml_parent, data):
    """yaml: heavy-job
    This plugin allows you to define "weight" on each job,
    and making each job consume that many executors
    Requires the Jenkins :jenkins-wiki:`Heavy Job Plugin <Heavy+Job+Plugin>`.
    :arg int weight: Specify the total number of executors
        that this job should occupy (default 1)
    Example:
    .. literalinclude:: /../../tests/properties/fixtures/heavy-job.yaml
    """
    prop = XML.SubElement(xml_parent,
                          'hudson.plugins.'
                          'heavy__job.HeavyJobProperty')
    XML.SubElement(prop, 'weight').text = str(data.get('weight', 1))
def slave_utilization(parser, xml_parent, data):
    """yaml: slave-utilization
    This plugin allows you to specify the percentage of a slave's capacity a
    job wants to use.
    Requires the Jenkins :jenkins-wiki:`Slave Utilization Plugin
    <Slave+Utilization+Plugin>`.
    :arg int slave-percentage: Specify the percentage of a slave's execution
        slots that this job should occupy (default: 0)
    :arg bool single-instance-per-slave: Control whether concurrent instances
        of this job will be permitted to run in parallel on a single slave
        (default: False)
    Example:
    .. literalinclude:: \
    /../../tests/properties/fixtures/slave-utilization1.yaml
    """
    node = XML.SubElement(
        xml_parent, 'com.suryagaddipati.jenkins.SlaveUtilizationProperty')
    percent = int(data.get('slave-percentage', 0))
    # A non-zero percentage implies the job needs exclusive node access.
    XML.SubElement(node, 'needsExclusiveAccessToNode').text = (
        'true' if percent else 'false')
    XML.SubElement(node, 'slaveUtilizationPercentage').text = str(percent)
    XML.SubElement(node, 'singleInstancePerSlave').text = str(
        data.get('single-instance-per-slave', False)).lower()
def delivery_pipeline(parser, xml_parent, data):
    """yaml: delivery-pipeline
    Requires the Jenkins :jenkins-wiki:`Delivery Pipeline Plugin
    <Delivery+Pipeline+Plugin>`.
    :arg str stage: Name of the stage for this job (default: '')
    :arg str task: Name of the task for this job (default: '')
    Example:
    .. literalinclude:: \
    /../../tests/properties/fixtures/delivery-pipeline1.yaml
    """
    node = XML.SubElement(xml_parent,
                          'se.diabol.jenkins.pipeline.'
                          'PipelineProperty')
    for tag, key in (('stageName', 'stage'), ('taskName', 'task')):
        XML.SubElement(node, tag).text = data.get(key, '')
def zeromq_event(parser, xml_parent, data):
    """yaml: zeromq-event
    This is a Jenkins plugin that will publish Jenkins Job run events
    (start, complete, finish) to a ZMQ PUB socket.
    Requires the Jenkins `ZMQ Event Publisher.
    <https://git.openstack.org/cgit/openstack-infra/zmq-event-publisher>`_
    Example:
    .. literalinclude:: \
    /../../tests/properties/fixtures/zeromq-event.yaml
    """
    # This property takes no options; presence simply enables publishing.
    publisher = XML.SubElement(
        xml_parent,
        'org.jenkinsci.plugins.'
        'ZMQEventPublisher.HudsonNotificationProperty')
    XML.SubElement(publisher, 'enabled').text = 'true'
class Properties(jenkins_jobs.modules.base.Base):
    """Module entry point: assembles the <properties> section of a job.

    Each item under the YAML 'properties' list is dispatched to the
    matching component function in this file via the registry.
    """
    # Position of this module in the overall XML generation order.
    sequence = 20
    component_type = 'property'
    component_list_type = 'properties'

    def gen_xml(self, parser, xml_parent, data):
        # Re-use an existing <properties> element if another module already
        # created one; otherwise create it.
        properties = xml_parent.find('properties')
        if properties is None:
            properties = XML.SubElement(xml_parent, 'properties')
        for prop in data.get('properties', []):
            self.registry.dispatch('property', parser, properties, prop)
|
apache-2.0
|
Gitlab11/theHarvester
|
discovery/baidusearch.py
|
21
|
1298
|
import httplib
import myparser
import time
import sys
class search_baidu:
    """Scrape Baidu web search for pages mentioning ``word``.

    Python 2 code (httplib module, print statement). Raw HTML from each
    result page accumulates in ``self.total_results``; emails and host
    names are then extracted by the project's ``myparser`` module.
    """
    def __init__(self, word, limit):
        self.word = word
        self.total_results = ""
        self.server = "www.baidu.com"
        self.hostname = "www.baidu.com"
        # NOTE(review): the User-Agent string has an unbalanced leading
        # "(" — looks like a typo; confirm before changing, since the
        # exact string is what gets sent on the wire.
        self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
        self.limit = limit
        # Result offset for paging; passed as the "pn" query argument.
        self.counter = 0

    def do_search(self):
        # One GET per results page: /s?wd=%40<word>&pn=<offset>.
        h = httplib.HTTP(self.server)
        h.putrequest('GET', "/s?wd=%40" + self.word
                     + "&pn=" + str(self.counter))
        h.putheader('Host', self.hostname)
        h.putheader('User-agent', self.userAgent)
        h.endheaders()
        returncode, returnmsg, headers = h.getreply()
        # Append this page's HTML to the accumulated results.
        self.total_results += h.getfile().read()

    def process(self):
        # Page through results 10 at a time, sleeping 1s between requests
        # to stay polite; hard cap at offset 1000 regardless of limit.
        while self.counter <= self.limit and self.counter <= 1000:
            self.do_search()
            time.sleep(1)
            print "\tSearching " + str(self.counter) + " results..."
            self.counter += 10

    def get_emails(self):
        # Delegate extraction to the project's HTML parser.
        rawres = myparser.parser(self.total_results, self.word)
        return rawres.emails()

    def get_hostnames(self):
        # Delegate extraction to the project's HTML parser.
        rawres = myparser.parser(self.total_results, self.word)
        return rawres.hostnames()
|
gpl-2.0
|
makinacorpus/Geotrek
|
geotrek/feedback/tests/test_commands.py
|
2
|
1330
|
from io import StringIO
from django.core.management import call_command
from django.test import TestCase
from django.utils import timezone
from geotrek.feedback.models import Report
from geotrek.feedback.factories import ReportFactory
class TestRemoveEmailsOlders(TestCase):
"""Test command erase_emails, if older emails are removed"""
def setUp(self):
# Create two reports
self.old_report = ReportFactory(email="to_erase@you.com")
self.recent_report = ReportFactory(email="yeah@you.com")
# Modify date_insert for old_report
one_year_one_day = timezone.timedelta(days=370)
self.old_report.date_insert = timezone.now() - one_year_one_day
self.old_report.save()
def test_erase_old_emails(self):
output = StringIO()
call_command('erase_emails', stdout=output)
old_report = Report.objects.get(id=self.old_report.id)
self.assertEqual(old_report.email, "")
self.assertEqual(old_report.__str__(), "Anonymous report")
def test_dry_run_command(self):
"""Test if dry_run mode keeps emails"""
output = StringIO()
call_command('erase_emails', dry_run=True, stdout=output)
old_report = Report.objects.get(id=self.old_report.id)
self.assertEqual(old_report.email, "to_erase@you.com")
|
bsd-2-clause
|
nurmd2/nurmd
|
addons/sale/wizard/sale_make_invoice_advance.py
|
11
|
7094
|
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
from openerp import api, fields, models, _
import openerp.addons.decimal_precision as dp
from openerp.exceptions import UserError
class SaleAdvancePaymentInv(models.TransientModel):
_name = "sale.advance.payment.inv"
_description = "Sales Advance Payment Invoice"
    @api.model
    def _count(self):
        # Number of sale orders selected in the context (the wizard's
        # target record set).
        return len(self._context.get('active_ids', []))
    @api.model
    def _get_advance_payment_method(self):
        # Default invoicing mode: when exactly one order is selected and
        # every line's product is invoiced on ordered quantities, default
        # to invoicing everything ('all'); otherwise invoice delivered
        # quantities ('delivered').
        if self._count() == 1:
            sale_obj = self.env['sale.order']
            order = sale_obj.browse(self._context.get('active_ids'))[0]
            if all([line.product_id.invoice_policy == 'order' for line in order.order_line]):
                return 'all'
        return 'delivered'
advance_payment_method = fields.Selection([
('delivered', 'Invoiceable lines'),
('all', 'Invoiceable lines (deduct down payments)'),
('percentage', 'Down payment (percentage)'),
('fixed', 'Down payment (fixed amount)')
], string='What do you want to invoice?', default=_get_advance_payment_method, required=True)
product_id = fields.Many2one('product.product', string='Down Payment Product', domain=[('type', '=', 'service')],\
default=lambda self: self.env['ir.values'].get_default('sale.config.settings', 'deposit_product_id_setting'))
count = fields.Integer(default=_count, string='# of Orders')
amount = fields.Float('Down Payment Amount', digits=dp.get_precision('Account'), help="The amount to be invoiced in advance, taxes excluded.")
deposit_account_id = fields.Many2one("account.account", string="Income Account", domain=[('deprecated', '=', False)],\
help="Account used for deposits")
deposit_taxes_id = fields.Many2many("account.tax", string="Customer Taxes", help="Taxes used for deposits")
@api.onchange('advance_payment_method')
def onchange_advance_payment_method(self):
if self.advance_payment_method == 'percentage':
return {'value': {'amount':0, 'product_id':False}}
return {}
@api.multi
def _create_invoice(self, order, so_line, amount):
inv_obj = self.env['account.invoice']
ir_property_obj = self.env['ir.property']
account_id = False
if self.product_id.id:
account_id = self.product_id.property_account_income_id.id
if not account_id:
prop = ir_property_obj.get('property_account_income_categ_id', 'product.category')
prop_id = prop and prop.id or False
account_id = order.fiscal_position_id.map_account(prop_id)
if not account_id:
raise UserError(
_('There is no income account defined for this product: "%s". You may have to install a chart of account from Accounting app, settings menu.') % \
(self.product_id.name,))
if self.amount <= 0.00:
raise UserError(_('The value of the down payment amount must be positive.'))
if self.advance_payment_method == 'percentage':
amount = order.amount_untaxed * self.amount / 100
name = _("Down payment of %s%%") % (self.amount,)
else:
amount = self.amount
name = _('Down Payment')
invoice = inv_obj.create({
'name': order.client_order_ref or order.name,
'origin': order.name,
'type': 'out_invoice',
'reference': False,
'account_id': order.partner_id.property_account_receivable_id.id,
'partner_id': order.partner_invoice_id.id,
'invoice_line_ids': [(0, 0, {
'name': name,
'origin': order.name,
'account_id': account_id,
'price_unit': amount,
'quantity': 1.0,
'discount': 0.0,
'uom_id': self.product_id.uom_id.id,
'product_id': self.product_id.id,
'sale_line_ids': [(6, 0, [so_line.id])],
'invoice_line_tax_ids': [(6, 0, [x.id for x in self.product_id.taxes_id])],
'account_analytic_id': order.project_id.id or False,
})],
'currency_id': order.pricelist_id.currency_id.id,
'payment_term_id': order.payment_term_id.id,
'fiscal_position_id': order.fiscal_position_id.id or order.partner_id.property_account_position_id.id,
'team_id': order.team_id.id,
})
invoice.compute_taxes()
return invoice
@api.multi
def create_invoices(self):
sale_orders = self.env['sale.order'].browse(self._context.get('active_ids', []))
if self.advance_payment_method == 'delivered':
sale_orders.action_invoice_create()
elif self.advance_payment_method == 'all':
sale_orders.action_invoice_create(final=True)
else:
# Create deposit product if necessary
if not self.product_id:
vals = self._prepare_deposit_product()
self.product_id = self.env['product.product'].create(vals)
self.env['ir.values'].set_default('sale.config.settings', 'deposit_product_id_setting', self.product_id.id)
sale_line_obj = self.env['sale.order.line']
for order in sale_orders:
if self.advance_payment_method == 'percentage':
amount = order.amount_untaxed * self.amount / 100
else:
amount = self.amount
if self.product_id.invoice_policy != 'order':
raise UserError(_('The product used to invoice a down payment should have an invoice policy set to "Ordered quantities". Please update your deposit product to be able to create a deposit invoice.'))
if self.product_id.type != 'service':
raise UserError(_("The product used to invoice a down payment should be of type 'Service'. Please use another product or update this product."))
so_line = sale_line_obj.create({
'name': _('Advance: %s') % (time.strftime('%m %Y'),),
'price_unit': amount,
'product_uom_qty': 0.0,
'order_id': order.id,
'discount': 0.0,
'product_uom': self.product_id.uom_id.id,
'product_id': self.product_id.id,
'tax_id': [(6, 0, self.product_id.taxes_id.ids)],
})
self._create_invoice(order, so_line, amount)
if self._context.get('open_invoices', False):
return sale_orders.action_view_invoice()
return {'type': 'ir.actions.act_window_close'}
def _prepare_deposit_product(self):
return {
'name': 'Down payment',
'type': 'service',
'invoice_policy': 'order',
'property_account_income_id': self.deposit_account_id.id,
'taxes_id': [(6, 0, self.deposit_taxes_id.ids)],
}
|
gpl-3.0
|
armab/st2
|
st2common/tests/unit/test_hash.py
|
9
|
1458
|
# Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
from st2common.util import hash as hash_utils
from st2common.util import auth as auth_utils
class TestHashWithApiKeys(unittest2.TestCase):
    """Sanity checks for hashing of generated API keys."""

    def test_hash_repeatability(self):
        # Hashing the same key twice must produce identical digests.
        key = auth_utils.generate_api_key()
        first = hash_utils.hash(key)
        second = hash_utils.hash(key)
        self.assertEqual(first, second, 'Expected a repeated hash.')

    def test_hash_uniqueness(self):
        # Distinct keys should never collide across a large sample.
        total = 10000
        keys = [auth_utils.generate_api_key() for _ in range(total)]
        digests = {hash_utils.hash(key) for key in keys}
        self.assertEqual(len(digests), total, 'Expected all unique hashes.')
|
apache-2.0
|
SaranyaKarthikeyan/boto
|
tests/unit/ec2/elb/test_attribute.py
|
114
|
8314
|
from tests.unit import unittest
from tests.compat import mock
from boto.ec2.elb import ELBConnection
from boto.ec2.elb import LoadBalancer
from boto.ec2.elb.attributes import LbAttributes
ATTRIBUTE_GET_TRUE_CZL_RESPONSE = b"""<?xml version="1.0" encoding="UTF-8"?>
<DescribeLoadBalancerAttributesResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
<DescribeLoadBalancerAttributesResult>
<LoadBalancerAttributes>
<CrossZoneLoadBalancing>
<Enabled>true</Enabled>
</CrossZoneLoadBalancing>
</LoadBalancerAttributes>
</DescribeLoadBalancerAttributesResult>
<ResponseMetadata>
<RequestId>83c88b9d-12b7-11e3-8b82-87b12EXAMPLE</RequestId>
</ResponseMetadata>
</DescribeLoadBalancerAttributesResponse>
"""
ATTRIBUTE_GET_FALSE_CZL_RESPONSE = b"""<?xml version="1.0" encoding="UTF-8"?>
<DescribeLoadBalancerAttributesResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
<DescribeLoadBalancerAttributesResult>
<LoadBalancerAttributes>
<CrossZoneLoadBalancing>
<Enabled>false</Enabled>
</CrossZoneLoadBalancing>
</LoadBalancerAttributes>
</DescribeLoadBalancerAttributesResult>
<ResponseMetadata>
<RequestId>83c88b9d-12b7-11e3-8b82-87b12EXAMPLE</RequestId>
</ResponseMetadata>
</DescribeLoadBalancerAttributesResponse>
"""
ATTRIBUTE_GET_CS_RESPONSE = b"""<?xml version="1.0" encoding="UTF-8"?>
<DescribeLoadBalancerAttributesResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
<DescribeLoadBalancerAttributesResult>
<LoadBalancerAttributes>
<ConnectionSettings>
<IdleTimeout>30</IdleTimeout>
</ConnectionSettings>
</LoadBalancerAttributes>
</DescribeLoadBalancerAttributesResult>
<ResponseMetadata>
<RequestId>83c88b9d-12b7-11e3-8b82-87b12EXAMPLE</RequestId>
</ResponseMetadata>
</DescribeLoadBalancerAttributesResponse>
"""
ATTRIBUTE_SET_RESPONSE = b"""<?xml version="1.0" encoding="UTF-8"?>
<ModifyLoadBalancerAttributesResponse xmlns="http://elasticloadbalancing.amazonaws.com/doc/2012-06-01/">
<ModifyLoadBalancerAttributesResult/>
<ResponseMetadata>
<RequestId>83c88b9d-12b7-11e3-8b82-87b12EXAMPLE</RequestId>
</ResponseMetadata>
</ModifyLoadBalancerAttributesResponse>
"""
# make_request arguments for setting attributes.
# Format: (API_COMMAND, API_PARAMS, API_PATH, API_METHOD)
ATTRIBUTE_SET_CZL_TRUE_REQUEST = (
'ModifyLoadBalancerAttributes',
{'LoadBalancerAttributes.CrossZoneLoadBalancing.Enabled': 'true',
'LoadBalancerName': 'test_elb'}, mock.ANY, mock.ANY)
ATTRIBUTE_SET_CZL_FALSE_REQUEST = (
'ModifyLoadBalancerAttributes',
{'LoadBalancerAttributes.CrossZoneLoadBalancing.Enabled': 'false',
'LoadBalancerName': 'test_elb'}, mock.ANY, mock.ANY)
# Tests to be run on an LbAttributes
# Format:
# (EC2_RESPONSE_STRING, list( (string_of_attribute_to_test, value) ) )
ATTRIBUTE_TESTS = [
(ATTRIBUTE_GET_TRUE_CZL_RESPONSE,
[('cross_zone_load_balancing.enabled', True)]),
(ATTRIBUTE_GET_FALSE_CZL_RESPONSE,
[('cross_zone_load_balancing.enabled', False)]),
(ATTRIBUTE_GET_CS_RESPONSE,
[('connecting_settings.idle_timeout', 30)]),
]
class TestLbAttributes(unittest.TestCase):
    """Tests LB Attributes."""

    def _setup_mock(self):
        """Sets up a mock elb request.
        Returns: response, elb connection and LoadBalancer
        """
        mock_response = mock.Mock()
        mock_response.status = 200
        elb = ELBConnection(aws_access_key_id='aws_access_key_id',
                            aws_secret_access_key='aws_secret_access_key')
        # Intercept all HTTP traffic; individual tests feed canned XML
        # through mock_response.read.return_value.
        elb.make_request = mock.Mock(return_value=mock_response)
        return mock_response, elb, LoadBalancer(elb, 'test_elb')

    def _verify_attributes(self, attributes, attr_tests):
        """Verifies an LbAttributes object.

        attr_tests pairs a dotted attribute path (e.g.
        'cross_zone_load_balancing.enabled') with its expected value.
        """
        for attr, result in attr_tests:
            attr_result = attributes
            # Walk each segment of the dotted path.
            for sub_attr in attr.split('.'):
                attr_result = getattr(attr_result, sub_attr, None)
            self.assertEqual(attr_result, result)

    def test_get_all_lb_attributes(self):
        """Tests getting the LbAttributes from the elb.connection."""
        mock_response, elb, _ = self._setup_mock()
        for response, attr_tests in ATTRIBUTE_TESTS:
            mock_response.read.return_value = response
            attributes = elb.get_all_lb_attributes('test_elb')
            self.assertTrue(isinstance(attributes, LbAttributes))
            self._verify_attributes(attributes, attr_tests)

    def test_get_lb_attribute(self):
        """Tests getting a single attribute from elb.connection."""
        mock_response, elb, _ = self._setup_mock()
        tests = [
            ('crossZoneLoadBalancing', True, ATTRIBUTE_GET_TRUE_CZL_RESPONSE),
            ('crossZoneLoadBalancing', False, ATTRIBUTE_GET_FALSE_CZL_RESPONSE),
        ]
        for attr, value, response in tests:
            mock_response.read.return_value = response
            status = elb.get_lb_attribute('test_elb', attr)
            self.assertEqual(status, value)

    def test_modify_lb_attribute(self):
        """Tests setting the attributes from elb.connection."""
        mock_response, elb, _ = self._setup_mock()
        tests = [
            ('crossZoneLoadBalancing', True, ATTRIBUTE_SET_CZL_TRUE_REQUEST),
            ('crossZoneLoadBalancing', False, ATTRIBUTE_SET_CZL_FALSE_REQUEST),
        ]
        for attr, value, args in tests:
            mock_response.read.return_value = ATTRIBUTE_SET_RESPONSE
            result = elb.modify_lb_attribute('test_elb', attr, value)
            self.assertTrue(result)
            # The outgoing request parameters must match the expectation.
            elb.make_request.assert_called_with(*args)

    def test_lb_get_attributes(self):
        """Tests the LbAttributes from the ELB object."""
        mock_response, _, lb = self._setup_mock()
        for response, attr_tests in ATTRIBUTE_TESTS:
            mock_response.read.return_value = response
            attributes = lb.get_attributes(force=True)
            self.assertTrue(isinstance(attributes, LbAttributes))
            self._verify_attributes(attributes, attr_tests)

    def test_lb_is_cross_zone_load_balancing(self):
        """Tests checking is_cross_zone_load_balancing."""
        mock_response, _, lb = self._setup_mock()
        tests = [
            # Format: (method, args, result, response)
            # Gets a true result.
            (lb.is_cross_zone_load_balancing, [], True,
             ATTRIBUTE_GET_TRUE_CZL_RESPONSE),
            # Returns the previous calls cached value.
            (lb.is_cross_zone_load_balancing, [], True,
             ATTRIBUTE_GET_FALSE_CZL_RESPONSE),
            # Gets a false result.
            (lb.is_cross_zone_load_balancing, [True], False,
             ATTRIBUTE_GET_FALSE_CZL_RESPONSE),
        ]
        for method, args, result, response in tests:
            mock_response.read.return_value = response
            self.assertEqual(method(*args), result)

    def test_lb_enable_cross_zone_load_balancing(self):
        """Tests enabling cross zone balancing from LoadBalancer."""
        mock_response, elb, lb = self._setup_mock()
        mock_response.read.return_value = ATTRIBUTE_SET_RESPONSE
        self.assertTrue(lb.enable_cross_zone_load_balancing())
        elb.make_request.assert_called_with(*ATTRIBUTE_SET_CZL_TRUE_REQUEST)

    def test_lb_disable_cross_zone_load_balancing(self):
        """Tests disabling cross zone balancing from LoadBalancer."""
        mock_response, elb, lb = self._setup_mock()
        mock_response.read.return_value = ATTRIBUTE_SET_RESPONSE
        self.assertTrue(lb.disable_cross_zone_load_balancing())
        elb.make_request.assert_called_with(*ATTRIBUTE_SET_CZL_FALSE_REQUEST)

    def test_lb_get_connection_settings(self):
        """Tests checking connectionSettings attribute"""
        mock_response, elb, _ = self._setup_mock()
        attrs = [('idle_timeout', 30), ]
        mock_response.read.return_value = ATTRIBUTE_GET_CS_RESPONSE
        attributes = elb.get_all_lb_attributes('test_elb')
        self.assertTrue(isinstance(attributes, LbAttributes))
        for attr, value in attrs:
            self.assertEqual(getattr(attributes.connecting_settings, attr), value)
# Run the suite directly when this module is executed as a script.
if __name__ == '__main__':
    unittest.main()
|
mit
|
JoaoVasques/aws-devtool
|
eb/linux/python3/lib/aws/requests/packages/chardet2/sjisprober.py
|
25
|
3654
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import SJISDistributionAnalysis
from .jpcntx import SJISContextAnalysis
from .mbcssm import SJISSMModel
from . import constants
import sys
from .constants import eStart, eError, eItsMe
class SJISProber(MultiByteCharSetProber):
    """Shift_JIS charset prober combining a byte-level coding state machine
    with character-distribution and context analyses."""
    def __init__(self):
        MultiByteCharSetProber.__init__(self)
        self._mCodingSM = CodingStateMachine(SJISSMModel)
        self._mDistributionAnalyzer = SJISDistributionAnalysis()
        self._mContextAnalyzer = SJISContextAnalysis()
        self.reset()
    def reset(self):
        MultiByteCharSetProber.reset(self)
        self._mContextAnalyzer.reset()
    def get_charset_name(self):
        return "SHIFT_JIS"
    def feed(self, aBuf):
        """Feed a chunk of bytes into the prober; return the current state.

        The state machine flags each byte as an error, a definitive match,
        or the completion of a character; completed characters are handed
        to the distribution and context analyzers.
        """
        aLen = len(aBuf)
        for i in range(0, aLen):
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name() + ' prober hit error at byte ' + str(i) + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == eStart:
                # A complete character ended at byte i; charLen is its
                # byte length.
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    # The character may straddle the previous chunk:
                    # stitch it together from the saved trailing byte
                    # (presumably _mLastChar holds the previous chunk's
                    # final byte — see the assignment below).
                    self._mLastChar[1] = aBuf[0]
                    self._mContextAnalyzer.feed(self._mLastChar[2 - charLen :], charLen)
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mContextAnalyzer.feed(aBuf[i + 1 - charLen : i + 3 - charLen], charLen)
                    self._mDistributionAnalyzer.feed(aBuf[i - 1 : i + 1], charLen)
        # Remember the chunk's last byte for cross-chunk characters.
        self._mLastChar[0] = aBuf[aLen - 1]
        if self.get_state() == constants.eDetecting:
            # Shortcut: stop probing once the context analyzer has enough
            # data and confidence is already above the threshold.
            if self._mContextAnalyzer.got_enough_data() and \
               (self.get_confidence() > constants.SHORTCUT_THRESHOLD):
                self._mState = constants.eFoundIt
        return self.get_state()
    def get_confidence(self):
        # Report the more confident of the two analyses.
        contxtCf = self._mContextAnalyzer.get_confidence()
        distribCf = self._mDistributionAnalyzer.get_confidence()
        return max(contxtCf, distribCf)
|
apache-2.0
|
umitproject/site-status
|
django/contrib/auth/decorators.py
|
230
|
2388
|
import urlparse
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from django.conf import settings
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.utils.decorators import available_attrs
def user_passes_test(test_func, login_url=None, redirect_field_name=REDIRECT_FIELD_NAME):
    """
    Decorator for views that checks that the user passes the given test,
    redirecting to the log-in page if necessary. The test should be a callable
    that takes the user object and returns True if the user passes.
    """
    def decorator(view_func):
        @wraps(view_func, assigned=available_attrs(view_func))
        def _wrapped_view(request, *args, **kwargs):
            if test_func(request.user):
                return view_func(request, *args, **kwargs)
            redirect_path = request.build_absolute_uri()
            # When the login URL shares (or leaves unspecified) the current
            # scheme and host, a bare path suffices as the "next" URL.
            login_parts = urlparse.urlparse(login_url or settings.LOGIN_URL)
            current_parts = urlparse.urlparse(redirect_path)
            same_scheme = not login_parts[0] or login_parts[0] == current_parts[0]
            same_host = not login_parts[1] or login_parts[1] == current_parts[1]
            if same_scheme and same_host:
                redirect_path = request.get_full_path()
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(redirect_path, login_url, redirect_field_name)
        return _wrapped_view
    return decorator
def login_required(function=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
    """
    Decorator for views that checks that the user is logged in, redirecting
    to the log-in page if necessary.
    """
    decorator = user_passes_test(
        lambda u: u.is_authenticated(),
        login_url=login_url,
        redirect_field_name=redirect_field_name,
    )
    # Support both @login_required and @login_required(...) usage.
    return decorator(function) if function else decorator
def permission_required(perm, login_url=None):
    """
    Decorator for views that checks whether a user has a particular permission
    enabled, redirecting to the log-in page if necessary.
    """
    def check_perm(user):
        return user.has_perm(perm)
    return user_passes_test(check_perm, login_url=login_url)
|
agpl-3.0
|
sivaramakrishnansr/ryu
|
ryu/services/protocols/vrrp/rpc_manager.py
|
8
|
7329
|
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ryu import cfg
import socket
import netaddr
from ryu.base import app_manager
from ryu.controller import handler
from ryu.services.protocols.vrrp import event as vrrp_event
from ryu.services.protocols.vrrp import api as vrrp_api
from ryu.lib import rpc
from ryu.lib import hub
from ryu.lib import mac
# Default TCP port for the VRRP RPC interface (arbitrarily chosen).
VRRP_RPC_PORT = 50004  # random
class RPCError(Exception):
    """Raised when an incoming RPC request is malformed or cannot be served."""
class Peer(object):
    """A connected RPC client; forwards its incoming requests onto a queue."""
    def __init__(self, queue):
        super(Peer, self).__init__()
        # Shared queue drained by the manager's request-loop thread.
        self.queue = queue
    def _handle_vrrp_request(self, data):
        # Tag the request with its originating peer before enqueueing so
        # the consumer knows where to send the response.
        self.queue.put((self, data))
class RpcVRRPManager(app_manager.RyuApp):
    """Ryu application exposing VRRP management over a msgpack-RPC socket.

    Listens on the ``vrrp-rpc-port`` option and serves three request
    methods (b'vrrp_config', b'vrrp_list', b'vrrp_config_change').
    VRRP state transitions are broadcast to every connected peer as
    'notify_status' notifications.
    """
    def __init__(self, *args, **kwargs):
        super(RpcVRRPManager, self).__init__(*args, **kwargs)
        self.CONF.register_opts([
            cfg.IntOpt('vrrp-rpc-port', default=VRRP_RPC_PORT,
                       help='port for vrrp rpc interface')])
        self._args = args
        self._kwargs = kwargs
        self._peers = []  # currently connected Peer objects
        self._rpc_events = hub.Queue(128)  # pending (peer, request) pairs
        self.server_thread = hub.spawn(self._peer_accept_thread)
        self.event_thread = hub.spawn(self._rpc_request_loop_thread)
    def _rpc_request_loop_thread(self):
        """Serve queued RPC requests forever, one at a time."""
        while True:
            (peer, data) = self._rpc_events.get()
            msgid, target_method, params = data
            error = None
            result = None
            try:
                if target_method == b'vrrp_config':
                    result = self._config(msgid, params)
                elif target_method == b'vrrp_list':
                    result = self._list(msgid, params)
                elif target_method == b'vrrp_config_change':
                    result = self._config_change(msgid, params)
                else:
                    error = 'Unknown method %s' % (target_method)
            except RPCError as e:
                error = str(e)
            peer._endpoint.send_response(msgid, error=error, result=result)
    def _peer_loop_thread(self, peer):
        """Serve a single peer until its connection closes."""
        peer._endpoint.serve()
        # the peer connection is closed
        self._peers.remove(peer)
    def peer_accept_handler(self, new_sock, addr):
        """Register a newly accepted client socket and serve it in a thread."""
        peer = Peer(self._rpc_events)
        table = {
            rpc.MessageType.REQUEST: peer._handle_vrrp_request,
        }
        peer._endpoint = rpc.EndPoint(new_sock, disp_table=table)
        self._peers.append(peer)
        hub.spawn(self._peer_loop_thread, peer)
    def _peer_accept_thread(self):
        """Accept RPC clients on the configured port forever."""
        server = hub.StreamServer(('', self.CONF.vrrp_rpc_port),
                                  self.peer_accept_handler)
        server.serve_forever()
    def _params_to_dict(self, params, keys):
        """Return the subset of ``params`` whose keys appear in ``keys``."""
        return {k: v for k, v in params.items() if k in keys}
    def _config(self, msgid, params):
        """Handle 'vrrp_config': create a VRRP instance from params[0].

        Returns [vrid, priority, primary virtual IP] of the created
        instance.

        :raises RPCError: when parameters are missing or invalid.
        """
        self.logger.debug('handle vrrp_config request')
        try:
            param_dict = params[0]
        # except Exception (not bare except) so SystemExit/KeyboardInterrupt
        # are never swallowed and misreported as an RPC error.
        except Exception:
            raise RPCError('parameters are missing')
        if_params = self._params_to_dict(param_dict,
                                         ('primary_ip_address',
                                          'device_name'))
        # drop vlan support later
        if_params['vlan_id'] = None
        if_params['mac_address'] = mac.DONTCARE_STR
        try:
            interface = vrrp_event.VRRPInterfaceNetworkDevice(**if_params)
        except Exception:
            raise RPCError('parameters are invalid, %s' % (str(param_dict)))
        config_params = self._params_to_dict(param_dict,
                                             ('vrid',  # mandatory
                                              'ip_addresses',  # mandatory
                                              'version',
                                              'admin_state',
                                              'priority',
                                              'advertisement_interval',
                                              'preempt_mode',
                                              'preempt_delay',
                                              'statistics_interval'))
        try:
            config = vrrp_event.VRRPConfig(**config_params)
        except Exception:
            raise RPCError('parameters are invalid, %s' % (str(param_dict)))
        config_result = vrrp_api.vrrp_config(self, interface, config)
        api_result = [
            config_result.config.vrid,
            config_result.config.priority,
            str(netaddr.IPAddress(config_result.config.ip_addresses[0]))]
        return api_result
    def _lookup_instance(self, vrid):
        """Return the instance name for ``vrid``, or None if not running."""
        for instance in vrrp_api.vrrp_list(self).instance_list:
            if vrid == instance.config.vrid:
                return instance.instance_name
        return None
    def _config_change(self, msgid, params):
        """Handle 'vrrp_config_change': update priority and/or interval.

        :raises RPCError: when parameters are missing or the vrid is
            not configured.
        """
        self.logger.debug('handle vrrp_config_change request')
        try:
            config_values = params[0]
        except Exception:
            raise RPCError('parameters are missing')
        vrid = config_values.get('vrid')
        instance_name = self._lookup_instance(vrid)
        if not instance_name:
            raise RPCError('vrid %d is not found' % (vrid))
        priority = config_values.get('priority')
        interval = config_values.get('advertisement_interval')
        vrrp_api.vrrp_config_change(self, instance_name, priority=priority,
                                    advertisement_interval=interval)
        return {}
    def _list(self, msgid, params):
        """Handle 'vrrp_list': return a summary dict per running instance."""
        self.logger.debug('handle vrrp_list request')
        result = vrrp_api.vrrp_list(self)
        instance_list = result.instance_list
        ret_list = []
        for instance in instance_list:
            c = instance.config
            info_dict = {
                "instance_name": instance.instance_name,
                "vrid": c.vrid,
                "version": c.version,
                "advertisement_interval": c.advertisement_interval,
                "priority": c.priority,
                "virtual_ip_address": str(netaddr.IPAddress(c.ip_addresses[0]))
            }
            ret_list.append(info_dict)
        return ret_list
    @handler.set_ev_cls(vrrp_event.EventVRRPStateChanged)
    def vrrp_state_changed_handler(self, ev):
        """Push a 'notify_status' notification to every connected peer."""
        self.logger.info('handle EventVRRPStateChanged')
        name = ev.instance_name
        old_state = ev.old_state
        new_state = ev.new_state
        vrid = ev.config.vrid
        self.logger.info('VRID:%s %s: %s -> %s', vrid, name, old_state,
                         new_state)
        params = {'vrid': vrid, 'old_state': old_state, 'new_state': new_state}
        for peer in self._peers:
            peer._endpoint.send_notification("notify_status", [params])
|
apache-2.0
|
chandranaik/Aligning-of-PIE-with-rfc8033-in-ns3
|
src/wifi/test/examples-to-run.py
|
8
|
72014
|
#! /usr/bin/env python
## -*- Mode: python; py-indent-offset: 4; indent-tabs-mode: nil; coding: utf-8; -*-
# A list of C++ examples to run in order to ensure that they remain
# buildable and runnable over time. Each tuple in the list contains
#
# (example_name, do_run, do_valgrind_run).
#
# See test.py for more information.
cpp_examples = [
("wifi-phy-configuration --testCase=0", "True", "True"),
("wifi-phy-configuration --testCase=1", "True", "False"),
("wifi-phy-configuration --testCase=2", "True", "False"),
("wifi-phy-configuration --testCase=3", "True", "False"),
("wifi-phy-configuration --testCase=4", "True", "False"),
("wifi-phy-configuration --testCase=5", "True", "False"),
("wifi-phy-configuration --testCase=6", "True", "False"),
("wifi-phy-configuration --testCase=7", "True", "False"),
("wifi-phy-configuration --testCase=8", "True", "False"),
("wifi-phy-configuration --testCase=9", "True", "False"),
("wifi-phy-configuration --testCase=10", "True", "False"),
("wifi-phy-configuration --testCase=11", "True", "False"),
("wifi-phy-configuration --testCase=12", "True", "False"),
("wifi-phy-configuration --testCase=13", "True", "False"),
("wifi-phy-configuration --testCase=14", "True", "False"),
("wifi-phy-configuration --testCase=15", "True", "False"),
("wifi-phy-configuration --testCase=16", "True", "False"),
("wifi-phy-configuration --testCase=17", "True", "False"),
("wifi-phy-configuration --testCase=18", "True", "False"),
("wifi-manager-example --wifiManager=Aarf --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Aarf --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarf --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarf --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarf --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarf --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarfcd --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Aarfcd --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarfcd --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarfcd --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarfcd --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Aarfcd --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Amrr --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Amrr --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Amrr --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Amrr --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Amrr --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Amrr --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Arf --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Arf --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Arf --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Arf --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Arf --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Arf --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Cara --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Cara --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Cara --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Cara --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Cara --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Cara --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Onoe --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Onoe --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Onoe --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Onoe --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Onoe --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Onoe --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Rraa --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Rraa --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Rraa --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Rraa --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Rraa --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Rraa --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Minstrel --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Minstrel --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Minstrel --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Minstrel --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Minstrel --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Minstrel --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=MinstrelHt --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11a --stepTime=0.1", "True", "True"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11b --serverChannelWidth=22 --clientChannelWidth=22 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11g --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11-holland --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11-10MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11-5MHz --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11n-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ac --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=400 --clientShortGuardInterval=400 --serverNss=4 --clientNss=4 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=80 --clientChannelWidth=80 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=4 --clientNss=4 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-5GHz --serverChannelWidth=160 --clientChannelWidth=160 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=4 --clientNss=4 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=1 --clientNss=1 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "True", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=2 --clientNss=2 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=3 --clientNss=3 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=20 --clientChannelWidth=20 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=800 --clientShortGuardInterval=800 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=1600 --clientShortGuardInterval=1600 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("wifi-manager-example --wifiManager=Ideal --standard=802.11ax-2.4GHz --serverChannelWidth=40 --clientChannelWidth=40 --serverShortGuardInterval=3200 --clientShortGuardInterval=3200 --serverNss=4 --clientNss=4 --stepTime=0.1", "False", "False"),
("test-interference-helper --enableCapture=0 --txPowerA=5 --txPowerB=15 --delay=10 --txModeA=OfdmRate6Mbps --txModeB=OfdmRate6Mbps --checkResults=1 --expectRxASuccessfull=0 --expectRxBSuccessfull=0", "True", "True"),
("test-interference-helper --enableCapture=1 --txPowerA=5 --txPowerB=15 --delay=10 --txModeA=OfdmRate6Mbps --txModeB=OfdmRate6Mbps --checkResults=1 --expectRxASuccessfull=0 --expectRxBSuccessfull=1", "True", "False"),
]
# A list of Python examples to run in order to ensure that they remain
# runnable over time. Each tuple in the list contains
#
#     (example_name, do_run).
#
# See test.py for more information.
# NOTE: intentionally empty at present; entries are appended as Python
# examples are added to the suite.
python_examples = []
|
gpl-2.0
|
AsimmHirani/ISpyPi
|
tensorflow/contrib/tensorflow-master/tensorflow/contrib/slim/python/slim/nets/vgg.py
|
164
|
11241
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains model definitions for versions of the Oxford VGG network.
These model definitions were introduced in the following technical report:
Very Deep Convolutional Networks For Large-Scale Image Recognition
Karen Simonyan and Andrew Zisserman
arXiv technical report, 2015
PDF: http://arxiv.org/pdf/1409.1556.pdf
ILSVRC 2014 Slides: http://www.robots.ox.ac.uk/~karen/pdf/ILSVRC_2014.pdf
CC-BY-4.0
More information can be obtained from the VGG website:
www.robots.ox.ac.uk/~vgg/research/very_deep/
Usage:
with slim.arg_scope(vgg.vgg_arg_scope()):
outputs, end_points = vgg.vgg_a(inputs)
with slim.arg_scope(vgg.vgg_arg_scope()):
outputs, end_points = vgg.vgg_16(inputs)
@@vgg_a
@@vgg_16
@@vgg_19
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib import layers
from tensorflow.contrib.framework.python.ops import arg_scope
from tensorflow.contrib.layers.python.layers import layers as layers_lib
from tensorflow.contrib.layers.python.layers import regularizers
from tensorflow.contrib.layers.python.layers import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import variable_scope
def vgg_arg_scope(weight_decay=0.0005):
  """Defines the VGG arg scope.

  Args:
    weight_decay: The l2 regularization coefficient.

  Returns:
    An arg_scope.
  """
  # All conv and fully-connected layers share ReLU activations, l2 weight
  # regularization and zero-initialized biases; convs additionally default
  # to SAME padding.
  weights_reg = regularizers.l2_regularizer(weight_decay)
  with arg_scope(
      [layers.conv2d, layers_lib.fully_connected],
      activation_fn=nn_ops.relu,
      weights_regularizer=weights_reg,
      biases_initializer=init_ops.zeros_initializer()):
    with arg_scope([layers.conv2d], padding='SAME') as vgg_sc:
      return vgg_sc
def vgg_a(inputs,
          num_classes=1000,
          is_training=True,
          dropout_keep_prob=0.5,
          spatial_squeeze=True,
          scope='vgg_a'):
  """Oxford Net VGG 11-Layers version A Example.

  Note: All the fully_connected layers have been transformed to conv2d layers.
        To use in classification mode, resize input to 224x224.

  Args:
    inputs: a tensor of size [batch_size, height, width, channels].
    num_classes: number of predicted classes.
    is_training: whether or not the model is being trained.
    dropout_keep_prob: the probability that activations are kept in the dropout
      layers during training.
    spatial_squeeze: whether or not should squeeze the spatial dimensions of the
      outputs. Useful to remove unnecessary dimensions for classification.
    scope: Optional scope for the variables.

  Returns:
    the last op containing the log predictions and end_points dict.
  """
  with variable_scope.variable_scope(scope, 'vgg_a', [inputs]) as sc:
    # original_name_scope is unique per instantiation, so end points from
    # separate calls land in separate collections.
    end_points_collection = sc.original_name_scope + '_end_points'
    # Collect outputs for conv2d, fully_connected and max_pool2d.
    with arg_scope(
        [layers.conv2d, layers_lib.max_pool2d],
        outputs_collections=end_points_collection):
      # VGG-A (11 weight layers): 1-1-2-2-2 conv layers per block, each
      # block followed by 2x2 max pooling.
      net = layers_lib.repeat(
          inputs, 1, layers.conv2d, 64, [3, 3], scope='conv1')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool1')
      net = layers_lib.repeat(net, 1, layers.conv2d, 128, [3, 3], scope='conv2')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool2')
      net = layers_lib.repeat(net, 2, layers.conv2d, 256, [3, 3], scope='conv3')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool3')
      net = layers_lib.repeat(net, 2, layers.conv2d, 512, [3, 3], scope='conv4')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool4')
      net = layers_lib.repeat(net, 2, layers.conv2d, 512, [3, 3], scope='conv5')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool5')
      # Use conv2d instead of fully_connected layers.
      net = layers.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
      net = layers_lib.dropout(
          net, dropout_keep_prob, is_training=is_training, scope='dropout6')
      net = layers.conv2d(net, 4096, [1, 1], scope='fc7')
      net = layers_lib.dropout(
          net, dropout_keep_prob, is_training=is_training, scope='dropout7')
      # fc8 is the linear classifier head: no activation, no normalizer.
      net = layers.conv2d(
          net,
          num_classes, [1, 1],
          activation_fn=None,
          normalizer_fn=None,
          scope='fc8')
      # Convert end_points_collection into a end_point dict.
      end_points = utils.convert_collection_to_dict(end_points_collection)
      if spatial_squeeze:
        # With 224x224 input, fc8 output is [batch, 1, 1, num_classes];
        # drop the two unit spatial dimensions.
        net = array_ops.squeeze(net, [1, 2], name='fc8/squeezed')
        end_points[sc.name + '/fc8'] = net
      return net, end_points


# Input resolution for which the 7x7 VALID fc6 conv yields a 1x1 output.
vgg_a.default_image_size = 224
def vgg_16(inputs,
           num_classes=1000,
           is_training=True,
           dropout_keep_prob=0.5,
           spatial_squeeze=True,
           scope='vgg_16'):
  """Oxford Net VGG 16-Layers version D Example.

  Note: All the fully_connected layers have been transformed to conv2d layers.
        To use in classification mode, resize input to 224x224.

  Args:
    inputs: a tensor of size [batch_size, height, width, channels].
    num_classes: number of predicted classes.
    is_training: whether or not the model is being trained.
    dropout_keep_prob: the probability that activations are kept in the dropout
      layers during training.
    spatial_squeeze: whether or not should squeeze the spatial dimensions of the
      outputs. Useful to remove unnecessary dimensions for classification.
    scope: Optional scope for the variables.

  Returns:
    the last op containing the log predictions and end_points dict.
  """
  with variable_scope.variable_scope(scope, 'vgg_16', [inputs]) as sc:
    # original_name_scope is unique per instantiation, so end points from
    # separate calls land in separate collections.
    end_points_collection = sc.original_name_scope + '_end_points'
    # Collect outputs for conv2d, fully_connected and max_pool2d.
    with arg_scope(
        [layers.conv2d, layers_lib.fully_connected, layers_lib.max_pool2d],
        outputs_collections=end_points_collection):
      # VGG-16 (16 weight layers): 2-2-3-3-3 conv layers per block, each
      # block followed by 2x2 max pooling.
      net = layers_lib.repeat(
          inputs, 2, layers.conv2d, 64, [3, 3], scope='conv1')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool1')
      net = layers_lib.repeat(net, 2, layers.conv2d, 128, [3, 3], scope='conv2')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool2')
      net = layers_lib.repeat(net, 3, layers.conv2d, 256, [3, 3], scope='conv3')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool3')
      net = layers_lib.repeat(net, 3, layers.conv2d, 512, [3, 3], scope='conv4')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool4')
      net = layers_lib.repeat(net, 3, layers.conv2d, 512, [3, 3], scope='conv5')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool5')
      # Use conv2d instead of fully_connected layers.
      net = layers.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
      net = layers_lib.dropout(
          net, dropout_keep_prob, is_training=is_training, scope='dropout6')
      net = layers.conv2d(net, 4096, [1, 1], scope='fc7')
      net = layers_lib.dropout(
          net, dropout_keep_prob, is_training=is_training, scope='dropout7')
      # fc8 is the linear classifier head: no activation, no normalizer.
      net = layers.conv2d(
          net,
          num_classes, [1, 1],
          activation_fn=None,
          normalizer_fn=None,
          scope='fc8')
      # Convert end_points_collection into a end_point dict.
      end_points = utils.convert_collection_to_dict(end_points_collection)
      if spatial_squeeze:
        # With 224x224 input, fc8 output is [batch, 1, 1, num_classes];
        # drop the two unit spatial dimensions.
        net = array_ops.squeeze(net, [1, 2], name='fc8/squeezed')
        end_points[sc.name + '/fc8'] = net
      return net, end_points


# Input resolution for which the 7x7 VALID fc6 conv yields a 1x1 output.
vgg_16.default_image_size = 224
def vgg_19(inputs,
           num_classes=1000,
           is_training=True,
           dropout_keep_prob=0.5,
           spatial_squeeze=True,
           scope='vgg_19'):
  """Oxford Net VGG 19-Layers version E Example.

  Note: All the fully_connected layers have been transformed to conv2d layers.
        To use in classification mode, resize input to 224x224.

  Args:
    inputs: a tensor of size [batch_size, height, width, channels].
    num_classes: number of predicted classes.
    is_training: whether or not the model is being trained.
    dropout_keep_prob: the probability that activations are kept in the dropout
      layers during training.
    spatial_squeeze: whether or not should squeeze the spatial dimensions of the
      outputs. Useful to remove unnecessary dimensions for classification.
    scope: Optional scope for the variables.

  Returns:
    the last op containing the log predictions and end_points dict.
  """
  with variable_scope.variable_scope(scope, 'vgg_19', [inputs]) as sc:
    # FIX: use original_name_scope (as vgg_a and vgg_16 do) rather than
    # sc.name. sc.name is identical across reuses of the same variable
    # scope, so distinct instantiations would dump their outputs into one
    # shared collection; original_name_scope is unique per call.
    end_points_collection = sc.original_name_scope + '_end_points'
    # Collect outputs for conv2d, fully_connected and max_pool2d.
    with arg_scope(
        [layers.conv2d, layers_lib.fully_connected, layers_lib.max_pool2d],
        outputs_collections=end_points_collection):
      # VGG-19 (19 weight layers): 2-2-4-4-4 conv layers per block, each
      # block followed by 2x2 max pooling.
      net = layers_lib.repeat(
          inputs, 2, layers.conv2d, 64, [3, 3], scope='conv1')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool1')
      net = layers_lib.repeat(net, 2, layers.conv2d, 128, [3, 3], scope='conv2')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool2')
      net = layers_lib.repeat(net, 4, layers.conv2d, 256, [3, 3], scope='conv3')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool3')
      net = layers_lib.repeat(net, 4, layers.conv2d, 512, [3, 3], scope='conv4')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool4')
      net = layers_lib.repeat(net, 4, layers.conv2d, 512, [3, 3], scope='conv5')
      net = layers_lib.max_pool2d(net, [2, 2], scope='pool5')
      # Use conv2d instead of fully_connected layers.
      net = layers.conv2d(net, 4096, [7, 7], padding='VALID', scope='fc6')
      net = layers_lib.dropout(
          net, dropout_keep_prob, is_training=is_training, scope='dropout6')
      net = layers.conv2d(net, 4096, [1, 1], scope='fc7')
      net = layers_lib.dropout(
          net, dropout_keep_prob, is_training=is_training, scope='dropout7')
      # fc8 is the linear classifier head: no activation, no normalizer.
      net = layers.conv2d(
          net,
          num_classes, [1, 1],
          activation_fn=None,
          normalizer_fn=None,
          scope='fc8')
      # Convert end_points_collection into a end_point dict.
      end_points = utils.convert_collection_to_dict(end_points_collection)
      if spatial_squeeze:
        # With 224x224 input, fc8 output is [batch, 1, 1, num_classes];
        # drop the two unit spatial dimensions.
        net = array_ops.squeeze(net, [1, 2], name='fc8/squeezed')
        end_points[sc.name + '/fc8'] = net
      return net, end_points


# Input resolution for which the 7x7 VALID fc6 conv yields a 1x1 output.
vgg_19.default_image_size = 224

# Alias
vgg_d = vgg_16
vgg_e = vgg_19
|
apache-2.0
|
randomblame/android_kernel_hisense_3.4
|
scripts/tracing/draw_functrace.py
|
14676
|
3560
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <fweisbec@gmail.com>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions's names and the the call time are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait some times but not too much, the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
    """ This class provides a tree representation of the functions
    call stack. If a function has no parent in the kernel (interrupt,
    syscall, kernel thread...) then it is attached to a virtual parent
    called ROOT.
    """
    # Shared sentinel for top-level calls; assigned a real node in main().
    ROOT = None

    def __init__(self, func, time = None, parent = None):
        # func: function name; time: timestamp string, or None for ROOT
        # and for parents synthesized by getParent().
        self._func = func
        self._time = time
        if parent is None:
            self._parent = CallTree.ROOT
        else:
            self._parent = parent
        self._children = []

    def calls(self, func, calltime):
        """ If a function calls another one, call this method to insert it
        into the tree at the appropriate place.
        @return: A reference to the newly created child node.
        """
        child = CallTree(func, calltime, self)
        self._children.append(child)
        return child

    def getParent(self, func):
        """ Retrieve the last parent of the current node that
        has the name given by func. If this function is not
        on a parent, then create it as new child of root
        @return: A reference to the parent.
        """
        tree = self
        # Walk up the ancestry until we find a frame named func (or hit ROOT).
        while tree != CallTree.ROOT and tree._func != func:
            tree = tree._parent
        if tree == CallTree.ROOT:
            # Caller never seen before: attach it directly under ROOT,
            # with no timestamp.
            child = CallTree.ROOT.calls(func, None)
            return child
        return tree

    def __repr__(self):
        return self.__toString("", True)

    def __toString(self, branch, lastChild):
        # Render this node then recurse into children, extending the ASCII
        # branch prefix (" |") one level per depth.
        if self._time is not None:
            s = "%s----%s (%s)\n" % (branch, self._func, self._time)
        else:
            s = "%s----%s\n" % (branch, self._func)
        i = 0
        if lastChild:
            # A last child does not continue its parent's vertical bar,
            # so replace the trailing bar column with a blank.
            branch = branch[:-1] + " "
        while i < len(self._children):
            if i != len(self._children) - 1:
                s += "%s" % self._children[i].__toString(branch +\
                    " |", False)
            else:
                s += "%s" % self._children[i].__toString(branch +\
                    " |", True)
            i += 1
        return s
class BrokenLineException(Exception):
    """Signals a truncated trace line.

    The final line of a piped trace can be cut off when the pipe breaks;
    raising this tells the caller to stop processing and drop that line.
    """
    pass
class CommentLineException(Exception):
    """Signals a comment line (e.g. the trace file header).

    Such lines carry no call data and are simply skipped by the caller.
    """
    pass
def parseLine(line):
    """Split one ftrace line into a (calltime, callee, caller) tuple.

    Raises CommentLineException for "#" comment lines and
    BrokenLineException when the line does not match the expected
    "<timestamp>: <callee> <-<caller>" shape.
    """
    stripped = line.strip()
    if stripped.startswith("#"):
        raise CommentLineException
    match = re.match(r"[^]]+?\] +([0-9.]+): (\w+) <-(\w+)", stripped)
    if match is None:
        raise BrokenLineException
    return match.groups()
def main():
    # NOTE: this script is Python 2 (see the print statement below).
    # Create the virtual root all orphan calls get attached to.
    CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
    tree = CallTree.ROOT
    for line in sys.stdin:
        try:
            calltime, callee, caller = parseLine(line)
        except BrokenLineException:
            # Truncated last line (broken pipe): stop processing entirely.
            break
        except CommentLineException:
            # Header/comment lines carry no data: skip them.
            continue
        # Re-anchor on the caller's frame, then descend into the callee.
        tree = tree.getParent(caller)
        tree = tree.calls(callee, calltime)
    print CallTree.ROOT

if __name__ == "__main__":
    main()
|
gpl-2.0
|
vijayendrabvs/ssl-neutron
|
neutron/tests/unit/brocade/test_brocade_vlan.py
|
24
|
2176
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2013 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test vlans alloc/dealloc.
"""
from neutron.db import api as db
from neutron.openstack.common import context
from neutron.plugins.brocade import vlanbm as vlan_bitmap
from neutron.tests import base
class TestVlanBitmap(base.BaseTestCase):
    """Exercise the Brocade VLAN bitmap allocator (alloc/dealloc)."""

    def setUp(self):
        super(TestVlanBitmap, self).setUp()
        # Fresh DB per test; clear_db registered so state never leaks.
        db.configure_db()
        self.addCleanup(db.clear_db)
        self.context = context.get_admin_context()
        self.context.session = db.get_session()

    def test_vlan(self):
        """test vlan allocation/de-alloc."""
        # The assertions below are order-dependent: the bitmap hands out
        # the lowest free VLAN when None is requested.
        self.vbm_ = vlan_bitmap.VlanBitmap(self.context)
        vlan_id = self.vbm_.get_next_vlan(None)
        # First vlan is always 2
        self.assertEqual(vlan_id, 2)
        # next vlan is always 3
        vlan_id = self.vbm_.get_next_vlan(None)
        self.assertEqual(vlan_id, 3)
        # get a specific vlan i.e. 4
        vlan_id = self.vbm_.get_next_vlan(4)
        self.assertEqual(vlan_id, 4)
        # get a specific vlan i.e. 5
        vlan_id = self.vbm_.get_next_vlan(5)
        self.assertEqual(vlan_id, 5)
        # Skip 6
        # get a specific vlan i.e. 7
        vlan_id = self.vbm_.get_next_vlan(7)
        self.assertEqual(vlan_id, 7)
        # get a specific vlan i.e. 1900
        vlan_id = self.vbm_.get_next_vlan(1900)
        self.assertEqual(vlan_id, 1900)
        # Release 4 and get next again: the freed slot is the lowest free
        # VLAN, so it is re-allocated before 6.
        self.vbm_.release_vlan(4)
        vlan_id = self.vbm_.get_next_vlan(None)
        self.assertEqual(vlan_id, 4)
|
apache-2.0
|
VasilyNemkov/percona-xtrabackup
|
storage/innobase/xtrabackup/test/python/testtools/runtest.py
|
42
|
7648
|
# Copyright (c) 2009-2010 testtools developers. See LICENSE for details.
"""Individual test case execution."""
__all__ = [
'MultipleExceptions',
'RunTest',
]
import sys
from testtools.testresult import ExtendedToOriginalDecorator
class MultipleExceptions(Exception):
    """Collects several exceptions arising from a single operation.

    :ivar args: One sys.exc_info() tuple per collected exception.
    """
class RunTest(object):
    """An object to run a test.

    RunTest objects are used to implement the internal logic involved in
    running a test. TestCase.__init__ stores _RunTest as the class of RunTest
    to execute. Passing the runTest= parameter to TestCase.__init__ allows a
    different RunTest class to be used to execute the test.

    Subclassing or replacing RunTest can be useful to add functionality to the
    way that tests are run in a given project.

    :ivar case: The test case that is to be run.
    :ivar result: The result object a case is reporting to.
    :ivar handlers: A list of (ExceptionClass, handler_function) for
        exceptions that should be caught if raised from the user
        code. Exceptions that are caught are checked against this list in
        first to last order. There is a catch-all of 'Exception' at the end
        of the list, so to add a new exception to the list, insert it at the
        front (which ensures that it will be checked before any existing base
        classes in the list. If you add multiple exceptions some of which are
        subclasses of each other, add the most specific exceptions last (so
        they come before their parent classes in the list).
    :ivar exception_caught: An object returned when _run_user catches an
        exception.
    :ivar _exceptions: A list of caught exceptions, used to do the single
        reporting of error/failure/skip etc.
    """

    def __init__(self, case, handlers=None):
        """Create a RunTest to run a case.

        :param case: A testtools.TestCase test case object.
        :param handlers: Exception handlers for this RunTest. These are stored
            in self.handlers and can be modified later if needed.
        """
        self.case = case
        self.handlers = handlers or []
        # Unique sentinel: _run_user returns it (never raises) when user
        # code raised a handled exception.
        self.exception_caught = object()
        self._exceptions = []

    def run(self, result=None):
        """Run self.case reporting activity to result.

        :param result: Optional testtools.TestResult to report activity to.
        :return: The result object the test was run against.
        """
        if result is None:
            # No result supplied: create a default one and own its
            # startTestRun/stopTestRun lifecycle ourselves.
            actual_result = self.case.defaultTestResult()
            actual_result.startTestRun()
        else:
            actual_result = result
        try:
            return self._run_one(actual_result)
        finally:
            if result is None:
                actual_result.stopTestRun()

    def _run_one(self, result):
        """Run one test reporting to result.

        :param result: A testtools.TestResult to report activity to.
            This result object is decorated with an ExtendedToOriginalDecorator
            to ensure that the latest TestResult API can be used with
            confidence by client code.
        :return: The result object the test was run against.
        """
        return self._run_prepared_result(ExtendedToOriginalDecorator(result))

    def _run_prepared_result(self, result):
        """Run one test reporting to result.

        :param result: A testtools.TestResult to report activity to.
        :return: The result object the test was run against.
        """
        result.startTest(self.case)
        self.result = result
        try:
            self._exceptions = []
            self._run_core()
            if self._exceptions:
                # One or more caught exceptions, now trigger the test's
                # reporting method for just one.
                e = self._exceptions.pop()
                # First handler whose class matches wins (handlers are
                # ordered most-specific-first by convention, see class doc).
                for exc_class, handler in self.handlers:
                    if isinstance(e, exc_class):
                        handler(self.case, self.result, e)
                        break
        finally:
            result.stopTest(self.case)
        return result

    def _run_core(self):
        """Run the user supplied test code."""
        if self.exception_caught == self._run_user(self.case._run_setup,
            self.result):
            # Don't run the test method if we failed getting here.
            self._run_cleanups(self.result)
            return
        # Run everything from here on in. If any of the methods raise an
        # exception we'll have failed.
        failed = False
        # The nested try/finally ladder guarantees teardown runs even if
        # the test method fails, and cleanups run even if teardown fails.
        try:
            if self.exception_caught == self._run_user(
                self.case._run_test_method, self.result):
                failed = True
        finally:
            try:
                if self.exception_caught == self._run_user(
                    self.case._run_teardown, self.result):
                    failed = True
            finally:
                try:
                    if self.exception_caught == self._run_user(
                        self._run_cleanups, self.result):
                        failed = True
                finally:
                    if not failed:
                        self.result.addSuccess(self.case,
                            details=self.case.getDetails())

    def _run_cleanups(self, result):
        """Run the cleanups that have been added with addCleanup.

        See the docstring for addCleanup for more information.

        :return: None if all cleanups ran without error,
            ``exception_caught`` if there was an error.
        """
        failing = False
        # Pop so each cleanup runs exactly once (LIFO), even on failure.
        while self.case._cleanups:
            function, arguments, keywordArguments = self.case._cleanups.pop()
            got_exception = self._run_user(
                function, *arguments, **keywordArguments)
            if got_exception == self.exception_caught:
                failing = True
        if failing:
            return self.exception_caught

    def _run_user(self, fn, *args, **kwargs):
        """Run a user supplied function.

        Exceptions are processed by `_got_user_exception`.

        :return: Either whatever 'fn' returns or ``exception_caught`` if
            'fn' raised an exception.
        """
        try:
            return fn(*args, **kwargs)
        except KeyboardInterrupt:
            # Never swallow Ctrl-C: let it propagate past the runner.
            raise
        except:
            return self._got_user_exception(sys.exc_info())

    def _got_user_exception(self, exc_info, tb_label='traceback'):
        """Called when user code raises an exception.

        If 'exc_info' is a `MultipleExceptions`, then we recurse into it
        unpacking the errors that it's made up from.

        :param exc_info: A sys.exc_info() tuple for the user error.
        :param tb_label: An optional string label for the error. If
            not specified, will default to 'traceback'.
        :return: 'exception_caught' if we catch one of the exceptions that
            have handlers in 'handlers', otherwise raise the error.
        """
        if exc_info[0] is MultipleExceptions:
            for sub_exc_info in exc_info[1].args:
                self._got_user_exception(sub_exc_info, tb_label)
            return self.exception_caught
        try:
            e = exc_info[1]
            self.case.onException(exc_info, tb_label=tb_label)
        finally:
            # Break the traceback reference cycle promptly.
            del exc_info
        for exc_class, handler in self.handlers:
            if isinstance(e, exc_class):
                self._exceptions.append(e)
                return self.exception_caught
        raise e
|
gpl-2.0
|
cortesi/mitmproxy
|
mitmproxy/tools/console/commands.py
|
3
|
4544
|
import urwid
import blinker
import textwrap
from mitmproxy.tools.console import layoutwidget
from mitmproxy.tools.console import signals
# Height (in rows) of the command-help pane pinned below the command list.
HELP_HEIGHT = 5

# Fired with the focused command's help text whenever the selection in the
# command list changes; CommandHelp subscribes to it.
command_focus_change = blinker.Signal()
class CommandItem(urwid.WidgetWrap):
    """One row of the command list: path, parameter names and, when the
    command declares one, its return type. Focused rows get a ">> " marker.
    """
    def __init__(self, walker, cmd, focused):
        self.walker, self.cmd, self.focused = walker, cmd, focused
        # WidgetWrap is initialized with None first; _w is assigned right
        # after, once self.cmd/self.focused are available to get_widget().
        super().__init__(None)
        self._w = self.get_widget()

    def get_widget(self):
        # urwid text markup: list of (attribute, text) pairs.
        parts = [
            ("focus", ">> " if self.focused else "   "),
            ("title", self.cmd.path),
            ("text", " "),
            ("text", " ".join(self.cmd.paramnames())),
        ]
        if self.cmd.returntype:
            # NOTE(review): this appends a nested list into the markup;
            # urwid accepts nested markup lists, so the effect is the same
            # as extending parts — presumably intentional.
            parts.append([
                ("title", " -> "),
                ("text", self.cmd.retname()),
            ])
        return urwid.AttrMap(
            urwid.Padding(urwid.Text(parts)),
            "text"
        )

    def get_edit_text(self):
        # Delegates to the wrapped widget; assumes _w[1] supports
        # get_edit_text — TODO confirm against urwid.AttrMap indexing.
        return self._w[1].get_edit_text()

    def selectable(self):
        return True

    def keypress(self, size, key):
        # Rows handle no keys themselves; everything bubbles up.
        return key
class CommandListWalker(urwid.ListWalker):
    """ListWalker over the master's registered commands, sorted by their
    signature-help string. Tracks the focused index and broadcasts the
    focused command's help text on focus change.
    """
    def __init__(self, master):
        self.master = master
        self.index = 0
        self.refresh()

    def refresh(self):
        # Re-snapshot and re-sort the command registry, keeping the
        # current focus index.
        self.cmds = list(self.master.commands.commands.values())
        self.cmds.sort(key=lambda x: x.signature_help())
        self.set_focus(self.index)

    def get_edit_text(self):
        return self.focus_obj.get_edit_text()

    def _get(self, pos):
        # Rows are built on demand; only the focused one is rendered
        # with the focus marker.
        cmd = self.cmds[pos]
        return CommandItem(self, cmd, pos == self.index)

    def get_focus(self):
        return self.focus_obj, self.index

    def set_focus(self, index):
        cmd = self.cmds[index]
        self.index = index
        self.focus_obj = self._get(self.index)
        # Notify listeners (the help pane) of the new focused command.
        command_focus_change.send(cmd.help or "")

    def get_next(self, pos):
        # urwid ListWalker protocol: (None, None) marks the end of the list.
        if pos >= len(self.cmds) - 1:
            return None, None
        pos = pos + 1
        return self._get(pos), pos

    def get_prev(self, pos):
        pos = pos - 1
        if pos < 0:
            return None, None
        return self._get(pos), pos
class CommandsList(urwid.ListBox):
    """Scrollable list of all commands, backed by CommandListWalker."""
    def __init__(self, master):
        self.master = master
        self.walker = CommandListWalker(master)
        super().__init__(self.walker)

    def keypress(self, size, key):
        # Keys here are mitmproxy's mapped pseudo-keys (m_*), not raw input.
        if key == "m_select":
            # Pre-fill the status prompt with the focused command's path.
            foc, idx = self.get_focus()
            signals.status_prompt_command.send(partial=foc.cmd.path + " ")
        elif key == "m_start":
            self.set_focus(0)
            self.walker._modified()
        elif key == "m_end":
            self.set_focus(len(self.walker.cmds) - 1)
            self.walker._modified()
        return super().keypress(size, key)
class CommandHelp(urwid.Frame):
    """Pane showing the help text of the currently focused command.

    Subscribes to command_focus_change and re-renders its body with the
    received text.
    """
    def __init__(self, master):
        self.master = master
        super().__init__(self.widget(""))
        self.set_active(False)
        command_focus_change.connect(self.sig_mod)

    def set_active(self, val):
        # Swap the header style to show whether this pane has focus.
        h = urwid.Text("Command Help")
        style = "heading" if val else "heading_inactive"
        self.header = urwid.AttrWrap(h, style)

    def widget(self, txt):
        # Wrap the help text to the current terminal width, one Text
        # widget per wrapped line.
        cols, _ = self.master.ui.get_cols_rows()
        return urwid.ListBox(
            [urwid.Text(i) for i in textwrap.wrap(txt, cols)]
        )

    def sig_mod(self, txt):
        # Signal callback: rebuild the body for the new help text.
        self.set_body(self.widget(txt))
class Commands(urwid.Pile, layoutwidget.LayoutWidget):
    """Command-reference screen: a command list stacked above a
    fixed-height help pane.
    """
    title = "Command Reference"
    keyctx = "commands"

    def __init__(self, master):
        oh = CommandHelp(master)
        super().__init__(
            [
                CommandsList(master),
                (HELP_HEIGHT, oh),   # help pane pinned to a fixed height
            ]
        )
        self.master = master

    def layout_pushed(self, prev):
        # Re-read the command registry each time this screen is shown.
        self.widget_list[0].walker.refresh()

    def keypress(self, size, key):
        if key == "m_next":
            # Cycle focus between the list (0) and the help pane (1),
            # and highlight the help header when it gains focus.
            self.focus_position = (
                self.focus_position + 1
            ) % len(self.widget_list)
            self.widget_list[1].set_active(self.focus_position == 1)
            key = None

        # This is essentially a copypasta from urwid.Pile's keypress handler.
        # So much for "closed for modification, but open for extension".
        item_rows = None
        if len(size) == 2:
            item_rows = self.get_item_rows(size, focus = True)
        i = self.widget_list.index(self.focus_item)
        tsize = self.get_item_size(size, i, True, item_rows)
        return self.focus_item.keypress(tsize, key)
|
mit
|
Senseg/Py4A
|
python3-alpha/python3-src/Lib/encodings/uu_codec.py
|
55
|
2763
|
"""Python 'uu_codec' Codec - UU content transfer encoding.
This codec de/encodes from bytes to bytes and is therefore usable with
bytes.transform() and bytes.untransform().
Written by Marc-Andre Lemburg (mal@lemburg.com). Some details were
adapted from uu.py which was written by Lance Ellinghouse and
modified by Jack Jansen and Fredrik Lundh.
"""
import codecs
import binascii
from io import BytesIO
### Codec APIs
def uu_encode(input, errors='strict', filename='<data>', mode=0o666):
    """Encode *input* bytes as uuencoded bytes.

    Returns a ``(output, length_consumed)`` tuple. Only 'strict' error
    handling is supported. *filename* and *mode* populate the "begin" line.
    """
    assert errors == 'strict'
    src = BytesIO(input)
    dst = BytesIO()
    # Header line: "begin <mode> <filename>"; mode is masked to permission bits.
    dst.write(('begin %o %s\n' % (mode & 0o777, filename)).encode('ascii'))
    while True:
        chunk = src.read(45)  # uu encodes at most 45 raw bytes per output line
        if not chunk:
            break
        dst.write(binascii.b2a_uu(chunk))
    dst.write(b' \nend\n')
    return dst.getvalue(), len(input)
def uu_decode(input, errors='strict'):
    """Decode uuencoded *input* bytes.

    Returns a ``(output, length_consumed)`` tuple. Only 'strict' error
    handling is supported.

    Raises:
        ValueError: if the "begin" line is missing, or the data is
            truncated before an "end" line is seen.
    """
    assert errors == 'strict'
    infile = BytesIO(input)
    outfile = BytesIO()
    readline = infile.readline
    write = outfile.write
    # Find start of encoded data
    while 1:
        s = readline()
        if not s:
            raise ValueError('Missing "begin" line in input data')
        if s[:5] == b'begin':
            break
    # Decode
    while True:
        s = readline()
        if not s or s == b'end\n':
            break
        try:
            data = binascii.a2b_uu(s)
        except binascii.Error as v:
            # Workaround for broken uuencoders by /Fredrik Lundh.
            # BUGFIX: indexing bytes yields an int in Python 3, so no ord();
            # use floor division so the slice index stays an int.
            nbytes = (((s[0]-32) & 63) * 4 + 5) // 3
            data = binascii.a2b_uu(s[:nbytes])
            #sys.stderr.write("Warning: %s\n" % str(v))
        write(data)
    if not s:
        raise ValueError('Truncated input data')
    return (outfile.getvalue(), len(input))
class Codec(codecs.Codec):
    """Stateless codec delegating to the module-level uu functions."""

    def encode(self, input, errors='strict'):
        """Encode *input* bytes; returns (output, length_consumed)."""
        encoded, consumed = uu_encode(input, errors)
        return encoded, consumed

    def decode(self, input, errors='strict'):
        """Decode *input* bytes; returns (output, length_consumed)."""
        decoded, consumed = uu_decode(input, errors)
        return decoded, consumed
class IncrementalEncoder(codecs.IncrementalEncoder):
    """Incremental interface; uu keeps no state, so each call encodes fully."""

    def encode(self, input, final=False):
        encoded, _ = uu_encode(input, self.errors)
        return encoded
class IncrementalDecoder(codecs.IncrementalDecoder):
    """Incremental interface; uu keeps no state, so each call decodes fully."""

    def decode(self, input, final=False):
        decoded, _ = uu_decode(input, self.errors)
        return decoded
class StreamWriter(Codec, codecs.StreamWriter):
    # uu is a bytes-to-bytes codec, so the stream buffer holds bytes, not str.
    charbuffertype = bytes
class StreamReader(Codec, codecs.StreamReader):
    # uu is a bytes-to-bytes codec, so the stream buffer holds bytes, not str.
    charbuffertype = bytes
### encodings module API
def getregentry():
    """Return the codec registration record for the 'uu' encoding."""
    entry = dict(
        name='uu',
        encode=uu_encode,
        decode=uu_decode,
        incrementalencoder=IncrementalEncoder,
        incrementaldecoder=IncrementalDecoder,
        streamreader=StreamReader,
        streamwriter=StreamWriter,
    )
    return codecs.CodecInfo(**entry)
|
apache-2.0
|
clearlinux/clearstack
|
clearstack/common/swupd.py
|
1
|
1587
|
#
# Copyright (c) 2015 Intel Corporation
#
# Author: Alberto Murillo <alberto.murillo.silva@intel.com>
# Author: Victor Morales <victor.morales@intel.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
from common import util
from common.util import LOG
class Client():
    """Thin wrapper around the Clear Linux software updater (swupd)."""

    @staticmethod
    def install(bundle):
        """Install *bundle* unless it (or openstack-all-in-one) is present.

        BUGFIX: the original defined ``install(bundle)`` without ``self`` or
        ``@staticmethod``, so calling it on an instance would bind the
        instance as *bundle*. ``@staticmethod`` keeps the existing
        ``Client.install(bundle)`` call style working and fixes instance
        calls. Errors are logged, never raised: installation is best-effort.
        """
        if not os.path.isfile("/usr/share/clear/bundles/" + str(bundle)) and \
           not os.path.isfile("/usr/share/clear/bundles/"
                              "openstack-all-in-one"):
            LOG.info("Installing {0} bundle".format(bundle))
            # Older images ship clr_bundle_add; prefer swupd when available.
            cmd = "clr_bundle_add -V {0}".format(bundle)
            if os.path.isfile("/bin/swupd"):
                cmd = "swupd bundle-add -V {0}".format(bundle)
            try:
                stdout, stderr = util.run_command(cmd)
                if stderr:
                    LOG.error("swupd bundle-add: {0}\n{1}"
                              .format(stdout, stderr))
            except Exception:
                LOG.error("clearstack: cannot install"
                          " {0} bundle".format(bundle))
|
apache-2.0
|
KAsante95/osf.io
|
website/addons/dropbox/views.py
|
8
|
2917
|
"""Views fo the node settings page."""
# -*- coding: utf-8 -*-
import logging
import httplib as http
from dropbox.rest import ErrorResponse
from dropbox.client import DropboxClient
from urllib3.exceptions import MaxRetryError
from framework.exceptions import HTTPError
from website.addons.dropbox.serializer import DropboxSerializer
from website.addons.base import generic_views
logger = logging.getLogger(__name__)
debug = logger.debug
# Addon identifiers used by the generic_views factory functions below.
SHORT_NAME = 'dropbox'
FULL_NAME = 'Dropbox'
# View: list the Dropbox accounts linked to the current user.
dropbox_account_list = generic_views.account_list(
    SHORT_NAME,
    DropboxSerializer
)
# View: import an existing Dropbox authorization into this node.
dropbox_import_auth = generic_views.import_auth(
    SHORT_NAME,
    DropboxSerializer
)
def _get_folders(node_addon, folder_id):
    """Return serialized Dropbox folder entries under *folder_id*.

    A *folder_id* of ``None`` yields the synthetic root entry representing
    the user's full Dropbox. Raises an HTTPError when the folder is missing,
    deleted, or Dropbox cannot be reached.
    """
    node = node_addon.owner
    if folder_id is None:
        # Synthetic root entry: the user's entire Dropbox.
        return [{
            'id': '/',
            'path': '/',
            'addon': 'dropbox',
            'kind': 'folder',
            'name': '/ (Full Dropbox)',
            'urls': {
                'folders': node.api_url_for('dropbox_folder_list', folderId='/'),
            }
        }]
    client = DropboxClient(node_addon.external_account.oauth_key)
    file_not_found = HTTPError(http.NOT_FOUND, data=dict(
        message_short='File not found',
        message_long='The Dropbox file '
                     'you requested could not be found.'))
    max_retry_error = HTTPError(http.REQUEST_TIMEOUT, data=dict(
        message_short='Request Timeout',
        message_long='Dropbox could not be reached '
                     'at this time.'))
    try:
        metadata = client.metadata(folder_id)
    except ErrorResponse:
        raise file_not_found
    except MaxRetryError:
        raise max_retry_error
    # A deleted folder is reported the same way as a missing one.
    if metadata.get('is_deleted'):
        raise file_not_found
    folders = []
    for entry in metadata['contents']:
        if not entry['is_dir']:
            continue
        entry_path = entry['path']
        folders.append({
            'addon': 'dropbox',
            'kind': 'folder',
            'id': entry_path,
            'name': entry_path.split('/')[-1],
            'path': entry_path,
            'urls': {
                'folders': node.api_url_for('dropbox_folder_list', folderId=entry_path),
            }
        })
    return folders
# View: enumerate folders via _get_folders for the folder picker widget.
dropbox_folder_list = generic_views.folder_list(
    SHORT_NAME,
    FULL_NAME,
    _get_folders
)
# View: return the serialized addon settings for a node.
dropbox_get_config = generic_views.get_config(
    SHORT_NAME,
    DropboxSerializer
)
def _set_folder(node_addon, folder, auth):
uid = folder['id']
node_addon.set_folder(uid, auth=auth)
node_addon.save()
# View: store the user's chosen folder via _set_folder.
dropbox_set_config = generic_views.set_config(
    SHORT_NAME,
    FULL_NAME,
    DropboxSerializer,
    _set_folder
)
# View: unlink the Dropbox authorization from a node.
dropbox_deauthorize_node = generic_views.deauthorize_node(
    SHORT_NAME
)
# View: serialize the root folder for the file browser.
dropbox_root_folder = generic_views.root_folder(
    SHORT_NAME
)
|
apache-2.0
|
pchaigno/grr
|
lib/hunts/standard.py
|
6
|
31511
|
#!/usr/bin/env python
"""Some multiclient flows aka hunts."""
import threading
import logging
from grr.lib import aff4
from grr.lib import config_lib
from grr.lib import data_store
from grr.lib import flow
from grr.lib import rdfvalue
from grr.lib import registry
from grr.lib import stats
from grr.lib import utils
from grr.lib.aff4_objects import collections
from grr.lib.aff4_objects import cronjobs
from grr.lib.hunts import implementation
from grr.lib.rdfvalues import client as rdf_client
from grr.lib.rdfvalues import flows as rdf_flows
from grr.lib.rdfvalues import paths as rdf_paths
from grr.lib.rdfvalues import protodict as rdf_protodict
from grr.lib.rdfvalues import structs as rdf_structs
from grr.parsers import wmi_parser
from grr.proto import flows_pb2
from grr.proto import output_plugin_pb2
class OutputPluginBatchProcessingStatus(rdf_structs.RDFProtoStruct):
  """Describes processing status of a single batch by a hunt output plugin."""
  # RDF wrapper around the corresponding protobuf message.
  protobuf = output_plugin_pb2.OutputPluginBatchProcessingStatus
class Error(Exception):
  """Base class for hunt-related errors raised by this module."""
  pass
class HuntError(Error):
  """Generic error raised by hunt implementations."""
  pass
class ResultsProcessingError(Error):
  """This exception is raised when errors happen during results processing."""

  def __init__(self):
    # Maps hunt URN -> {plugin name -> [exceptions]}.
    self.exceptions_by_hunt = {}
    super(ResultsProcessingError, self).__init__()

  def RegisterSubException(self, hunt_urn, plugin_name, exception):
    """Record *exception* raised by *plugin_name* for *hunt_urn*."""
    per_plugin = self.exceptions_by_hunt.setdefault(hunt_urn, {})
    per_plugin.setdefault(plugin_name, []).append(exception)

  def __repr__(self):
    lines = []
    for hunt_urn, per_plugin in self.exceptions_by_hunt.items():
      for plugin_name, exception in per_plugin.items():
        lines.append("Exception for hunt %s (plugin %s): %s" %
                     (hunt_urn, plugin_name, exception))
    return "\n".join(lines)
class CreateGenericHuntFlowArgs(rdf_structs.RDFProtoStruct):
  """Arguments for CreateGenericHuntFlow (see matching protobuf)."""
  protobuf = flows_pb2.CreateGenericHuntFlowArgs
class CreateGenericHuntFlow(flow.GRRFlow):
  """Create but don't run a GenericHunt with the given name, args and rules.
  As direct write access to the data store is forbidden, we have to use flows to
  perform any kind of modifications. This flow delegates ACL checks to
  access control manager.
  """
  # This flow can run on any client without ACL enforcement (an SUID flow).
  ACL_ENFORCED = False
  args_type = CreateGenericHuntFlowArgs
  @flow.StateHandler()
  def Start(self):
    """Create the hunt, in the paused state."""
    # Anyone can create the hunt but it will be created in the paused
    # state. Permissions are required to actually start it.
    with implementation.GRRHunt.StartHunt(
        runner_args=self.args.hunt_runner_args,
        args=self.args.hunt_args,
        token=self.token) as hunt:
      # Nothing really to do here - hunts are always created in the paused
      # state.
      self.Log("User %s created a new %s hunt (%s)",
               self.token.username, hunt.state.args.flow_runner_args.flow_name,
               hunt.urn)
class CreateAndRunGenericHuntFlow(flow.GRRFlow):
  """Create and run a GenericHunt with the given name, args and rules.
  This flow is different to the CreateGenericHuntFlow in that it
  immediately runs the hunt it created. This functionality cannot be
  offered in a SUID flow or every user could run any flow on any
  client without approval by just running a hunt on just that single
  client. Thus, this flow must *not* be SUID.
  """
  args_type = CreateGenericHuntFlowArgs
  @flow.StateHandler()
  def Start(self):
    """Create the hunt and run it."""
    with implementation.GRRHunt.StartHunt(
        runner_args=self.args.hunt_runner_args,
        args=self.args.hunt_args,
        token=self.token) as hunt:
      # Unlike CreateGenericHuntFlow, start the hunt immediately.
      hunt.Run()
      self.Log("User %s created a new %s hunt (%s)",
               self.token.username, hunt.state.args.flow_runner_args.flow_name,
               hunt.urn)
class StartHuntFlowArgs(rdf_structs.RDFProtoStruct):
  """Arguments for StartHuntFlow (see matching protobuf)."""
  protobuf = flows_pb2.StartHuntFlowArgs
class StartHuntFlow(flow.GRRFlow):
  """Start already created hunt with given id.
  As direct write access to the data store is forbidden, we have to use flows to
  perform any kind of modifications. This flow delegates ACL checks to
  access control manager.
  """
  # This flow can run on any client without ACL enforcement (an SUID flow).
  ACL_ENFORCED = False
  args_type = StartHuntFlowArgs
  @flow.StateHandler()
  def Start(self):
    """Find a hunt, perform a permissions check and run it."""
    # Check permissions first, and if ok, just proceed.
    data_store.DB.security_manager.CheckHuntAccess(
        self.token.RealUID(), self.args.hunt_urn)
    with aff4.FACTORY.Open(
        self.args.hunt_urn, aff4_type="GRRHunt",
        mode="rw", token=self.token) as hunt:
      hunt.Run()
class DeleteHuntFlowArgs(rdf_structs.RDFProtoStruct):
  """Arguments for DeleteHuntFlow (see matching protobuf)."""
  protobuf = flows_pb2.DeleteHuntFlowArgs
class DeleteHuntFlow(flow.GRRFlow):
  """Delete an existing hunt, if it hasn't done anything yet."""
  ACL_ENFORCED = False
  args_type = DeleteHuntFlowArgs
  @flow.StateHandler()
  def Start(self):
    """Check permissions and preconditions, then delete the hunt object."""
    with aff4.FACTORY.Open(
        self.args.hunt_urn, aff4_type="GRRHunt", mode="rw",
        token=self.token) as hunt:
      # Check for approval if the hunt was created by somebody else.
      if self.token.username != hunt.creator:
        data_store.DB.security_manager.CheckHuntAccess(
            self.token.RealUID(), self.args.hunt_urn)
      # Running hunts must be stopped before they can be deleted.
      if hunt.GetRunner().IsHuntStarted():
        raise RuntimeError("Unable to delete a running hunt.")
      if (not config_lib.CONFIG["AdminUI.allow_hunt_results_delete"] and
          hunt.client_count):
        raise RuntimeError("Unable to delete a hunt with results while "
                           "AdminUI.allow_hunt_results_delete is disabled.")
    aff4.FACTORY.Delete(self.args.hunt_urn, token=self.token)
class StopHuntFlowArgs(rdf_structs.RDFProtoStruct):
  """Arguments for StopHuntFlow (see matching protobuf)."""
  protobuf = flows_pb2.StopHuntFlowArgs
class StopHuntFlow(flow.GRRFlow):
  """Stop an already created hunt with the given id."""
  # This flow can run on any client without ACL enforcement (an SUID flow).
  ACL_ENFORCED = False
  args_type = StopHuntFlowArgs
  @flow.StateHandler()
  def Start(self):
    """Find a hunt, perform a permissions check and pause it."""
    # Check permissions first, and if ok, just proceed.
    data_store.DB.security_manager.CheckHuntAccess(
        self.token.RealUID(), self.args.hunt_urn)
    with aff4.FACTORY.Open(
        self.args.hunt_urn, aff4_type="GRRHunt", mode="rw",
        token=self.token) as hunt:
      hunt.Stop()
class ModifyHuntFlowArgs(rdf_structs.RDFProtoStruct):
  """Arguments for ModifyHuntFlow (see matching protobuf)."""
  protobuf = flows_pb2.ModifyHuntFlowArgs
class ModifyHuntFlow(flow.GRRFlow):
  """Modify already created hunt with given id.
  As direct write access to the data store is forbidden, we have to use flows to
  perform any kind of modifications. This flow delegates ACL checks to
  access control manager.
  """
  # This flow can run on any client without ACL enforcement (an SUID flow).
  ACL_ENFORCED = False
  args_type = ModifyHuntFlowArgs
  @flow.StateHandler()
  def Start(self):
    """Find a hunt, perform a permissions check and modify it."""
    with aff4.FACTORY.Open(
        self.args.hunt_urn, aff4_type="GRRHunt",
        mode="rw", token=self.token) as hunt:
      runner = hunt.GetRunner()
      data_store.DB.security_manager.CheckHuntAccess(
          self.token.RealUID(), hunt.urn)
      # Make sure the hunt is not running:
      if runner.IsHuntStarted():
        raise RuntimeError("Unable to modify a running hunt.")
      # Record changes in the audit event
      changes = []
      if runner.context.expires != self.args.expiry_time:
        changes.append("Expires: Old=%s, New=%s" % (runner.context.expires,
                                                    self.args.expiry_time))
      if runner.args.client_limit != self.args.client_limit:
        changes.append("Client Limit: Old=%s, New=%s" % (
            runner.args.client_limit, self.args.client_limit))
      description = ", ".join(changes)
      event = flow.AuditEvent(user=self.token.username,
                              action="HUNT_MODIFIED",
                              urn=self.args.hunt_urn,
                              description=description)
      flow.Events.PublishEvent("Audit", event, token=self.token)
      # Just go ahead and change the hunt now.
      runner.context.expires = self.args.expiry_time
      runner.args.client_limit = self.args.client_limit
class CheckHuntAccessFlowArgs(rdf_structs.RDFProtoStruct):
  """Arguments for CheckHuntAccessFlow (see matching protobuf)."""
  protobuf = flows_pb2.CheckHuntAccessFlowArgs
class CheckHuntAccessFlow(flow.GRRFlow):
  """Checks whether the current user may access the given hunt URN."""
  # This flow can run on any client without ACL enforcement (an SUID flow).
  ACL_ENFORCED = False
  args_type = CheckHuntAccessFlowArgs
  @flow.StateHandler()
  def Start(self):
    """Validate the hunt urn and delegate to the security manager."""
    if not self.args.hunt_urn:
      raise RuntimeError("hunt_urn was not provided.")
    # Only URNs under the "hunts" namespace are acceptable here.
    if self.args.hunt_urn.Split()[0] != "hunts":
      raise RuntimeError("invalid namespace in the hunt urn")
    data_store.DB.security_manager.CheckHuntAccess(
        self.token.RealUID(), self.args.hunt_urn)
class SampleHuntArgs(rdf_structs.RDFProtoStruct):
  """Arguments for SampleHunt (see matching protobuf)."""
  protobuf = flows_pb2.SampleHuntArgs
class SampleHunt(implementation.GRRHunt):
  """This hunt just looks for the presence of a evil.txt in /tmp.
  Scheduling the hunt works like this:
  > hunt = hunts.SampleHunt()
  # We want to schedule on clients that run windows and OS_RELEASE 7.
  > int_rule = rdf_foreman.ForemanAttributeInteger(
        attribute_name=client.Schema.OS_RELEASE.name,
        operator=rdf_foreman.ForemanAttributeInteger.Operator.EQUAL,
        value=7)
  > regex_rule = hunts.GRRHunt.MATCH_WINDOWS
  # Run the hunt when both those rules match.
  > hunt.AddRule([int_rule, regex_rule])
  # Now we can test how many clients in the database match the rules.
  # Warning, this might take some time since it looks at all the stored clients.
  > hunt.TestRules()
  Out of 3171 checked clients, 2918 matched the given rule set.
  # This looks good, we exclude the few Linux / Mac clients in the datastore.
  # Now we can start the hunt. Note that this hunt is actually designed for
  # Linux / Mac clients so the example rules should not be used for this hunt.
  > hunt.Run()
  """
  args_type = SampleHuntArgs
  @flow.StateHandler()
  def RunClient(self, responses):
    """Launch a GetFile flow for args.filename on each scheduled client."""
    pathspec = rdf_paths.PathSpec(pathtype=rdf_paths.PathSpec.PathType.OS,
                                  path=self.args.filename)
    for client_id in responses:
      self.CallFlow("GetFile", pathspec=pathspec, next_state="StoreResults",
                    client_id=client_id)
  @flow.StateHandler()
  def StoreResults(self, responses):
    """Stores the responses."""
    client_id = responses.request.client_id
    # GetFile succeeds only when the file exists on the client.
    if responses.success:
      logging.info("Client %s has a file %s.", client_id,
                   self.args.filename)
    else:
      logging.info("Client %s has no file %s.", client_id,
                   self.args.filename)
    self.MarkClientDone(client_id)
class HuntResultsMetadata(aff4.AFF4Object):
  """Metadata AFF4 object used by CronHuntOutputFlow."""
  class SchemaCls(aff4.AFF4Object.SchemaCls):
    """AFF4 schema for CronHuntOutputMetadata."""
    # Progress marker so the cron job can resume where it left off.
    NUM_PROCESSED_RESULTS = aff4.Attribute(
        "aff4:num_processed_results", rdfvalue.RDFInteger,
        "Number of hunt results already processed by the cron job.",
        versioned=False, default=0)
    # Serialized state of the hunt's output plugins.
    OUTPUT_PLUGINS = aff4.Attribute(
        "aff4:output_plugins_state", rdf_flows.FlowState,
        "Pickled output plugins.", versioned=False)
class ProcessHuntResultsCronFlowArgs(rdf_structs.RDFProtoStruct):
  """Arguments for ProcessHuntResultsCronFlow (see matching protobuf)."""
  protobuf = flows_pb2.ProcessHuntResultsCronFlowArgs
class ProcessHuntResultsCronFlow(cronjobs.SystemCronFlow):
  """Periodic cron flow that processes hunts results with output plugins."""
  frequency = rdfvalue.Duration("5m")
  lifetime = rdfvalue.Duration("40m")
  args_type = ProcessHuntResultsCronFlowArgs
  # Number of results fed to plugins per batch unless args.batch_size is set.
  DEFAULT_BATCH_SIZE = 1000
  MAX_REVERSED_RESULTS = 500000
  def CheckIfRunningTooLong(self):
    """Return True once elapsed runtime exceeds args.max_running_time."""
    if self.state.args.max_running_time:
      elapsed = (rdfvalue.RDFDatetime().Now().AsSecondsFromEpoch() -
                 self.start_time.AsSecondsFromEpoch())
      if elapsed > self.state.args.max_running_time:
        return True
    return False
  def StatusCollectionUrn(self, hunt_urn):
    """URN of the per-hunt collection of plugin batch statuses."""
    return hunt_urn.Add("OutputPluginsStatus")
  def ErrorsCollectionUrn(self, hunt_urn):
    """URN of the per-hunt collection of plugin batch errors."""
    return hunt_urn.Add("OutputPluginsErrors")
  def ApplyPluginsToBatch(self, hunt_urn, plugins, batch, batch_index):
    """Feed one batch of results to every plugin; return exceptions by plugin."""
    exceptions_by_plugin = {}
    for plugin_def, plugin in plugins:
      logging.debug("Processing hunt %s with %s, batch %d", hunt_urn,
                    plugin_def.plugin_name, batch_index)
      try:
        plugin.ProcessResponses(batch)
        stats.STATS.IncrementCounter("hunt_results_ran_through_plugin",
                                     delta=len(batch),
                                     fields=[plugin_def.plugin_name])
        plugin_status = OutputPluginBatchProcessingStatus(
            plugin_descriptor=plugin_def,
            status="SUCCESS",
            batch_index=batch_index,
            batch_size=len(batch))
      except Exception as e:  # pylint: disable=broad-except
        # A failing plugin must not prevent other plugins from running.
        stats.STATS.IncrementCounter("hunt_output_plugin_errors",
                                     fields=[plugin_def.plugin_name])
        plugin_status = OutputPluginBatchProcessingStatus(
            plugin_descriptor=plugin_def,
            status="ERROR",
            summary=utils.SmartStr(e),
            batch_index=batch_index,
            batch_size=len(batch))
        logging.exception("Error processing hunt results: hunt %s, "
                          "plugin %s, batch %d", hunt_urn,
                          plugin_def.plugin_name, batch_index)
        self.Log("Error processing hunt results (hunt %s, "
                 "plugin %s, batch %d): %s" %
                 (hunt_urn, plugin_def.plugin_name, batch_index, e))
        exceptions_by_plugin[plugin_def] = e
      collections.PackedVersionedCollection.AddToCollection(
          self.StatusCollectionUrn(hunt_urn),
          [plugin_status], sync=False, token=self.token)
      if plugin_status.status == plugin_status.Status.ERROR:
        collections.PackedVersionedCollection.AddToCollection(
            self.ErrorsCollectionUrn(hunt_urn),
            [plugin_status], sync=False, token=self.token)
    return exceptions_by_plugin
  def FlushPlugins(self, hunt_urn, plugins):
    """Flush every plugin, collecting (but not raising) their exceptions."""
    flush_exceptions = {}
    for plugin_def, plugin in plugins:
      try:
        plugin.Flush()
      except Exception as e:  # pylint: disable=broad-except
        logging.exception("Error flushing hunt results: hunt %s, "
                          "plugin %s", hunt_urn, str(plugin))
        self.Log("Error processing hunt results (hunt %s, "
                 "plugin %s): %s" % (hunt_urn, str(plugin), e))
        flush_exceptions[plugin_def] = e
    return flush_exceptions
  def ProcessHuntResults(self, results, freeze_timestamp):
    """Run all output plugins over the unprocessed results of one hunt."""
    plugins_exceptions = {}
    hunt_urn = results.Get(results.Schema.RESULTS_SOURCE)
    metadata_urn = hunt_urn.Add("ResultsMetadata")
    batch_size = self.state.args.batch_size or self.DEFAULT_BATCH_SIZE
    batches = utils.Grouper(results.GenerateUncompactedItems(
        max_reversed_results=self.MAX_REVERSED_RESULTS,
        timestamp=freeze_timestamp), batch_size)
    with aff4.FACTORY.Open(
        metadata_urn, mode="rw", token=self.token) as metadata_obj:
      output_plugins = metadata_obj.Get(metadata_obj.Schema.OUTPUT_PLUGINS)
      num_processed = int(metadata_obj.Get(
          metadata_obj.Schema.NUM_PROCESSED_RESULTS))
      used_plugins = []
      for batch_index, batch in enumerate(batches):
        batch = list(batch)
        num_processed += len(batch)
        # Plugins are instantiated lazily, on the first non-empty batch.
        if not used_plugins:
          for _, (plugin_def, state) in output_plugins.data.iteritems():
            # TODO(user): Remove as soon as migration to new-style
            # output plugins is completed.
            if not hasattr(plugin_def, "GetPluginForState"):
              logging.error("Invalid plugin_def: %s", plugin_def)
              continue
            used_plugins.append((plugin_def,
                                 plugin_def.GetPluginForState(state)))
        batch_exceptions = self.ApplyPluginsToBatch(hunt_urn, used_plugins,
                                                    batch, batch_index)
        if batch_exceptions:
          for key, value in batch_exceptions.items():
            plugins_exceptions.setdefault(key, []).append(value)
        self.HeartBeat()
        # If this flow is working for more than max_running_time - stop
        # processing.
        if self.CheckIfRunningTooLong():
          self.Log("Running for too long, skipping rest of batches for %s",
                   hunt_urn)
          break
      if not used_plugins:
        logging.debug("Got notification, but no results were processed for %s.",
                      hunt_urn)
      flush_exceptions = self.FlushPlugins(hunt_urn, used_plugins)
      plugins_exceptions.update(flush_exceptions)
      # Persist the (possibly mutated) plugin state and the progress marker.
      metadata_obj.Set(metadata_obj.Schema.OUTPUT_PLUGINS(output_plugins))
      metadata_obj.Set(metadata_obj.Schema.NUM_PROCESSED_RESULTS(num_processed))
      return plugins_exceptions
  @flow.StateHandler()
  def Start(self):
    """Start state of the flow."""
    # If max_running_time is not specified, set it to 60% of this job's
    # lifetime.
    if not self.state.args.max_running_time:
      self.state.args.max_running_time = rdfvalue.Duration(
          "%ds" % int(ProcessHuntResultsCronFlow.lifetime.seconds * 0.6))
    self.start_time = rdfvalue.RDFDatetime().Now()
    exceptions_by_hunt = {}
    freeze_timestamp = rdfvalue.RDFDatetime().Now()
    for results_urn in aff4.ResultsOutputCollection.QueryNotifications(
        timestamp=freeze_timestamp, token=self.token):
      aff4.ResultsOutputCollection.DeleteNotifications(
          [results_urn], end=results_urn.age, token=self.token)
      # Feed the results to output plugins
      try:
        results = aff4.FACTORY.Open(
            results_urn, aff4_type="ResultsOutputCollection", token=self.token)
      except aff4.InstantiationError:  # Collection does not exist.
        continue
      exceptions_by_plugin = self.ProcessHuntResults(results, freeze_timestamp)
      if exceptions_by_plugin:
        hunt_urn = results.Get(results.Schema.RESULTS_SOURCE)
        exceptions_by_hunt[hunt_urn] = exceptions_by_plugin
      lease_time = config_lib.CONFIG["Worker.compaction_lease_time"]
      try:
        with aff4.FACTORY.OpenWithLock(results_urn, blocking=False,
                                       aff4_type="ResultsOutputCollection",
                                       lease_time=lease_time,
                                       token=self.token) as results:
          num_compacted = results.Compact(callback=self.HeartBeat,
                                          timestamp=freeze_timestamp)
          stats.STATS.IncrementCounter("hunt_results_compacted",
                                       delta=num_compacted)
          logging.debug("Compacted %d results in %s.", num_compacted,
                        results_urn)
      except aff4.LockError:
        logging.error("Trying to compact a collection that's already "
                      "locked: %s", results_urn)
        stats.STATS.IncrementCounter("hunt_results_compaction_locking_errors")
      if self.CheckIfRunningTooLong():
        self.Log("Running for too long, skipping rest of hunts.")
        break
    # Surface all plugin errors at once so the cron run is marked failed.
    if exceptions_by_hunt:
      e = ResultsProcessingError()
      for hunt_urn, exceptions_by_plugin in exceptions_by_hunt.items():
        for plugin_name, exceptions in exceptions_by_plugin.items():
          for exception in exceptions:
            e.RegisterSubException(hunt_urn, plugin_name, exception)
      raise e
class GenericHuntArgs(rdf_structs.RDFProtoStruct):
  """Arguments to the generic hunt."""
  protobuf = flows_pb2.GenericHuntArgs
  def Validate(self):
    """Validate both the runner args and the flow-specific args."""
    self.flow_runner_args.Validate()
    self.flow_args.Validate()
  def GetFlowArgsClass(self):
    """Return the args rdfvalue class of the flow this hunt will run."""
    if self.flow_runner_args.flow_name:
      flow_cls = flow.GRRFlow.classes.get(self.flow_runner_args.flow_name)
      if flow_cls is None:
        raise ValueError("Flow '%s' not known by this implementation." %
                         self.flow_runner_args.flow_name)
      # The required protobuf for this class is in args_type.
      return flow_cls.args_type
class GenericHunt(implementation.GRRHunt):
  """This is a hunt to start any flow on multiple clients."""
  args_type = GenericHuntArgs
  @property
  def started_flows_collection_urn(self):
    # Collection tracking every child flow launched by this hunt.
    return self.urn.Add("StartedFlows")
  @flow.StateHandler()
  def Start(self):
    """Initialize the hunt and create its started-flows collection."""
    super(GenericHunt, self).Start()
    with aff4.FACTORY.Create(self.started_flows_collection_urn,
                             "PackedVersionedCollection",
                             mode="w", token=self.token):
      pass
  @flow.StateHandler(next_state=["MarkDone"])
  def RunClient(self, responses):
    """Launch the configured flow on each scheduled client."""
    started_flows = []
    # Just run the flow on this client.
    for client_id in responses:
      flow_urn = self.CallFlow(
          args=self.state.args.flow_args, client_id=client_id,
          next_state="MarkDone", sync=False,
          runner_args=self.state.args.flow_runner_args)
      started_flows.append(flow_urn)
    collections.PackedVersionedCollection.AddToCollection(
        self.started_flows_collection_urn, started_flows, sync=False,
        token=self.token)
  def Stop(self):
    """Stop the hunt and mark all its started flows for termination."""
    super(GenericHunt, self).Stop()
    started_flows = aff4.FACTORY.Create(self.started_flows_collection_urn,
                                        "PackedVersionedCollection",
                                        mode="r", token=self.token)
    self.Log("Hunt stop. Terminating all the started flows.")
    num_terminated_flows = 0
    for started_flow in started_flows:
      flow.GRRFlow.MarkForTermination(started_flow,
                                      reason="Parent hunt stopped.",
                                      token=self.token)
      num_terminated_flows += 1
    self.Log("%d flows terminated.", num_terminated_flows)
  def GetLaunchedFlows(self, flow_type="outstanding"):
    """Returns the session IDs of all the flows we launched.
    Args:
      flow_type: The type of flows to fetch. Can be "all", "outstanding" or
        "finished".
    Returns:
      A list of flow URNs.
    """
    result = None
    all_clients = set(self.ListAllClients())
    finished_clients = set(self.ListFinishedClients())
    outstanding_clients = all_clients - finished_clients
    if flow_type == "all":
      result = all_clients
    elif flow_type == "finished":
      result = finished_clients
    elif flow_type == "outstanding":
      result = outstanding_clients
    # Now get the flows for all these clients.
    flows = aff4.FACTORY.MultiListChildren(
        [self.urn.Add(x.Basename()) for x in result])
    return [x[0] for _, x in flows]
  def StoreResourceUsage(self, responses, client_id):
    """Open child flow and account its' reported resource usage."""
    flow_path = responses.status.child_session_id
    status = responses.status
    resources = rdf_client.ClientResources()
    resources.client_id = client_id
    resources.session_id = flow_path
    resources.cpu_usage.user_cpu_time = status.cpu_time_used.user_cpu_time
    resources.cpu_usage.system_cpu_time = status.cpu_time_used.system_cpu_time
    resources.network_bytes_sent = status.network_bytes_sent
    self.state.context.usage_stats.RegisterResources(resources)
  @flow.StateHandler()
  def MarkDone(self, responses):
    """Mark a client as done."""
    client_id = responses.request.client_id
    self.StoreResourceUsage(responses, client_id)
    self.AddResultsToCollection(responses, client_id)
    self.MarkClientDone(client_id)
class FlowRequest(rdf_structs.RDFProtoStruct):
  """A single flow request used by VariableGenericHunt (see protobuf)."""
  protobuf = flows_pb2.FlowRequest
  def GetFlowArgsClass(self):
    """Return the args rdfvalue class of the flow named in runner_args."""
    if self.runner_args.flow_name:
      flow_cls = flow.GRRFlow.classes.get(self.runner_args.flow_name)
      if flow_cls is None:
        raise ValueError("Flow %s not known by this implementation." %
                         self.runner_args.flow_name)
      # The required protobuf for this class is in args_type.
      return flow_cls.args_type
class VariableGenericHuntArgs(rdf_structs.RDFProtoStruct):
  """Arguments for VariableGenericHunt (see matching protobuf)."""
  protobuf = flows_pb2.VariableGenericHuntArgs
class VariableGenericHunt(GenericHunt):
  """A generic hunt using different flows for each client."""
  args_type = VariableGenericHuntArgs
  def SetDescription(self, description=None):
    """Set the hunt description, defaulting to a generic name."""
    self.state.context.args.description = description or "Variable Generic Hunt"
  @flow.StateHandler(next_state=["MarkDone"])
  def RunClient(self, responses):
    """Launch, per client, only the flow requests targeting that client."""
    started_flows = []
    for client_id in responses:
      for flow_request in self.state.args.flows:
        for requested_client_id in flow_request.client_ids:
          if requested_client_id == client_id:
            flow_urn = self.CallFlow(
                args=flow_request.args,
                runner_args=flow_request.runner_args,
                next_state="MarkDone", client_id=client_id)
            started_flows.append(flow_urn)
    collections.PackedVersionedCollection.AddToCollection(
        self.started_flows_collection_urn, started_flows, sync=False,
        token=self.token)
  def ManuallyScheduleClients(self, token=None):
    """Schedule all flows without using the Foreman.
    Since we know all the client ids to run on we might as well just schedule
    all the flows and wait for the results.
    Args:
      token: A datastore access token.
    """
    client_ids = set()
    for flow_request in self.state.args.flows:
      for client_id in flow_request.client_ids:
        client_ids.add(client_id)
    self.StartClients(self.session_id, client_ids, token=token)
class StatsHunt(implementation.GRRHunt):
"""A Hunt to continuously collect stats from all clients.
This hunt is very unusual, it doesn't call any flows, instead using CallClient
directly. This is done to minimise the message handling and server load
caused by collecting this information with a short time period.
TODO(user): implement a aff4 object cleanup cron that we can use to
automatically delete the collections generated by this hunt.
"""
args_type = GenericHuntArgs
client_list = None
client_list_lock = None
def Start(self, **kwargs):
super(StatsHunt, self).Start(**kwargs)
# Force all client communication to be LOW_PRIORITY. This ensures that
# clients do not switch to fast poll mode when returning stats messages.
self.runner.args.priority = "LOW_PRIORITY"
self.runner.args.require_fastpoll = False
# The first time we're loaded we create these variables here. After we are
# sent to storage we recreate them in the Load method.
self._MakeLock()
def _MakeLock(self):
if self.client_list is None:
self.client_list = []
if self.client_list_lock is None:
self.client_list_lock = threading.RLock()
def Load(self):
super(StatsHunt, self).Load()
self._MakeLock()
def Save(self):
# Make sure we call any remaining clients before we are saved
with self.client_list_lock:
call_list, self.client_list = self.client_list, None
self._CallClients(call_list)
super(StatsHunt, self).Save()
@flow.StateHandler()
def RunClient(self, responses):
client_call_list = self._GetCallClientList(responses)
self._CallClients(client_call_list)
def _GetCallClientList(self, client_ids):
"""Use self.client_list to determine clients that need calling.
Batch calls into StatsHunt.ClientBatchSize (or larger) chunks.
Args:
client_ids: list of client ids
Returns:
list of client IDs that should be called with callclient.
"""
call_list = []
with self.client_list_lock:
self.client_list.extend(client_ids)
if len(self.client_list) >= config_lib.CONFIG[
"StatsHunt.ClientBatchSize"]:
# We have enough clients ready to process, take a copy of the list so we
# can release the lock.
call_list, self.client_list = self.client_list, []
return call_list
def _CallClients(self, client_id_list):
now = rdfvalue.RDFDatetime().Now()
due = now + rdfvalue.Duration(
config_lib.CONFIG["StatsHunt.CollectionInterval"])
for client in aff4.FACTORY.MultiOpen(client_id_list,
token=self.token):
if client.Get(client.SchemaCls.SYSTEM) == "Windows":
wmi_query = ("Select * from Win32_NetworkAdapterConfiguration where"
" IPEnabled=1")
self.CallClient("WmiQuery", query=wmi_query,
next_state="StoreResults", client_id=client.urn,
start_time=due)
else:
self.CallClient("EnumerateInterfaces", next_state="StoreResults",
client_id=client.urn, start_time=due)
def ProcessInterface(self, response):
"""Filter out localhost interfaces."""
if response.mac_address != "000000000000" and response.ifname != "lo":
return response
  @flow.StateHandler()
  def StoreResults(self, responses):
    """Store interface results and re-schedule the client if still running.

    Interface responses are filtered through ProcessInterface; WMI dict
    responses are parsed by WMIInterfacesParser. The processed results are
    re-wrapped as FakeResponses so they can go through the normal result
    collection path.
    """
    client_id = responses.request.client_id
    # TODO(user): Should we record client usage stats?
    processed_responses = []
    wmi_interface_parser = wmi_parser.WMIInterfacesParser()
    for response in responses:
      if isinstance(response, rdf_client.Interface):
        # Non-Windows path: drop loopback/zero-MAC interfaces.
        processed_responses.extend(
            filter(None, [self.ProcessInterface(response)]))
      elif isinstance(response, rdf_protodict.Dict):
        # This is a result from the WMIQuery call.
        processed_responses.extend(list(
            wmi_interface_parser.Parse(None, response, None)))
    new_responses = flow.FakeResponses(processed_responses,
                                       responses.request_data)
    new_responses.success = responses.success
    new_responses.status = responses.status
    self.AddResultsToCollection(new_responses, client_id)
    # Respect both the expiry and pause controls, since this will otherwise run
    # forever. Pausing will effectively stop this hunt, and a new one will need
    # to be created.
    if self.runner.IsHuntStarted():
      # Re-issue the request to the client for the next collection.
      client_call_list = self._GetCallClientList([client_id])
      if client_call_list:
        self._CallClients(client_call_list)
    else:
      self.MarkClientDone(client_id)
class StandardHuntInitHook(registry.InitHook):
  """Registers the standard hunt-related metrics at init time."""

  pre = ["StatsInit"]

  def RunOnce(self):
    """Register standard hunt-related stats."""
    # Per-plugin counters share the same field definition.
    for metric_name in ("hunt_output_plugin_errors",
                        "hunt_results_ran_through_plugin"):
      stats.STATS.RegisterCounterMetric(metric_name,
                                        fields=[("plugin", str)])
    stats.STATS.RegisterCounterMetric("hunt_results_compacted")
    stats.STATS.RegisterCounterMetric("hunt_results_compaction_locking_errors")
|
apache-2.0
|
lunafeng/django
|
tests/user_commands/management/commands/hal.py
|
372
|
1024
|
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    help = "Useless command."

    def add_arguments(self, parser):
        """Declare the positional app labels and the --empty flag."""
        parser.add_argument(
            'args', metavar='app_label', nargs='*',
            help='Specify the app label(s) to works on.')
        parser.add_argument(
            '--empty', action='store_true', dest='empty', default=False,
            help="Do nothing.")

    def handle(self, *app_labels, **options):
        """Validate the arguments and write HAL-style responses."""
        labels = set(app_labels)
        if options['empty']:
            self.stdout.write("Dave, I can't do that.")
            return
        if not labels:
            raise CommandError("I'm sorry Dave, I'm afraid I can't do that.")
        # Reject anything that looks like an option leaking into the
        # positional arguments.
        for label in labels:
            if label.startswith('--'):
                raise CommandError("Sorry, Dave, I can't let you do that.")
        self.stdout.write("Dave, my mind is going. I can feel it. I can feel it.")
|
bsd-3-clause
|
luogangyi/Ceilometer-oVirt
|
ceilometer/collector.py
|
2
|
5792
|
#
# Copyright 2012-2013 eNovance <licensing@enovance.com>
#
# Author: Julien Danjou <julien@danjou.info>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import socket
import msgpack
from oslo.config import cfg
import oslo.messaging
from oslo.utils import units
from ceilometer import dispatcher
from ceilometer.dispatcher import redis_database
from ceilometer import messaging
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common.gettextutils import _LE
from ceilometer.openstack.common import log
from ceilometer.openstack.common import service as os_service
# Collector-specific configuration options, registered below under the
# "collector" group.
OPTS = [
    cfg.StrOpt('udp_address',
               default='0.0.0.0',
               help='Address to which the UDP socket is bound. Set to '
               'an empty string to disable.'),
    cfg.IntOpt('udp_port',
               default=4952,
               help='Port to which the UDP socket is bound.'),
    cfg.BoolOpt('requeue_sample_on_dispatcher_error',
                default=False,
                help='Requeue the sample on the collector sample queue '
                'when the collector fails to dispatch it. This is only valid '
                'if the sample come from the notifier publisher'),
]
cfg.CONF.register_opts(OPTS, group="collector")
# Reuse the metering topic names declared by the RPC and notifier publishers.
cfg.CONF.import_opt('metering_topic', 'ceilometer.publisher.messaging',
                    group="publisher_rpc")
cfg.CONF.import_opt('metering_topic', 'ceilometer.publisher.messaging',
                    group="publisher_notifier")
LOG = log.getLogger(__name__)
class CollectorService(os_service.Service):
    """Listener for the collector service.

    Receives metering samples over three channels — a raw UDP socket, an
    RPC server and a notification listener — and hands them to the
    configured dispatchers.
    """

    def start(self):
        """Bind the UDP socket and start the RPC/notification listeners."""
        # ensure dispatcher is configured before starting other services
        self.dispatcher_manager = dispatcher.load_dispatcher_manager()
        self.redis_dispatcher = redis_database.RedisDispatcher(cfg.CONF)
        self.rpc_server = None
        self.notification_server = None
        super(CollectorService, self).start()
        if cfg.CONF.collector.udp_address:
            self.tg.add_thread(self.start_udp)
        allow_requeue = cfg.CONF.collector.requeue_sample_on_dispatcher_error
        transport = messaging.get_transport(optional=True)
        # With no transport configured, only the UDP path (if any) is active.
        if transport:
            self.rpc_server = messaging.get_rpc_server(
                transport, cfg.CONF.publisher_rpc.metering_topic, self)
            target = oslo.messaging.Target(
                topic=cfg.CONF.publisher_notifier.metering_topic)
            self.notification_server = messaging.get_notification_listener(
                transport, [target], [self],
                allow_requeue=allow_requeue)
            self.rpc_server.start()
            self.notification_server.start()
        if not cfg.CONF.collector.udp_address:
            # Add a dummy thread to have wait() working
            self.tg.add_timer(604800, lambda: None)

    def start_udp(self):
        """Receive msgpack-encoded samples over UDP until stop() is called."""
        udp = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        udp.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        udp.bind((cfg.CONF.collector.udp_address,
                  cfg.CONF.collector.udp_port))
        self.udp_run = True
        while self.udp_run:
            # NOTE(jd) Arbitrary limit of 64K because that ought to be
            # enough for anybody.
            data, source = udp.recvfrom(64 * units.Ki)
            try:
                sample = msgpack.loads(data, encoding='utf-8')
            except Exception:
                LOG.warn(_("UDP: Cannot decode data sent by %s"), str(source))
            else:
                try:
                    LOG.debug(_("UDP: Storing %s"), str(sample))
                    # NOTE(review): UDP samples bypass the dispatcher manager
                    # and go straight to the redis dispatcher here.
                    self.redis_dispatcher.record_metering_data(sample)
                    #self.dispatcher_manager.map_method('record_metering_data',
                    #                                   sample)
                except Exception:
                    LOG.exception(_("UDP: Unable to store meter"))

    def stop(self):
        """Stop the UDP loop and the RPC/notification servers, if running."""
        self.udp_run = False
        if self.rpc_server:
            self.rpc_server.stop()
        if self.notification_server:
            self.notification_server.stop()
        super(CollectorService, self).stop()

    def sample(self, ctxt, publisher_id, event_type, payload, metadata):
        """RPC endpoint for notification messages

        When another service sends a notification over the message
        bus, this method receives it.  If requeueing is enabled, a failed
        dispatch puts the sample back on the queue instead of raising.
        """
        try:
            self.dispatcher_manager.map_method('record_metering_data',
                                               data=payload)
        except Exception:
            if cfg.CONF.collector.requeue_sample_on_dispatcher_error:
                LOG.exception(_LE("Dispatcher failed to handle the sample, "
                                  "requeue it."))
                return oslo.messaging.NotificationResult.REQUEUE
            raise

    def record_metering_data(self, context, data):
        """RPC endpoint for messages we send to ourselves.

        When the notification messages are re-published through the
        RPC publisher, this method receives them for processing.
        """
        self.dispatcher_manager.map_method('record_metering_data', data=data)
|
apache-2.0
|
duyet-website/api.duyet.net
|
lib/faker/providers/address/ko_KR/__init__.py
|
3
|
12654
|
# coding=utf-8
from __future__ import unicode_literals
from .. import Provider as AddressProvider
class Provider(AddressProvider):
    """
    Korean Address Provider
    =======================
    Korea has two address and postal code system.
    Address
    -------
    - Address based on land parcel numbers
    (지번 주소, OLD, but still used by some)
    - Address based on road names and building numbers (도로명 주소, NEW)
    :meth:`land_address` generate Address based on land parcel numbers and
    :meth:`road_address` generate Address based on road names and building
    numbers.
    Postal code
    -----------
    - Old postal code (6-digit, OLD and dead)
    - New postal code (5-digit, New)
    :meth:`old_postal_code` and :meth:`postcode` generate old 6-digit code
    and :meth:`postal_code` generate newer 5-digit code.
    Reference
    ---------
    - `Official Confirmation Providing that Old and New Addresses are Identical`__
    (warn: cert error)
    __ https://www.juso.go.kr/addridentity/AddrIdentityHelp.htm
    """
    # Suffixes appended to generated building names.
    building_suffixes = (
        '빌라',
        '아파트',
        '연립',
        '마을',
        '타운',
        '타워',
    )
    # Road, town and postal-code building blocks ('#' means a random digit).
    road_suffixes = ('로', '길', '거리', '가')
    town_suffixes = ('동', '리', '마을')
    postcode_formats = ('###-###',)
    new_postal_code_formats = ('#####',)
    # Top-level administrative divisions.
    metropolitan_cities = (
        '서울특별시',
        '부산광역시',
        '대구광역시',
        '인천광역시',
        '광주광역시',
        '대전광역시',
        '울산광역시',
        '세종특별자치시',
    )
    provinces = (
        '경기도',
        '강원도',
        '충청북도',
        '충청남도',
        '전라북도',
        '전라남도',
        '경상북도',
        '경상남도',
        '제주특별자치도',
    )
    # Cities (optionally with their districts) used with provinces.
    cities = (
        '파주시',
        '수원시',
        '수원시 권선구',
        '수원시 팔달구',
        '수원시 영통구',
        '성남시',
        '성남시 수정구',
        '성남시 중원구',
        '화성시',
        '성남시 분당구',
        '안양시',
        '안양시 만안구',
        '안양시 동안구',
        '부천시',
        '부천시 원미구',
        '부천시 소사구',
        '부천시 오정구',
        '광명시',
        '평택시',
        '이천시',
        '동두천시',
        '안산시',
        '안산시 상록구',
        '안산시 단원구',
        '안성시',
        '고양시',
        '고양시 덕양구',
        '고양시 일산동구',
        '고양시 일산서구',
        '과천시',
        '구리시',
        '남양주시',
        '오산시',
        '시흥시',
        '군포시',
        '의왕시',
        '하남시',
        '김포시',
        '용인시',
        '용인시 처인구',
        '용인시 기흥구',
        '용인시 수지구',
        '연천군',
        '가평군',
        '양평군',
        '광주시',
        '포천시',
        '양주시',
        '수원시 장안구',
        '의정부시',
        '여주시',
    )
    # Road-name stems; combined with road_suffixes by road().
    road_names = (
        '압구정',
        '도산대',
        '학동',
        '봉은사',
        '테헤란',
        '역삼',
        '논현',
        '언주',
        '강남대',
        '양재천',
        '삼성',
        '영동대',
        '개포',
        '선릉',
        '반포대',
        '서초중앙',
        '서초대',
        '잠실',
        '석촌호수',
        '백제고분',
        '가락',
        '오금',
    )
    # Boroughs (구) used with metropolitan cities.
    boroughs = (
        '종로구',
        '중구',
        '용산구',
        '성동구',
        '광진구',
        '동대문구',
        '중랑구',
        '성북구',
        '강북구',
        '도봉구',
        '노원구',
        '은평구',
        '서대문구',
        '마포구',
        '양천구',
        '강서구',
        '구로구',
        '금천구',
        '영등포구',
        '동작구',
        '관악구',
        '서초구',
        '강남구',
        '송파구',
        '강동구',
        '동구',
        '서구',
        '남구',
        '북구',
    )
    countries = ('가나', '가봉', '가이아나', '감비아', '과테말라', '그레나다', '그리스', '기니', '기니비사우',
                 '나미비아', '나우루', '나이지리아', '남수단', '남아프리카 공화국', '네덜란드 왕국', '네팔',
                 '노르웨이', '뉴질랜드', '니제르', '니카라과', '대한민국', '덴마크', '도미니카 공화국',
                 '도미니카 연방', '독일', '동티모르', '라오스', '라이베리아', '라트비아', '러시아', '레바논',
                 '레소토', '루마니아', '룩셈부르크', '르완다', '리비아', '리투아니아', '리히텐슈타인',
                 '마다가스카르', '마셜 제도', '마케도니아 공화국', '말라위', '말레이시아', '말리', '멕시코',
                 '모나코', '모로코', '모리셔스', '모리타니', '모잠비크', '몬테네그로', '몰도바', '몰디브',
                 '몰타', '몽골', '미국', '미얀마', '미크로네시아 연방', '바누아투', '바레인', '바베이도스',
                 '바하마', '방글라데시', '베냉', '베네수엘라', '베트남', '벨기에', '벨라루스', '벨리즈',
                 '보스니아 헤르체고비나', '보츠와나', '볼리비아', '부룬디', '부르키나파소', '부탄', '불가리아',
                 '브라질', '브루나이', '사모아', '사우디아라비아', '산마리노', '상투메 프린시페', '세네갈',
                 '세르비아', '세이셸', '세인트루시아', '세인트빈센트 그레나딘', '세인트키츠 네비스',
                 '소말리아', '솔로몬 제도', '수단', '수리남', '스리랑카', '스와질란드', '스웨덴', '스위스',
                 '스페인', '슬로바키아', '슬로베니아', '시리아', '시에라리온 공화국', '싱가포르',
                 '아랍에미리트', '아르메니아', '아르헨티나', '아이슬란드', '아이티', '아일랜드',
                 '아제르바이잔', '아프가니스탄', '안도라', '알바니아', '알제리', '앙골라', '앤티가 바부다',
                 '에리트레아', '에스토니아', '에콰도르', '에티오피아', '엘살바도르', '영국', '예멘', '오만',
                 '오스트레일리아', '오스트리아', '온두라스', '요르단', '우간다', '우루과이', '우즈베키스탄',
                 '우크라이나', '이라크', '이란', '이스라엘', '이집트', '이탈리아', '인도네시아', '일본',
                 '자메이카', '잠비아', '적도 기니', '조선민주주의인민공화국', '조지아', '중앙아프리카 공화국',
                 '중화인민공화국', '지부티', '짐바브웨', '차드', '체코', '칠레', '카메룬', '카보베르데',
                 '카자흐스탄', '카타르', '캄보디아', '캐나다', '케냐', '코모로', '코스타리카', '코트디부아르',
                 '콜롬비아', '콩고 공화국', '콩고 민주 공화국', '쿠바', '쿠웨이트', '크로아티아',
                 '키르기스스탄', '키리바시', '키프로스', '타이', '타지키스탄', '탄자니아', '터키',
                 '토고', '통가', '투르크메니스탄', '투발루', '튀니지', '트리니다드 토바고', '파나마',
                 '파라과이', '파키스탄', '파푸아 뉴기니', '팔라우', '페루', '포르투갈', '폴란드', '프랑스',
                 '피지', '핀란드', '필리핀', '헝가리'
    )
    # Building wing/block labels; '#' entries expand to random digits.
    building_dongs = (
        '가',
        '나',
        '다',
        '라',
        '마',
        '바',
        '##',
        '###',
    )
    # Numeric patterns for land parcels and road numbers.
    land_numbers = (
        '###',
        '###-#',
        '###-##',
    )
    road_numbers = (
        '#',
        '##',
        '###',
    )
    # Composite patterns expanded via self.generator.parse().
    town_formats = (
        '{{first_name}}{{last_name}}{{town_suffix}}',
        '{{first_name}}{{last_name}}{{last_name}}{{town_suffix}}',
    )
    building_name_formats = (
        '{{first_name}}{{last_name}}{{building_suffix}}',
        '{{first_name}}{{last_name}}{{last_name}}{{building_suffix}}',
    )
    address_detail_formats = (
        '{{building_name}}',
        '{{building_name}} ###호',
        '{{building_name}} {{building_dong}}동 ###호',
    )
    road_formats = (
        '{{road_name}}{{road_suffix}}',
        '{{road_name}}{{road_number}}{{road_suffix}}',
    )
    road_address_formats = (
        '{{metropolitan_city}} {{borough}} {{road}}',
        '{{province}} {{city}} {{road}}',
        '{{metropolitan_city}} {{borough}} {{road}} ({{town}})',
        '{{province}} {{city}} {{road}} ({{town}})',
    )
    land_address_formats = (
        '{{metropolitan_city}} {{borough}} {{town}} {{land_number}}',
        '{{province}} {{city}} {{town}} {{land_number}}',
    )
    # Keep backward compatibility with the generic AddressProvider API.
    city_suffixes = ('시',)
    street_suffixes = road_suffixes
    street_name_formats = ('{{road_name}}',)
    street_address_formats = road_address_formats
    address_formats = road_address_formats

    @classmethod
    def land_number(cls):
        """Return a random land parcel number.

        :example 507
        """
        return cls.bothify(cls.random_element(cls.land_numbers))

    def land_address(self):
        """Return an address based on land parcel numbers.

        :example 세종특별자치시 어진동 507
        """
        pattern = self.random_element(self.land_address_formats)
        return self.generator.parse(pattern)

    @classmethod
    def road_number(cls):
        """Return a random road/building number.

        :example 24
        """
        return cls.bothify(cls.random_element(cls.road_numbers))

    def road_address(self):
        """Return an address based on road names and building numbers.

        :example 세종특별자치시 도움5로 19 (어진동)
        """
        pattern = self.random_element(self.road_address_formats)
        return self.generator.parse(pattern)

    def address_detail(self):
        """Return a building/unit detail string.

        :example 가나아파트 가동 102호
        """
        pattern = self.bothify(self.random_element(
            self.address_detail_formats))
        return self.generator.parse(pattern)

    def road(self):
        """Return a full road name (stem plus suffix).

        :example 도움5로
        """
        pattern = self.random_element(self.road_formats)
        return self.generator.parse(pattern)

    @classmethod
    def road_name(cls):
        """Return a road-name stem.

        :example 압구정
        """
        return cls.random_element(cls.road_names)

    @classmethod
    def road_suffix(cls):
        """Return a road suffix.

        :example 길
        """
        return cls.random_element(cls.road_suffixes)

    @classmethod
    def metropolitan_city(cls):
        """Return a metropolitan city name.

        :example 서울특별시
        """
        return cls.random_element(cls.metropolitan_cities)

    @classmethod
    def province(cls):
        """Return a province name.

        :example 경기도
        """
        return cls.random_element(cls.provinces)

    def city(self):
        """Return a city (possibly including its district).

        :example 고양시
        """
        pattern = self.random_element(self.cities)
        return self.generator.parse(pattern)

    @classmethod
    def borough(cls):
        """Return a borough (구) name.

        :example 중구
        """
        return cls.random_element(cls.boroughs)

    def town(self):
        """Return a generated town name.

        :example 가나동
        """
        pattern = self.random_element(self.town_formats)
        return self.generator.parse(pattern)

    @classmethod
    def town_suffix(cls):
        """Return a town suffix.

        :example 동
        """
        return cls.random_element(cls.town_suffixes)

    def building_name(self):
        """Return a generated building name.

        :example 김구아파트
        """
        pattern = self.random_element(self.building_name_formats)
        return self.generator.parse(pattern)

    @classmethod
    def building_suffix(cls):
        """Return a building suffix.

        :example 아파트
        """
        return cls.random_element(cls.building_suffixes)

    @classmethod
    def building_dong(cls):
        """Return a building wing/block label.

        :example 가
        """
        return cls.bothify(cls.random_element(cls.building_dongs))

    @classmethod
    def old_postal_code(cls):
        """Return an old-style 6-digit postal code.

        :example 123-456
        """
        return cls.bothify(cls.random_element(cls.postcode_formats))

    @classmethod
    def postal_code(cls):
        """Return a new-style 5-digit postal code.

        :example 12345
        """
        return cls.bothify(cls.random_element(cls.new_postal_code_formats))
|
mit
|
thresholdsoftware/asylum
|
openerp/addons/account/wizard/account_move_line_reconcile_select.py
|
56
|
2399
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class account_move_line_reconcile_select(osv.osv_memory):
    _name = "account.move.line.reconcile.select"
    _description = "Move line reconcile select"
    # Only accounts flagged as reconcilable can be selected.
    _columns = {
        'account_id': fields.many2one('account.account', 'Account', \
            domain = [('reconcile', '=', 1)], required=True),
    }
    def action_open_window(self, cr, uid, ids, context=None):
        """Open the account move line window for reconciliation.

        Opens unreconciled, non-draft move lines for the account selected
        in the wizard.

        :param cr: the current row, from the database cursor
        :param uid: the current user's ID for security checks
        :param ids: account move line reconcile select's ID or list of IDs
        :return: action dictionary opening the account move line window
            for reconciliation on the given account id
        """
        data = self.read(cr, uid, ids, context=context)[0]
        return {
            'domain': "[('account_id','=',%d),('reconcile_id','=',False),('state','<>','draft')]" % data['account_id'],
            'name': _('Reconciliation'),
            'view_type': 'form',
            'view_mode': 'tree,form',
            'view_id': False,
            'res_model': 'account.move.line',
            'type': 'ir.actions.act_window'
        }
# Instantiation registers the wizard model with the OpenERP ORM.
account_move_line_reconcile_select()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
umass-bib/bibcpp
|
scripts/cppProjectScripts/createTestDir.py
|
8
|
4212
|
#!/usr/bin/env python3
import shutil, os, argparse, sys, stat,errno
import CppHeaderParser
from string import replace
from argparse import Action
sys.path.append(os.path.join(os.path.dirname(os.path.dirname(__file__)), "pyUtils"))
from color_text import ColorText as CT
from headInGraph import *
# Catch test-case skeleton written (commented out) for every discovered
# function/method. {REPLACETHIS} is the plain name, {REPLACETHIS_DETAILED}
# the full rendered signature; literal braces are doubled for str.format.
testerBodyTemplate = """
/*
TEST_CASE("Basic tests for {REPLACETHIS}", "[{REPLACETHIS_DETAILED}]" ){{
SECTION("GIVE SECTION NAME"){{
YOUR CODE GOES HERE
NORMALLY END WITH A REQUIRE STATEMENT e.g.
REQUIRE(TESTVAL1 == YOURVAL);
}}
}}
*/
"""
def mkdir_p(path):
    """Create *path* and any missing parents, like ``mkdir -p``.

    An already-existing directory is fine; *path* existing as a
    non-directory (or any other failure) raises OSError, matching the
    original hand-rolled EEXIST handling.
    """
    # The script targets Python 3 (see shebang); exist_ok replaces the
    # racy "catch EEXIST, then isdir()" dance with an atomic check.
    os.makedirs(path, exist_ok=True)
def mkdir_p_forFile(path):
    """Ensure the directory that will contain *path* exists."""
    parent = os.path.dirname(path)
    mkdir_p(parent)
def parse_args():
    """Parse the command-line options for the test-directory generator."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--src', type=str, required=True)
    parser.add_argument('--outDir', type=str, required=True)
    parser.add_argument('--overWrite', action='store_true')
    # parser.add_argument("--update", action='store_true')
    return parser.parse_args()
def getFuncDetailed(func):
    """Render a CppHeaderParser function dict as ``rtn name (type arg,...)``.

    References (&) and pointers (*) are appended to the parameter's raw
    type; parameters are comma-separated without spaces.
    """
    pieces = [func["rtnType"], " ", func["name"], " ("]
    for idx, par in enumerate(func["parameters"]):
        if idx:
            pieces.append(",")
        pieces.append(par["raw_type"])
        if par["reference"]:
            pieces.append("&")
        elif par["pointer"]:
            pieces.append("*")
        pieces.append(" " + par["name"])
    pieces.append(")")
    return "".join(pieces)
def createTestMain(path, overWrite):
    """Write a Catch main.cpp into *path*, honoring the overWrite flag."""
    mainBody = """
// based off https://github.com/philsquared/Catch/blob/master/docs/tutorial.md
#define CATCH_CONFIG_MAIN // This tells Catch to provide a main()
#include <catch.hpp>
"""
    mainPath = os.path.join(path, "main.cpp")
    if os.path.exists(mainPath):
        if not overWrite:
            print(mainPath, "already exists, use --overWrite to remove current")
            return
        os.remove(mainPath)
    with open(mainPath, "w") as mainFile:
        mainFile.write(mainBody)
def copyMakefile(fromLoc, dest, overWrite):
    """Copy the unit-test Makefile to *dest*, honoring the overWrite flag."""
    if os.path.exists(dest):
        if not overWrite:
            print(dest, "already exists, use --overWrite to replace it")
            return
        os.remove(dest)
    shutil.copy(fromLoc, dest)
def main():
    """Generate Catch test skeletons for every header under --src.

    For each header with at least one class or free function, writes a
    *Tester.cpp containing a commented-out TEST_CASE per public method
    and free function, then emits a Catch main.cpp and a Makefile into
    --outDir.
    """
    args = parse_args()
    headers = fileCollection.getHeaderFiles(args.src)
    for head in headers:
        try:
            cppHeader = CppHeaderParser.CppHeader(head)
        except CppHeaderParser.CppParseError as e:
            print(e)
            sys.exit(1)
        print(CT.boldBlack("Class public methods"))
        if(len(cppHeader.classes) + len(cppHeader.functions) > 0):
            # Mirror the header path under outDir, renaming .hpp -> Tester.cpp.
            testerCppPath = os.path.join(args.outDir,head.replace(".hpp", "Tester.cpp"))
            mkdir_p_forFile(testerCppPath)
            if os.path.exists(testerCppPath):
                if args.overWrite:
                    os.remove(testerCppPath)
                else:
                    print("Skipping", testerCppPath, "it already exist, use --overWrite to replace")
                    continue
            with open(testerCppPath, "w") as testerFile:
                testerFile.write("#include <catch.hpp>\n")
                # Tester files live one level below the header they test.
                testerFile.write("#include \"" + "../" + head + "\"\n")
                for func in cppHeader.functions:
                    testerFile.write(testerBodyTemplate.format(REPLACETHIS=func["name"], REPLACETHIS_DETAILED = getFuncDetailed(func)))
                for k in list(cppHeader.classes.keys()):
                    for i in range(len(cppHeader.classes[k]["methods"]["public"])):
                        testerFile.write(testerBodyTemplate.format(REPLACETHIS=cppHeader.classes[k]["methods"]["public"][i]["name"], REPLACETHIS_DETAILED = getFuncDetailed(cppHeader.classes[k]["methods"]["public"][i])))
    createTestMain(os.path.join(args.outDir, args.src), args.overWrite)
    copyMakefile("scripts/cppMakefiles/unitTest/Makefile", os.path.join(args.outDir, "Makefile"), args.overWrite)
    return 0
main()
|
gpl-3.0
|
Andrey-Pavlov/phantomjs
|
src/qt/qtwebkit/Tools/QueueStatusServer/handlers/showresults.py
|
146
|
1943
|
# Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from google.appengine.ext import webapp
from model.queuestatus import QueueStatus
class ShowResults(webapp.RequestHandler):
    """Serves the stored results file of a single QueueStatus record."""

    def get(self, status_id):
        """Write the results as UTF-8 plain text, or 404 if unknown."""
        status = QueueStatus.get_by_id(int(status_id))
        if not status:
            self.error(404)
            return
        headers = self.response.headers
        headers["Content-Type"] = "text/plain; charset=utf-8"
        self.response.out.write(status.results_file)
|
bsd-3-clause
|
sauloal/pycluster
|
pypy-1.9_64/lib-python/2.7/tempfile.py
|
89
|
17875
|
"""Temporary files.
This module provides generic, low- and high-level interfaces for
creating temporary files and directories. The interfaces listed
as "safe" just below can be used without fear of race conditions.
Those listed as "unsafe" cannot, and are provided for backward
compatibility only.
This module also provides some data items to the user:
TMP_MAX - maximum number of names that will be tried before
giving up.
template - the default prefix for all temporary names.
You may change this to control the default prefix.
tempdir - If this is set to a string before the first use of
any routine from this module, it will be considered as
another candidate location to store temporary files.
"""
# Explicit public API of the module.
__all__ = [
    "NamedTemporaryFile", "TemporaryFile", # high level safe interfaces
    "SpooledTemporaryFile",
    "mkstemp", "mkdtemp",                  # low level safe interfaces
    "mktemp",                              # deprecated unsafe interface
    "TMP_MAX", "gettempprefix",            # constants
    "tempdir", "gettempdir"
    ]
# Imports.
# Underscore aliases keep these names out of the module's public namespace.
import os as _os
import errno as _errno
from random import Random as _Random

try:
    from cStringIO import StringIO as _StringIO
except ImportError:
    from StringIO import StringIO as _StringIO

# Without fcntl, close-on-exec cannot be set and _set_cloexec is a no-op.
try:
    import fcntl as _fcntl
except ImportError:
    def _set_cloexec(fd):
        pass
else:
    def _set_cloexec(fd):
        # Mark fd close-on-exec so children do not inherit the temp file.
        try:
            flags = _fcntl.fcntl(fd, _fcntl.F_GETFD, 0)
        except IOError:
            pass
        else:
            # flags read successfully, modify
            flags |= _fcntl.FD_CLOEXEC
            _fcntl.fcntl(fd, _fcntl.F_SETFD, flags)

# dummy_thread provides the same lock API on threadless builds (Python 2).
try:
    import thread as _thread
except ImportError:
    import dummy_thread as _thread
_allocate_lock = _thread.allocate_lock

# O_EXCL guarantees we create the file rather than reuse an existing one.
_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
if hasattr(_os, 'O_NOINHERIT'):
    _text_openflags |= _os.O_NOINHERIT
if hasattr(_os, 'O_NOFOLLOW'):
    _text_openflags |= _os.O_NOFOLLOW

_bin_openflags = _text_openflags
if hasattr(_os, 'O_BINARY'):
    _bin_openflags |= _os.O_BINARY

# Cap on how many candidate names the interfaces below will try.
if hasattr(_os, 'TMP_MAX'):
    TMP_MAX = _os.TMP_MAX
else:
    TMP_MAX = 10000

template = "tmp"
# Internal routines.

# Guards the one-time initialization of module-level singletons below.
_once_lock = _allocate_lock()

# lstat is preferred so symlinks are not followed when probing names.
if hasattr(_os, "lstat"):
    _stat = _os.lstat
elif hasattr(_os, "stat"):
    _stat = _os.stat
else:
    # Fallback.  All we need is something that raises os.error if the
    # file doesn't exist.
    def _stat(fn):
        try:
            f = open(fn)
        except IOError:
            raise _os.error
        f.close()
def _exists(fn):
    """Return True if *fn* exists according to _stat, False otherwise."""
    try:
        _stat(fn)
    except _os.error:
        return False
    return True
class _RandomNameSequence:
"""An instance of _RandomNameSequence generates an endless
sequence of unpredictable strings which can safely be incorporated
into file names. Each string is six characters long. Multiple
threads can safely use the same instance at the same time.
_RandomNameSequence is an iterator."""
characters = ("abcdefghijklmnopqrstuvwxyz" +
"ABCDEFGHIJKLMNOPQRSTUVWXYZ" +
"0123456789_")
def __init__(self):
self.mutex = _allocate_lock()
self.rng = _Random()
self.normcase = _os.path.normcase
def __iter__(self):
return self
def next(self):
m = self.mutex
c = self.characters
choose = self.rng.choice
m.acquire()
try:
letters = [choose(c) for dummy in "123456"]
finally:
m.release()
return self.normcase(''.join(letters))
def _candidate_tempdir_list():
"""Generate a list of candidate temporary directories which
_get_default_tempdir will try."""
dirlist = []
# First, try the environment.
for envname in 'TMPDIR', 'TEMP', 'TMP':
dirname = _os.getenv(envname)
if dirname: dirlist.append(dirname)
# Failing that, try OS-specific locations.
if _os.name == 'riscos':
dirname = _os.getenv('Wimp$ScrapDir')
if dirname: dirlist.append(dirname)
elif _os.name == 'nt':
dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ])
else:
dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ])
# As a last resort, the current directory.
try:
dirlist.append(_os.getcwd())
except (AttributeError, _os.error):
dirlist.append(_os.curdir)
return dirlist
def _get_default_tempdir():
    """Calculate the default directory to use for temporary files.
    This routine should be called exactly once.

    We determine whether or not a candidate temp dir is usable by
    trying to create and write to a file in that directory.  If this
    is successful, the test file is deleted.  To prevent denial of
    service, the name of the test file must be randomized."""

    namer = _RandomNameSequence()
    dirlist = _candidate_tempdir_list()
    flags = _text_openflags

    for dir in dirlist:
        if dir != _os.curdir:
            dir = _os.path.normcase(_os.path.abspath(dir))
        # Try only a few names per directory.
        for seq in xrange(100):
            name = namer.next()
            filename = _os.path.join(dir, name)
            try:
                # 0600: readable/writable by the owner only.
                fd = _os.open(filename, flags, 0600)
                fp = _os.fdopen(fd, 'w')
                fp.write('blat')
                fp.close()
                _os.unlink(filename)
                del fp, fd
                return dir
            except (OSError, IOError), e:
                # EEXIST just means the random name collided; any other
                # error means this directory is unusable.
                if e[0] != _errno.EEXIST:
                    break # no point trying more names in this directory
                pass
    raise IOError, (_errno.ENOENT,
                    ("No usable temporary directory found in %s" % dirlist))
# Lazily-created module-wide name generator (see _get_candidate_names).
_name_sequence = None

def _get_candidate_names():
    """Common setup sequence for all user-callable interfaces."""
    global _name_sequence
    # Double-checked creation under _once_lock so only one sequence exists.
    if _name_sequence is None:
        _once_lock.acquire()
        try:
            if _name_sequence is None:
                _name_sequence = _RandomNameSequence()
        finally:
            _once_lock.release()
    return _name_sequence
def _mkstemp_inner(dir, pre, suf, flags):
    """Code common to mkstemp, TemporaryFile, and NamedTemporaryFile.

    Returns an (fd, absolute path) pair for a freshly created file.
    Relies on O_EXCL in *flags*: a name collision raises EEXIST and we
    simply try the next candidate name.
    """

    names = _get_candidate_names()

    for seq in xrange(TMP_MAX):
        name = names.next()
        file = _os.path.join(dir, pre + name + suf)
        try:
            # 0600: readable/writable by the creating user only.
            fd = _os.open(file, flags, 0600)
            _set_cloexec(fd)
            return (fd, _os.path.abspath(file))
        except OSError, e:
            if e.errno == _errno.EEXIST:
                continue # try again
            raise

    raise IOError, (_errno.EEXIST, "No usable temporary file name found")
# User visible interfaces.

def gettempprefix():
    """Accessor for tempfile.template, the default name prefix."""
    return template
# Module-wide default temp directory; resolved lazily by gettempdir().
tempdir = None

def gettempdir():
    """Accessor for tempfile.tempdir."""
    global tempdir
    # Double-checked initialization under _once_lock: the expensive
    # _get_default_tempdir probe runs at most once.
    if tempdir is None:
        _once_lock.acquire()
        try:
            if tempdir is None:
                tempdir = _get_default_tempdir()
        finally:
            _once_lock.release()
    return tempdir
def mkstemp(suffix="", prefix=template, dir=None, text=False):
    """User-callable function to create and return a unique temporary
    file.  The return value is a pair (fd, name) where fd is the
    file descriptor returned by os.open, and name is the filename.

    'suffix' and 'prefix', if given, wrap the generated name; 'dir'
    overrides the default directory; 'text' selects text mode instead of
    the default binary mode (a no-op on some operating systems).

    The file is readable and writable only by the creating user ID,
    executable by no one, and its descriptor is not inherited by
    children of this process.

    Caller is responsible for deleting the file when done with it.
    """

    if dir is None:
        dir = gettempdir()

    flags = _text_openflags if text else _bin_openflags

    return _mkstemp_inner(dir, prefix, suffix, flags)
def mkdtemp(suffix="", prefix=template, dir=None):
    """User-callable function to create and return a unique temporary
    directory.  The return value is the pathname of the directory.

    Arguments are as for mkstemp, except that the 'text' argument is
    not accepted.

    The directory is readable, writable, and searchable only by the
    creating user.

    Caller is responsible for deleting the directory when done with it.
    """

    if dir is None:
        dir = gettempdir()

    names = _get_candidate_names()

    for seq in xrange(TMP_MAX):
        name = names.next()
        file = _os.path.join(dir, prefix + name + suffix)
        try:
            # 0700: accessible by the creating user only. mkdir failing
            # with EEXIST means a name collision; try the next name.
            _os.mkdir(file, 0700)
            return file
        except OSError, e:
            if e.errno == _errno.EEXIST:
                continue # try again
            raise

    raise IOError, (_errno.EEXIST, "No usable temporary directory name found")
def mktemp(suffix="", prefix=template, dir=None):
    """User-callable function to return a unique temporary file name.  The
    file is not created.

    Arguments are as for mkstemp, except that the 'text' argument is
    not accepted.

    This function is unsafe and should not be used.  The file name
    refers to a file that did not exist at some point, but by the time
    you get around to creating it, someone else may have beaten you to
    the punch.
    """

##    from warnings import warn as _warn
##    _warn("mktemp is a potential security risk to your program",
##          RuntimeWarning, stacklevel=2)

    if dir is None:
        dir = gettempdir()

    names = _get_candidate_names()
    for seq in xrange(TMP_MAX):
        name = names.next()
        file = _os.path.join(dir, prefix + name + suffix)
        # Inherently racy: the name may be taken before the caller uses it.
        if not _exists(file):
            return file

    raise IOError, (_errno.EEXIST, "No usable temporary filename found")
class _TemporaryFileWrapper:
    """Temporary file wrapper

    This class provides a wrapper around files opened for
    temporary use.  In particular, it seeks to automatically
    remove the file when it is no longer needed.
    """

    def __init__(self, file, name, delete=True):
        # Assign via __dict__-backed attributes; __getattr__ only fires
        # for names NOT found here.
        self.file = file
        self.name = name
        self.close_called = False
        self.delete = delete

    def __getattr__(self, name):
        # Attribute lookups are delegated to the underlying file
        # and cached for non-numeric results
        # (i.e. methods are cached, closed and friends are not)
        file = self.__dict__['file']
        a = getattr(file, name)
        if not issubclass(type(a), type(0)):
            setattr(self, name, a)
        return a

    # The underlying __enter__ method returns the wrong object
    # (self.file) so override it to return the wrapper
    def __enter__(self):
        self.file.__enter__()
        return self

    # NT provides delete-on-close as a primitive, so we don't need
    # the wrapper to do anything special.  We still use it so that
    # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
    if _os.name != 'nt':
        # Cache the unlinker so we don't get spurious errors at
        # shutdown when the module-level "os" is None'd out.  Note
        # that this must be referenced as self.unlink, because the
        # name TemporaryFileWrapper may also get None'd out before
        # __del__ is called.
        unlink = _os.unlink

        def close(self):
            # Idempotent: close once, then unlink if deletion was requested.
            if not self.close_called:
                self.close_called = True
                self.file.close()
                if self.delete:
                    self.unlink(self.name)

        def __del__(self):
            self.close()

        # Need to trap __exit__ as well to ensure the file gets
        # deleted when used in a with statement
        def __exit__(self, exc, value, tb):
            result = self.file.__exit__(exc, value, tb)
            self.close()
            return result
    else:
        def __exit__(self, exc, value, tb):
            self.file.__exit__(exc, value, tb)
def NamedTemporaryFile(mode='w+b', bufsize=-1, suffix="",
                       prefix=template, dir=None, delete=True):
    """Create and return a temporary file.
    Arguments:
    'prefix', 'suffix', 'dir' -- as for mkstemp.
    'mode' -- the mode argument to os.fdopen (default "w+b").
    'bufsize' -- the buffer size argument to os.fdopen (default -1).
    'delete' -- whether the file is deleted on close (default True).
    The file is created as mkstemp() would do it.
    Returns an object with a file-like interface; the name of the file
    is accessible as file.name. The file will be automatically deleted
    when it is closed unless the 'delete' argument is set to False.
    """
    if dir is None:
        dir = gettempdir()
    # Pick binary vs. text open flags from the requested fdopen mode.
    if 'b' in mode:
        flags = _bin_openflags
    else:
        flags = _text_openflags
    # Setting O_TEMPORARY in the flags causes the OS to delete
    # the file when it is closed.  This is only supported by Windows.
    if _os.name == 'nt' and delete:
        flags |= _os.O_TEMPORARY
    (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
    file = _os.fdopen(fd, mode, bufsize)
    # The wrapper gives the file a usable .name and handles unlink-on-close.
    return _TemporaryFileWrapper(file, name, delete)
if _os.name != 'posix' or _os.sys.platform == 'cygwin':
    # On non-POSIX and Cygwin systems, assume that we cannot unlink a file
    # while it is open.
    TemporaryFile = NamedTemporaryFile
else:
    def TemporaryFile(mode='w+b', bufsize=-1, suffix="",
                      prefix=template, dir=None):
        """Create and return a temporary file.
        Arguments:
        'prefix', 'suffix', 'dir' -- as for mkstemp.
        'mode' -- the mode argument to os.fdopen (default "w+b").
        'bufsize' -- the buffer size argument to os.fdopen (default -1).
        The file is created as mkstemp() would do it.
        Returns an object with a file-like interface.  The file has no
        name, and will cease to exist when it is closed.
        """
        if dir is None:
            dir = gettempdir()
        # Pick binary vs. text open flags from the requested fdopen mode.
        if 'b' in mode:
            flags = _bin_openflags
        else:
            flags = _text_openflags
        (fd, name) = _mkstemp_inner(dir, prefix, suffix, flags)
        try:
            # Unlink immediately: the open descriptor keeps the data alive,
            # and the file vanishes from the filesystem namespace at once.
            _os.unlink(name)
            return _os.fdopen(fd, mode, bufsize)
        except:
            # Don't leak the descriptor if unlink/fdopen fails.
            _os.close(fd)
            raise
class SpooledTemporaryFile:
    """Temporary file wrapper, specialized to switch from
    StringIO to a real file when it exceeds a certain size or
    when a fileno is needed.
    """
    _rolled = False
    def __init__(self, max_size=0, mode='w+b', bufsize=-1,
                 suffix="", prefix=template, dir=None):
        # Start spooled in memory; the TemporaryFile arguments are retained
        # until (if ever) rollover() needs them, then deleted.
        self._file = _StringIO()
        self._max_size = max_size
        self._rolled = False
        self._TemporaryFileArgs = (mode, bufsize, suffix, prefix, dir)
    def _check(self, file):
        # Roll over to a real file once the spool exceeds max_size
        # (max_size == 0 means never roll over on size).
        if self._rolled: return
        max_size = self._max_size
        if max_size and file.tell() > max_size:
            self.rollover()
    def rollover(self):
        # Move the in-memory contents into a real temporary file,
        # preserving the current file position.
        if self._rolled: return
        file = self._file
        newfile = self._file = TemporaryFile(*self._TemporaryFileArgs)
        del self._TemporaryFileArgs
        newfile.write(file.getvalue())
        newfile.seek(file.tell(), 0)
        self._rolled = True
    # The method caching trick from NamedTemporaryFile
    # won't work here, because _file may change from a
    # _StringIO instance to a real file. So we list
    # all the methods directly.
    # Context management protocol
    def __enter__(self):
        if self._file.closed:
            raise ValueError("Cannot enter context with closed file")
        return self
    def __exit__(self, exc, value, tb):
        self._file.close()
    # file protocol
    def __iter__(self):
        return self._file.__iter__()
    def close(self):
        self._file.close()
    @property
    def closed(self):
        return self._file.closed
    @property
    def encoding(self):
        return self._file.encoding
    def fileno(self):
        # A real OS-level descriptor requires a real file, so force rollover.
        self.rollover()
        return self._file.fileno()
    def flush(self):
        self._file.flush()
    def isatty(self):
        return self._file.isatty()
    @property
    def mode(self):
        return self._file.mode
    @property
    def name(self):
        return self._file.name
    @property
    def newlines(self):
        return self._file.newlines
    def next(self):
        # BUG FIX: the underlying iterator method must be *called*.  The
        # previous code returned the bound method object itself, so
        # explicit next() calls never produced a line.
        return self._file.next()
    def read(self, *args):
        return self._file.read(*args)
    def readline(self, *args):
        return self._file.readline(*args)
    def readlines(self, *args):
        return self._file.readlines(*args)
    def seek(self, *args):
        self._file.seek(*args)
    @property
    def softspace(self):
        return self._file.softspace
    def tell(self):
        return self._file.tell()
    def truncate(self):
        self._file.truncate()
    def write(self, s):
        # Write, then check whether the size threshold was crossed.
        file = self._file
        rv = file.write(s)
        self._check(file)
        return rv
    def writelines(self, iterable):
        file = self._file
        rv = file.writelines(iterable)
        self._check(file)
        return rv
    def xreadlines(self, *args):
        return self._file.xreadlines(*args)
|
mit
|
dud225/incubator-airflow
|
tests/executor/test_executor.py
|
24
|
1058
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from airflow.executors.base_executor import BaseExecutor
class TestExecutor(BaseExecutor):
    """
    TestExecutor is used for unit testing purposes.
    """
    def execute_async(self, key, command, queue=None):
        # No task is actually executed; just report the queue depths.
        running_count = len(self.running)
        queued_count = len(self.queued_tasks)
        self.logger.debug("{} running task instances".format(running_count))
        self.logger.debug("{} in queue".format(queued_count))
    def heartbeat(self):
        # Intentionally a no-op for tests.
        pass
    def terminate(self):
        # Intentionally a no-op for tests.
        pass
    def end(self):
        # Flush any outstanding state before shutting down.
        self.sync()
|
apache-2.0
|
ClusterHQ/benchmark-server
|
setup.py
|
1
|
1061
|
from setuptools import setup
def read(path):
    """
    Read the contents of a file.
    """
    with open(path) as fobj:
        contents = fobj.read()
    return contents
# Package metadata; install_requires/extras are read from the pinned
# requirement files so pip and the repo stay in sync.
setup(
    # Trove classifiers describing the supported environments.
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: Implementation :: CPython',
    ],
    name='benchmark-server',
    description="Persist benchmarking results",
    install_requires=read('requirements.txt'),
    extras_require={
        # This extra is for developers who need to work on the code.
        "dev": read('dev-requirements.txt'),
    },
    entry_points={},
    keywords="",
    license="Apache 2.0",
    url="https://github.com/ClusterHQ/benchmark-server/",
    maintainer='Bridget McErlean',
    maintainer_email='bridget.mcerlean@clusterhq.com',
    long_description=read('README.rst'),
)
|
apache-2.0
|
CenturylinkTechnology/ansible-modules-extras
|
cloud/misc/proxmox.py
|
13
|
18979
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: proxmox
short_description: management of instances in Proxmox VE cluster
description:
- allows you to create/delete/stop instances in Proxmox VE cluster
- Starting in Ansible 2.1, it automatically detects containerization type (lxc for PVE 4, openvz for older)
version_added: "2.0"
options:
api_host:
description:
- the host of the Proxmox VE cluster
required: true
api_user:
description:
- the user to authenticate with
required: true
api_password:
description:
- the password to authenticate with
- you can use PROXMOX_PASSWORD environment variable
default: null
required: false
vmid:
description:
- the instance id
default: null
required: true
validate_certs:
description:
- enable / disable https certificate verification
default: false
required: false
type: boolean
node:
description:
- Proxmox VE node, when new VM will be created
- required only for C(state=present)
- for another states will be autodiscovered
default: null
required: false
pool:
description:
- Proxmox VE resource pool
default: null
required: false
version_added: "2.3"
password:
description:
- the instance root password
- required only for C(state=present)
default: null
required: false
hostname:
description:
- the instance hostname
- required only for C(state=present)
default: null
required: false
ostemplate:
description:
- the template for VM creating
- required only for C(state=present)
default: null
required: false
disk:
description:
- hard disk size in GB for instance
default: 3
required: false
cpus:
description:
- numbers of allocated cpus for instance
default: 1
required: false
memory:
description:
- memory size in MB for instance
default: 512
required: false
swap:
description:
- swap memory size in MB for instance
default: 0
required: false
netif:
description:
- specifies network interfaces for the container
default: null
required: false
type: A hash/dictionary defining interfaces
mounts:
description:
- specifies additional mounts (separate disks) for the container
default: null
required: false
type: A hash/dictionary defining mount points
version_added: "2.2"
ip_address:
description:
- specifies the address the container will be assigned
default: null
required: false
type: string
onboot:
description:
- specifies whether a VM will be started during system bootup
default: false
required: false
type: boolean
storage:
description:
- target storage
default: 'local'
required: false
type: string
cpuunits:
description:
- CPU weight for a VM
default: 1000
required: false
type: integer
nameserver:
description:
- sets DNS server IP address for a container
default: null
required: false
type: string
searchdomain:
description:
- sets DNS search domain for a container
default: null
required: false
type: string
timeout:
description:
- timeout for operations
default: 30
required: false
type: integer
force:
description:
- forcing operations
- can be used only with states C(present), C(stopped), C(restarted)
- with C(state=present) force option allow to overwrite existing container
- with states C(stopped) , C(restarted) allow to force stop instance
default: false
required: false
type: boolean
state:
description:
- Indicate desired state of the instance
choices: ['present', 'started', 'absent', 'stopped', 'restarted']
default: present
notes:
- Requires proxmoxer and requests modules on host. This modules can be installed with pip.
requirements: [ "proxmoxer", "python >= 2.7", "requests" ]
author: "Sergei Antipov @UnderGreen"
'''
EXAMPLES = '''
# Create new container with minimal options
- proxmox: vmid=100 node='uk-mc02' api_user='root@pam' api_password='1q2w3e' api_host='node1' password='123456' hostname='example.org' ostemplate='local:vztmpl/ubuntu-14.04-x86_64.tar.gz'
# Create new container with minimal options with force(it will rewrite existing container)
- proxmox: vmid=100 node='uk-mc02' api_user='root@pam' api_password='1q2w3e' api_host='node1' password='123456' hostname='example.org' ostemplate='local:vztmpl/ubuntu-14.04-x86_64.tar.gz' force=yes
# Create new container with minimal options use environment PROXMOX_PASSWORD variable(you should export it before)
- proxmox: vmid=100 node='uk-mc02' api_user='root@pam' api_host='node1' password='123456' hostname='example.org' ostemplate='local:vztmpl/ubuntu-14.04-x86_64.tar.gz'
# Create new container with minimal options defining network interface with dhcp
- proxmox: vmid=100 node='uk-mc02' api_user='root@pam' api_password='1q2w3e' api_host='node1' password='123456' hostname='example.org' ostemplate='local:vztmpl/ubuntu-14.04-x86_64.tar.gz' netif='{"net0":"name=eth0,ip=dhcp,ip6=dhcp,bridge=vmbr0"}'
# Create new container with minimal options defining network interface with static ip
- proxmox: vmid=100 node='uk-mc02' api_user='root@pam' api_password='1q2w3e' api_host='node1' password='123456' hostname='example.org' ostemplate='local:vztmpl/ubuntu-14.04-x86_64.tar.gz' netif='{"net0":"name=eth0,gw=192.168.0.1,ip=192.168.0.2/24,bridge=vmbr0"}'
# Create new container with minimal options defining a mount
- proxmox: vmid=100 node='uk-mc02' api_user='root@pam' api_password='1q2w3e' api_host='node1' password='123456' hostname='example.org' ostemplate='local:vztmpl/ubuntu-14.04-x86_64.tar.gz' mounts='{"mp0":"local:8,mp=/mnt/test/"}'
# Start container
- proxmox: vmid=100 api_user='root@pam' api_password='1q2w3e' api_host='node1' state=started
# Stop container
- proxmox: vmid=100 api_user='root@pam' api_password='1q2w3e' api_host='node1' state=stopped
# Stop container with force
- proxmox: vmid=100 api_user='root@pam' api_password='1q2w3e' api_host='node1' force=yes state=stopped
# Restart container (you cannot restart a stopped or mounted container)
- proxmox: vmid=100 api_user='root@pam' api_password='1q2w3e' api_host='node1' state=restarted
# Remove container
- proxmox: vmid=100 api_user='root@pam' api_password='1q2w3e' api_host='node1' state=absent
'''
import os
import time
try:
from proxmoxer import ProxmoxAPI
HAS_PROXMOXER = True
except ImportError:
HAS_PROXMOXER = False
VZ_TYPE=None
def get_instance(proxmox, vmid):
    """Return the cluster resource entries whose vmid matches `vmid`."""
    target = int(vmid)
    matches = []
    for vm in proxmox.cluster.resources.get(type='vm'):
        if vm['vmid'] == target:
            matches.append(vm)
    return matches
def content_check(proxmox, node, ostemplate, template_store):
    """Return [True] if `ostemplate` exists in `template_store` on `node`, else []."""
    found = []
    for cnt in proxmox.nodes(node).storage(template_store).content.get():
        if cnt['volid'] == ostemplate:
            found.append(True)
    return found
def node_check(proxmox, node):
    """Return [True] if `node` is a member of the cluster, else []."""
    found = []
    for nd in proxmox.nodes.get():
        if nd['node'] == node:
            found.append(True)
    return found
def create_instance(module, proxmox, vmid, node, disk, storage, cpus, memory, swap, timeout, **kwargs):
    """Create a container on `node` and wait up to `timeout` seconds for the task.

    Extra keyword arguments are forwarded to the Proxmox API after dropping
    None values.  For lxc, `netif`/`mounts` dicts are flattened into flat
    net0/mp0-style parameters.  Returns True on success; fails the module if
    the timeout is reached.
    """
    proxmox_node = proxmox.nodes(node)
    # Drop unset options.  Use items() instead of iteritems() so the code
    # runs on both Python 2 and Python 3 (dict.iteritems() is Python-2-only).
    kwargs = dict((k, v) for k, v in kwargs.items() if v is not None)
    if VZ_TYPE == 'lxc':
        kwargs['cpulimit'] = cpus
        kwargs['rootfs'] = disk
        # LXC expects network/mount definitions as top-level parameters.
        if 'netif' in kwargs:
            kwargs.update(kwargs['netif'])
            del kwargs['netif']
        if 'mounts' in kwargs:
            kwargs.update(kwargs['mounts'])
            del kwargs['mounts']
    else:
        kwargs['cpus'] = cpus
        kwargs['disk'] = disk
    taskid = getattr(proxmox_node, VZ_TYPE).create(vmid=vmid, storage=storage, memory=memory, swap=swap, **kwargs)
    # Poll the task once per second until it reports success or we time out.
    while timeout:
        if ( proxmox_node.tasks(taskid).status.get()['status'] == 'stopped'
             and proxmox_node.tasks(taskid).status.get()['exitstatus'] == 'OK' ):
            return True
        timeout = timeout - 1
        if timeout == 0:
            module.fail_json(msg='Reached timeout while waiting for creating VM. Last line in task before timeout: %s'
                             % proxmox_node.tasks(taskid).log.get()[:1])
        time.sleep(1)
    return False
def start_instance(module, proxmox, vm, vmid, timeout):
    """Start container `vmid` and wait up to `timeout` seconds for the task.

    Returns True on success; fails the module with the last task log line if
    the timeout is reached.
    """
    node = proxmox.nodes(vm[0]['node'])
    taskid = getattr(node, VZ_TYPE)(vmid).status.start.post()
    # Poll the task once per second until it reports success or we time out.
    while timeout:
        if ( node.tasks(taskid).status.get()['status'] == 'stopped'
             and node.tasks(taskid).status.get()['exitstatus'] == 'OK' ):
            return True
        timeout = timeout - 1
        if timeout == 0:
            module.fail_json(msg='Reached timeout while waiting for starting VM. Last line in task before timeout: %s'
                             % node.tasks(taskid).log.get()[:1])
        time.sleep(1)
    return False
def stop_instance(module, proxmox, vm, vmid, timeout, force):
    """Shut down container `vmid` and wait up to `timeout` seconds.

    With `force` the shutdown is issued with forceStop=1.  Returns True on
    success; fails the module with the last task log line on timeout.
    """
    # BUG FIX: the timeout branch referenced an undefined name
    # `proxmox_node`, raising NameError exactly when reporting the timeout.
    # Bind it once here and use it throughout.
    proxmox_node = proxmox.nodes(vm[0]['node'])
    if force:
        taskid = getattr(proxmox_node, VZ_TYPE)(vmid).status.shutdown.post(forceStop=1)
    else:
        taskid = getattr(proxmox_node, VZ_TYPE)(vmid).status.shutdown.post()
    # Poll the task once per second until it reports success or we time out.
    while timeout:
        if ( proxmox_node.tasks(taskid).status.get()['status'] == 'stopped'
             and proxmox_node.tasks(taskid).status.get()['exitstatus'] == 'OK' ):
            return True
        timeout = timeout - 1
        if timeout == 0:
            module.fail_json(msg='Reached timeout while waiting for stopping VM. Last line in task before timeout: %s'
                             % proxmox_node.tasks(taskid).log.get()[:1])
        time.sleep(1)
    return False
def umount_instance(module, proxmox, vm, vmid, timeout):
    """Unmount container `vmid` and wait up to `timeout` seconds.

    Returns True on success; fails the module with the last task log line on
    timeout.
    """
    # BUG FIX: the timeout branch referenced an undefined name
    # `proxmox_node`, raising NameError exactly when reporting the timeout.
    # Bind it once here and use it throughout.
    proxmox_node = proxmox.nodes(vm[0]['node'])
    taskid = getattr(proxmox_node, VZ_TYPE)(vmid).status.umount.post()
    # Poll the task once per second until it reports success or we time out.
    while timeout:
        if ( proxmox_node.tasks(taskid).status.get()['status'] == 'stopped'
             and proxmox_node.tasks(taskid).status.get()['exitstatus'] == 'OK' ):
            return True
        timeout = timeout - 1
        if timeout == 0:
            module.fail_json(msg='Reached timeout while waiting for unmounting VM. Last line in task before timeout: %s'
                             % proxmox_node.tasks(taskid).log.get()[:1])
        time.sleep(1)
    return False
def main():
    """Ansible entry point: manage a Proxmox VE container.

    Parses the module arguments, authenticates against the Proxmox API,
    detects the containerization type (openvz for PVE < 4.0, lxc otherwise)
    and drives the container identified by `vmid` to the requested state.
    """
    module = AnsibleModule(
        argument_spec = dict(
            api_host = dict(required=True),
            api_user = dict(required=True),
            api_password = dict(no_log=True),
            vmid = dict(required=True),
            validate_certs = dict(type='bool', default='no'),
            node = dict(),
            pool = dict(),
            password = dict(no_log=True),
            hostname = dict(),
            ostemplate = dict(),
            disk = dict(type='str', default='3'),
            cpus = dict(type='int', default=1),
            memory = dict(type='int', default=512),
            swap = dict(type='int', default=0),
            netif = dict(type='dict'),
            mounts = dict(type='dict'),
            ip_address = dict(),
            onboot = dict(type='bool', default='no'),
            storage = dict(default='local'),
            cpuunits = dict(type='int', default=1000),
            nameserver = dict(),
            searchdomain = dict(),
            timeout = dict(type='int', default=30),
            force = dict(type='bool', default='no'),
            state = dict(default='present', choices=['present', 'absent', 'stopped', 'started', 'restarted']),
        )
    )
    if not HAS_PROXMOXER:
        module.fail_json(msg='proxmoxer required for this module')
    state = module.params['state']
    api_user = module.params['api_user']
    api_host = module.params['api_host']
    api_password = module.params['api_password']
    vmid = module.params['vmid']
    validate_certs = module.params['validate_certs']
    node = module.params['node']
    disk = module.params['disk']
    cpus = module.params['cpus']
    memory = module.params['memory']
    swap = module.params['swap']
    storage = module.params['storage']
    # template_store is the storage prefix of the ostemplate volid.
    if module.params['ostemplate'] is not None:
        template_store = module.params['ostemplate'].split(":")[0]
    timeout = module.params['timeout']
    # If password not set get it from PROXMOX_PASSWORD env
    if not api_password:
        try:
            api_password = os.environ['PROXMOX_PASSWORD']
        except KeyError as e:
            module.fail_json(msg='You should set api_password param or use PROXMOX_PASSWORD environment variable')
    try:
        proxmox = ProxmoxAPI(api_host, user=api_user, password=api_password, verify_ssl=validate_certs)
        global VZ_TYPE
        VZ_TYPE = 'openvz' if float(proxmox.version.get()['version']) < 4.0 else 'lxc'
    except Exception as e:
        module.fail_json(msg='authorization on proxmox cluster failed with exception: %s' % e)
    if state == 'present':
        try:
            if get_instance(proxmox, vmid) and not module.params['force']:
                module.exit_json(changed=False, msg="VM with vmid = %s is already exists" % vmid)
            # BUG FIX: the previous check was `not (node, ... and ...)`, which
            # builds a tuple.  A non-empty tuple is always truthy, so the
            # mandatory-parameter check could never fire.
            elif not (node and module.params['hostname'] and module.params['password'] and module.params['ostemplate']):
                module.fail_json(msg='node, hostname, password and ostemplate are mandatory for creating vm')
            elif not node_check(proxmox, node):
                module.fail_json(msg="node '%s' not exists in cluster" % node)
            elif not content_check(proxmox, node, module.params['ostemplate'], template_store):
                module.fail_json(msg="ostemplate '%s' not exists on node %s and storage %s"
                                 % (module.params['ostemplate'], node, template_store))
            create_instance(module, proxmox, vmid, node, disk, storage, cpus, memory, swap, timeout,
                            pool = module.params['pool'],
                            password = module.params['password'],
                            hostname = module.params['hostname'],
                            ostemplate = module.params['ostemplate'],
                            netif = module.params['netif'],
                            mounts = module.params['mounts'],
                            ip_address = module.params['ip_address'],
                            onboot = int(module.params['onboot']),
                            cpuunits = module.params['cpuunits'],
                            nameserver = module.params['nameserver'],
                            searchdomain = module.params['searchdomain'],
                            force = int(module.params['force']))
            module.exit_json(changed=True, msg="deployed VM %s from template %s" % (vmid, module.params['ostemplate']))
        except Exception as e:
            module.fail_json(msg="creation of %s VM %s failed with exception: %s" % ( VZ_TYPE, vmid, e ))
    elif state == 'started':
        try:
            vm = get_instance(proxmox, vmid)
            if not vm:
                module.fail_json(msg='VM with vmid = %s not exists in cluster' % vmid)
            if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'running':
                module.exit_json(changed=False, msg="VM %s is already running" % vmid)
            if start_instance(module, proxmox, vm, vmid, timeout):
                module.exit_json(changed=True, msg="VM %s started" % vmid)
        except Exception as e:
            module.fail_json(msg="starting of VM %s failed with exception: %s" % ( vmid, e ))
    elif state == 'stopped':
        try:
            vm = get_instance(proxmox, vmid)
            if not vm:
                module.fail_json(msg='VM with vmid = %s not exists in cluster' % vmid)
            # A 'mounted' container is already shut down; it can only be
            # unmounted, and only when force is set.
            if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'mounted':
                if module.params['force']:
                    if umount_instance(module, proxmox, vm, vmid, timeout):
                        module.exit_json(changed=True, msg="VM %s is shutting down" % vmid)
                else:
                    module.exit_json(changed=False, msg=("VM %s is already shutdown, but mounted. "
                                                         "You can use force option to umount it.") % vmid)
            if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'stopped':
                module.exit_json(changed=False, msg="VM %s is already shutdown" % vmid)
            if stop_instance(module, proxmox, vm, vmid, timeout, force = module.params['force']):
                module.exit_json(changed=True, msg="VM %s is shutting down" % vmid)
        except Exception as e:
            module.fail_json(msg="stopping of VM %s failed with exception: %s" % ( vmid, e ))
    elif state == 'restarted':
        try:
            vm = get_instance(proxmox, vmid)
            if not vm:
                module.fail_json(msg='VM with vmid = %s not exists in cluster' % vmid)
            if ( getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'stopped'
                 or getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'mounted' ):
                module.exit_json(changed=False, msg="VM %s is not running" % vmid)
            if ( stop_instance(module, proxmox, vm, vmid, timeout, force = module.params['force']) and
                 start_instance(module, proxmox, vm, vmid, timeout) ):
                module.exit_json(changed=True, msg="VM %s is restarted" % vmid)
        except Exception as e:
            module.fail_json(msg="restarting of VM %s failed with exception: %s" % ( vmid, e ))
    elif state == 'absent':
        try:
            vm = get_instance(proxmox, vmid)
            if not vm:
                module.exit_json(changed=False, msg="VM %s does not exist" % vmid)
            if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'running':
                module.exit_json(changed=False, msg="VM %s is running. Stop it before deletion." % vmid)
            if getattr(proxmox.nodes(vm[0]['node']), VZ_TYPE)(vmid).status.current.get()['status'] == 'mounted':
                module.exit_json(changed=False, msg="VM %s is mounted. Stop it with force option before deletion." % vmid)
            proxmox_node = proxmox.nodes(vm[0]['node'])
            taskid = getattr(proxmox_node, VZ_TYPE).delete(vmid)
            while timeout:
                if ( proxmox_node.tasks(taskid).status.get()['status'] == 'stopped'
                     and proxmox_node.tasks(taskid).status.get()['exitstatus'] == 'OK' ):
                    module.exit_json(changed=True, msg="VM %s removed" % vmid)
                timeout = timeout - 1
                if timeout == 0:
                    # BUG FIX: this branch previously referenced an undefined
                    # name `proxmox_node`; it is now bound above.
                    module.fail_json(msg='Reached timeout while waiting for removing VM. Last line in task before timeout: %s'
                                     % proxmox_node.tasks(taskid).log.get()[:1])
                time.sleep(1)
        except Exception as e:
            module.fail_json(msg="deletion of VM %s failed with exception: %s" % ( vmid, e ))
# import module snippets
from ansible.module_utils.basic import *
main()
|
gpl-3.0
|
vegitron/ansible
|
test/units/module_utils/basic/test__log_invocation.py
|
2
|
3323
|
# -*- coding: utf-8 -*-
# (c) 2016, James Cammarata <jimi@sngx.net>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import sys
import json
from units.mock.procenv import swap_stdin_and_argv
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import MagicMock
class TestModuleUtilsBasic(unittest.TestCase):
    """Tests for AnsibleModule._log_invocation argument sanitization."""
    @unittest.skipIf(sys.version_info[0] >= 3, "Python 3 is not supported on targets (yet)")
    def test_module_utils_basic__log_invocation(self):
        # Feed module arguments through the fake stdin/argv so AnsibleModule
        # can be constructed without a real module invocation.
        with swap_stdin_and_argv(stdin_data=json.dumps(
            dict(
                ANSIBLE_MODULE_ARGS=dict(
                    foo=False, bar=[1,2,3], bam="bam", baz=u'baz'),
                ANSIBLE_MODULE_CONSTANTS=dict()
            ))):
            from ansible.module_utils import basic
            # test basic log invocation
            am = basic.AnsibleModule(
                argument_spec=dict(
                    foo = dict(default=True, type='bool'),
                    bar = dict(default=[], type='list'),
                    bam = dict(default="bam"),
                    baz = dict(default=u"baz"),
                    # These two must be redacted/suppressed in the log output.
                    password = dict(default=True),
                    no_log = dict(default="you shouldn't see me", no_log=True),
                ),
            )
            am.log = MagicMock()
            am._log_invocation()
            # Message is generated from a dict so it will be in an unknown order.
            # have to check this manually rather than with assert_called_with()
            args = am.log.call_args[0]
            self.assertEqual(len(args), 1)
            message = args[0]
            # Length check proves no extra/missing parameters; the assertIn
            # checks below verify each expected fragment independent of order.
            self.assertEqual(len(message), len('Invoked with bam=bam bar=[1, 2, 3] foo=False baz=baz no_log=NOT_LOGGING_PARAMETER password=NOT_LOGGING_PASSWORD'))
            self.assertTrue(message.startswith('Invoked with '))
            self.assertIn(' bam=bam', message)
            self.assertIn(' bar=[1, 2, 3]', message)
            self.assertIn(' foo=False', message)
            self.assertIn(' baz=baz', message)
            self.assertIn(' no_log=NOT_LOGGING_PARAMETER', message)
            self.assertIn(' password=NOT_LOGGING_PASSWORD', message)
            # The structured kwargs must also carry the sanitized values.
            kwargs = am.log.call_args[1]
            self.assertEqual(kwargs,
                dict(log_args={
                    'foo': 'False',
                    'bar': '[1, 2, 3]',
                    'bam': 'bam',
                    'baz': 'baz',
                    'password': 'NOT_LOGGING_PASSWORD',
                    'no_log': 'NOT_LOGGING_PARAMETER',
                })
            )
|
gpl-3.0
|
MiLk/ansible
|
lib/ansible/module_utils/connection.py
|
17
|
5354
|
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# (c) 2017 Red Hat Inc.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import signal
import socket
import struct
import os
import uuid
from functools import partial
from ansible.module_utils.basic import get_exception
from ansible.module_utils._text import to_bytes, to_native, to_text
def send_data(s, data):
    """Send `data` on socket `s`, prefixed with its length as a
    network-order unsigned 64-bit integer (the framing recv_data expects)."""
    header = struct.pack('!Q', len(data))
    frame = header + data
    return s.sendall(frame)
def recv_data(s):
    """Receive one length-prefixed frame from socket `s`.

    The peer sends a network-order unsigned 64-bit length followed by that
    many payload bytes (see send_data).  Returns the payload bytes, or None
    if the connection closes mid-frame.
    """
    header_len = 8  # size of a packed unsigned long long
    buf = b""
    # First read exactly the 8-byte length header.
    while len(buf) < header_len:
        chunk = s.recv(header_len - len(buf))
        if not chunk:
            return None
        buf += chunk
    (payload_len,) = struct.unpack('!Q', buf[:header_len])
    buf = buf[header_len:]
    # Then read until the full payload has arrived.
    while len(buf) < payload_len:
        chunk = s.recv(payload_len - len(buf))
        if not chunk:
            return None
        buf += chunk
    return buf
def exec_command(module, command):
    """Run `command` through the persistent connection plugin's unix socket.

    Sends an "EXEC: <command>" frame and reads back three frames: the return
    code, stdout and stderr.  Returns (rc, stdout, stderr) as native strings;
    on a socket error the module fails with a diagnostic message.
    """
    # BUG FIX: the socket is created *outside* the try block.  Previously, if
    # socket.socket() itself raised socket.error, the handler referenced the
    # unbound name `sf` and masked the real error with a NameError.
    sf = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        sf.connect(module._socket_path)
        data = "EXEC: %s" % command
        send_data(sf, to_bytes(data.strip()))
        rc = int(recv_data(sf), 10)
        stdout = recv_data(sf)
        stderr = recv_data(sf)
    except socket.error:
        exc = get_exception()
        sf.close()
        module.fail_json(msg='unable to connect to socket', err=str(exc))
    sf.close()
    return rc, to_native(stdout, errors='surrogate_or_strict'), to_native(stderr, errors='surrogate_or_strict')
def request_builder(method, *args, **kwargs):
    """Build a JSON-RPC 2.0 request dict for `method` with a fresh uuid id.

    Positional arguments become a list of params; otherwise keyword
    arguments become a params dict; with neither, 'params' is omitted.
    """
    request = {
        'jsonrpc': '2.0',
        'method': method,
        'id': str(uuid.uuid4()),
    }
    if args:
        request['params'] = list(args)
    elif kwargs:
        request['params'] = kwargs
    return request
class Connection:
    """Proxy that turns attribute calls into JSON-RPC 2.0 requests sent over
    the persistent connection plugin's unix socket (via exec_command)."""
    def __init__(self, module):
        # module: the AnsibleModule; provides _socket_path, jsonify,
        # from_json and fail_json.
        self._module = module
    def __getattr__(self, name):
        try:
            return self.__dict__[name]
        except KeyError:
            # Names starting with '_' are never treated as RPC methods.
            if name.startswith('_'):
                raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, name))
            # Any other unknown attribute becomes a callable RPC proxy.
            return partial(self.__rpc__, name)
    def __rpc__(self, name, *args, **kwargs):
        """Executes the json-rpc and returns the output received
        from remote device.
        :name: rpc method to be executed over connection plugin that implements jsonrpc 2.0
        :args: Ordered list of params passed as arguments to rpc method
        :kwargs: Dict of valid key, value pairs passed as arguments to rpc method
        For usage refer the respective connection plugin docs.
        """
        req = request_builder(name, *args, **kwargs)
        reqid = req['id']
        if not self._module._socket_path:
            self._module.fail_json(msg='provider support not available for this host')
        if not os.path.exists(self._module._socket_path):
            self._module.fail_json(msg='provider socket does not exist, is the provider running?')
        try:
            data = self._module.jsonify(req)
            rc, out, err = exec_command(self._module, data)
        except socket.error:
            exc = get_exception()
            self._module.fail_json(msg='unable to connect to socket', err=str(exc))
        try:
            response = self._module.from_json(to_text(out, errors='surrogate_then_replace'))
        except ValueError as exc:
            self._module.fail_json(msg=to_text(exc, errors='surrogate_then_replace'))
        # The response must echo back the request id we generated above.
        if response['id'] != reqid:
            self._module.fail_json(msg='invalid id received')
        if 'error' in response:
            # Prefer the detailed 'data' field when the server provides one.
            msg = response['error'].get('data') or response['error']['message']
            self._module.fail_json(msg=to_text(msg, errors='surrogate_then_replace'))
        return response['result']
|
gpl-3.0
|
clumsy/intellij-community
|
python/lib/Lib/site-packages/django/contrib/gis/db/models/sql/compiler.py
|
222
|
12579
|
from itertools import izip
from django.db.backends.util import truncate_name
from django.db.models.sql import compiler
from django.db.models.sql.constants import TABLE_NAME
from django.db.models.sql.query import get_proxied_model
SQLCompiler = compiler.SQLCompiler
class GeoSQLCompiler(compiler.SQLCompiler):
    """SQL compiler that customizes column selection for GeoDjango.

    Geometry columns may need backend-specific selection SQL (registered in
    ``self.query.custom_select``), e.g. conversion to OGC WKT on Oracle and
    MySQL; the overridden methods below thread that custom SQL through the
    normal column-selection machinery.
    """

    def get_columns(self, with_aliases=False):
        """
        Return the list of columns to use in the select statement. If no
        columns have been specified, returns all columns relating to fields in
        the model.

        If 'with_aliases' is true, any column names that are duplicated
        (without the table names) are given unique aliases. This is needed in
        some cases to avoid ambiguitity with nested queries.

        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        qn = self.quote_name_unless_alias
        qn2 = self.connection.ops.quote_name
        # Extra-select entries are wrapped in any custom geometry selection SQL.
        result = ['(%s) AS %s' % (self.get_extra_select_format(alias) % col[0], qn2(alias))
                  for alias, col in self.query.extra_select.iteritems()]
        aliases = set(self.query.extra_select.keys())
        if with_aliases:
            col_aliases = aliases.copy()
        else:
            col_aliases = set()
        if self.query.select:
            only_load = self.deferred_to_columns()
            # This loop customized for GeoQuery.
            for col, field in izip(self.query.select, self.query.select_fields):
                if isinstance(col, (list, tuple)):
                    alias, column = col
                    table = self.query.alias_map[alias][TABLE_NAME]
                    # Deferred loading: skip columns not requested for this table.
                    if table in only_load and col not in only_load[table]:
                        continue
                    r = self.get_field_select(field, alias, column)
                    if with_aliases:
                        if col[1] in col_aliases:
                            # Duplicate bare column name: invent a unique alias.
                            c_alias = 'Col%d' % len(col_aliases)
                            result.append('%s AS %s' % (r, c_alias))
                            aliases.add(c_alias)
                            col_aliases.add(c_alias)
                        else:
                            result.append('%s AS %s' % (r, qn2(col[1])))
                            aliases.add(r)
                            col_aliases.add(col[1])
                    else:
                        result.append(r)
                        aliases.add(r)
                        col_aliases.add(col[1])
                else:
                    # Non-column select item (e.g. an expression object).
                    result.append(col.as_sql(qn, self.connection))
                    if hasattr(col, 'alias'):
                        aliases.add(col.alias)
                        col_aliases.add(col.alias)
        elif self.query.default_cols:
            cols, new_aliases = self.get_default_columns(with_aliases,
                                                         col_aliases)
            result.extend(cols)
            aliases.update(new_aliases)
        max_name_length = self.connection.ops.max_name_length()
        result.extend([
            '%s%s' % (
                self.get_extra_select_format(alias) % aggregate.as_sql(qn, self.connection),
                alias is not None
                and ' AS %s' % qn(truncate_name(alias, max_name_length))
                or ''
            )
            for alias, aggregate in self.query.aggregate_select.items()
        ])
        # This loop customized for GeoQuery.
        for (table, col), field in izip(self.query.related_select_cols, self.query.related_select_fields):
            r = self.get_field_select(field, table, col)
            if with_aliases and col in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (r, c_alias))
                aliases.add(c_alias)
                col_aliases.add(c_alias)
            else:
                result.append(r)
                aliases.add(r)
                col_aliases.add(col)
        self._select_aliases = aliases
        return result

    def get_default_columns(self, with_aliases=False, col_aliases=None,
            start_alias=None, opts=None, as_pairs=False, local_only=False):
        """
        Computes the default columns for selecting every field in the base
        model. Will sometimes be called to pull in related models (e.g. via
        select_related), in which case "opts" and "start_alias" will be given
        to provide a starting point for the traversal.

        Returns a list of strings, quoted appropriately for use in SQL
        directly, as well as a set of aliases used in the select statement (if
        'as_pairs' is True, returns a list of (alias, col_name) pairs instead
        of strings as the first component and None as the second component).

        This routine is overridden from Query to handle customized selection of
        geometry columns.
        """
        result = []
        if opts is None:
            opts = self.query.model._meta
        aliases = set()
        only_load = self.deferred_to_columns()
        # Skip all proxy to the root proxied model
        proxied_model = get_proxied_model(opts)
        if start_alias:
            seen = {None: start_alias}
        for field, model in opts.get_fields_with_model():
            if local_only and model is not None:
                continue
            if start_alias:
                try:
                    alias = seen[model]
                except KeyError:
                    if model is proxied_model:
                        alias = start_alias
                    else:
                        # Join through the inheritance link to the ancestor table.
                        link_field = opts.get_ancestor_link(model)
                        alias = self.query.join((start_alias, model._meta.db_table,
                                link_field.column, model._meta.pk.column))
                    seen[model] = alias
            else:
                # If we're starting from the base model of the queryset, the
                # aliases will have already been set up in pre_sql_setup(), so
                # we can save time here.
                alias = self.query.included_inherited_models[model]
            table = self.query.alias_map[alias][TABLE_NAME]
            if table in only_load and field.column not in only_load[table]:
                continue
            if as_pairs:
                result.append((alias, field.column))
                aliases.add(alias)
                continue
            # This part of the function is customized for GeoQuery. We
            # see if there was any custom selection specified in the
            # dictionary, and set up the selection format appropriately.
            field_sel = self.get_field_select(field, alias)
            if with_aliases and field.column in col_aliases:
                c_alias = 'Col%d' % len(col_aliases)
                result.append('%s AS %s' % (field_sel, c_alias))
                col_aliases.add(c_alias)
                aliases.add(c_alias)
            else:
                r = field_sel
                result.append(r)
                aliases.add(r)
                if with_aliases:
                    col_aliases.add(field.column)
        return result, aliases

    def resolve_columns(self, row, fields=()):
        """
        This routine is necessary so that distances and geometries returned
        from extra selection SQL get resolved appropriately into Python
        objects.
        """
        values = []
        aliases = self.query.extra_select.keys()
        if self.query.aggregates:
            # If we have an aggregate annotation, must extend the aliases
            # so their corresponding row values are included.
            aliases.extend([None for i in xrange(len(self.query.aggregates))])
        # Have to set a starting row number offset that is used for
        # determining the correct starting row index -- needed for
        # doing pagination with Oracle.
        rn_offset = 0
        if self.connection.ops.oracle:
            if self.query.high_mark is not None or self.query.low_mark: rn_offset = 1
        index_start = rn_offset + len(aliases)
        # Converting any extra selection values (e.g., geometries and
        # distance objects added by GeoQuerySet methods).
        values = [self.query.convert_values(v,
                      self.query.extra_select_fields.get(a, None),
                      self.connection)
                  for v, a in izip(row[rn_offset:index_start], aliases)]
        if self.connection.ops.oracle or getattr(self.query, 'geo_values', False):
            # We resolve the rest of the columns if we're on Oracle or if
            # the `geo_values` attribute is defined.
            for value, field in map(None, row[index_start:], fields):
                values.append(self.query.convert_values(value, field, connection=self.connection))
        else:
            values.extend(row[index_start:])
        return tuple(values)

    #### Routines unique to GeoQuery ####
    def get_extra_select_format(self, alias):
        # Wrap the extra-select column in any custom selection SQL registered
        # for this alias (e.g. a spatial transform or WKT conversion).
        sel_fmt = '%s'
        if alias in self.query.custom_select:
            sel_fmt = sel_fmt % self.query.custom_select[alias]
        return sel_fmt

    def get_field_select(self, field, alias=None, column=None):
        """
        Returns the SELECT SQL string for the given field. Figures out
        if any custom selection SQL is needed for the column The `alias`
        keyword may be used to manually specify the database table where
        the column exists, if not in the model associated with this
        `GeoQuery`. Similarly, `column` may be used to specify the exact
        column name, rather than using the `column` attribute on `field`.
        """
        sel_fmt = self.get_select_format(field)
        if field in self.query.custom_select:
            field_sel = sel_fmt % self.query.custom_select[field]
        else:
            field_sel = sel_fmt % self._field_column(field, alias, column)
        return field_sel

    def get_select_format(self, fld):
        """
        Returns the selection format string, depending on the requirements
        of the spatial backend. For example, Oracle and MySQL require custom
        selection formats in order to retrieve geometries in OGC WKT. For all
        other fields a simple '%s' format string is returned.
        """
        if self.connection.ops.select and hasattr(fld, 'geom_type'):
            # This allows operations to be done on fields in the SELECT,
            # overriding their values -- used by the Oracle and MySQL
            # spatial backends to get database values as WKT, and by the
            # `transform` method.
            sel_fmt = self.connection.ops.select
            # Because WKT doesn't contain spatial reference information,
            # the SRID is prefixed to the returned WKT to ensure that the
            # transformed geometries have an SRID different than that of the
            # field -- this is only used by `transform` for Oracle and
            # SpatiaLite backends.
            if self.query.transformed_srid and ( self.connection.ops.oracle or
                                                 self.connection.ops.spatialite ):
                sel_fmt = "'SRID=%d;'||%s" % (self.query.transformed_srid, sel_fmt)
        else:
            sel_fmt = '%s'
        return sel_fmt

    # Private API utilities, subject to change.
    def _field_column(self, field, table_alias=None, column=None):
        """
        Helper function that returns the database column for the given field.
        The table and column are returned (quoted) in the proper format, e.g.,
        `"geoapp_city"."point"`. If `table_alias` is not specified, the
        database table associated with the model of this `GeoQuery` will be
        used. If `column` is specified, it will be used instead of the value
        in `field.column`.
        """
        if table_alias is None: table_alias = self.query.model._meta.db_table
        return "%s.%s" % (self.quote_name_unless_alias(table_alias),
                          self.connection.ops.quote_name(column or field.column))
class SQLInsertCompiler(compiler.SQLInsertCompiler, GeoSQLCompiler):
    # Mixes GeoSQLCompiler in so INSERT statements get geometry-aware column handling.
    pass
class SQLDeleteCompiler(compiler.SQLDeleteCompiler, GeoSQLCompiler):
    # Mixes GeoSQLCompiler in so DELETE statements get geometry-aware column handling.
    pass
class SQLUpdateCompiler(compiler.SQLUpdateCompiler, GeoSQLCompiler):
    # Mixes GeoSQLCompiler in so UPDATE statements get geometry-aware column handling.
    pass
class SQLAggregateCompiler(compiler.SQLAggregateCompiler, GeoSQLCompiler):
    # Mixes GeoSQLCompiler in so aggregate queries get geometry-aware column handling.
    pass
class SQLDateCompiler(compiler.SQLDateCompiler, GeoSQLCompiler):
    # Mixes GeoSQLCompiler in so date queries get geometry-aware column handling.
    pass
|
apache-2.0
|
danielvdende/incubator-airflow
|
airflow/contrib/executors/kubernetes_executor.py
|
1
|
27629
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import base64
import multiprocessing
from queue import Queue
from dateutil import parser
from uuid import uuid4
import kubernetes
from kubernetes import watch, client
from kubernetes.client.rest import ApiException
from airflow.configuration import conf
from airflow.contrib.kubernetes.pod_launcher import PodLauncher
from airflow.contrib.kubernetes.kube_client import get_kube_client
from airflow.contrib.kubernetes.worker_configuration import WorkerConfiguration
from airflow.executors.base_executor import BaseExecutor
from airflow.executors import Executors
from airflow.models import TaskInstance, KubeResourceVersion, KubeWorkerIdentifier
from airflow.utils.state import State
from airflow import configuration, settings
from airflow.exceptions import AirflowConfigException, AirflowException
from airflow.utils.log.logging_mixin import LoggingMixin
class KubernetesExecutorConfig:
    """Per-task Kubernetes overrides extracted from a task's ``executor_config``."""

    # Field names double as the dict keys used by from_dict()/as_dict().
    _fields = ('image', 'image_pull_policy', 'request_memory', 'request_cpu',
               'limit_memory', 'limit_cpu', 'gcp_service_account_key',
               'node_selectors', 'affinity', 'annotations')

    def __init__(self, image=None, image_pull_policy=None, request_memory=None,
                 request_cpu=None, limit_memory=None, limit_cpu=None,
                 gcp_service_account_key=None, node_selectors=None, affinity=None,
                 annotations=None):
        self.image = image
        self.image_pull_policy = image_pull_policy
        self.request_memory = request_memory
        self.request_cpu = request_cpu
        self.limit_memory = limit_memory
        self.limit_cpu = limit_cpu
        self.gcp_service_account_key = gcp_service_account_key
        self.node_selectors = node_selectors
        self.affinity = affinity
        self.annotations = annotations

    def __repr__(self):
        template = ("{}(image={}, image_pull_policy={}, request_memory={}, request_cpu={}, "
                    "limit_memory={}, limit_cpu={}, gcp_service_account_key={}, "
                    "node_selectors={}, affinity={}, annotations={})")
        return template.format(KubernetesExecutorConfig.__name__, self.image,
                               self.image_pull_policy, self.request_memory,
                               self.request_cpu, self.limit_memory, self.limit_cpu,
                               self.gcp_service_account_key, self.node_selectors,
                               self.affinity, self.annotations)

    @staticmethod
    def from_dict(obj):
        """Build a config from an ``executor_config`` dict; ``None`` yields all defaults."""
        if obj is None:
            return KubernetesExecutorConfig()
        if not isinstance(obj, dict):
            raise TypeError(
                'Cannot convert a non-dictionary object into a KubernetesExecutorConfig')
        namespaced = obj.get(Executors.KubernetesExecutor, {})
        kwargs = {name: namespaced.get(name)
                  for name in KubernetesExecutorConfig._fields
                  if name != 'annotations'}
        # annotations defaults to an empty dict rather than None.
        kwargs['annotations'] = namespaced.get('annotations', {})
        return KubernetesExecutorConfig(**kwargs)

    def as_dict(self):
        """Return the config as a plain dict keyed by field name."""
        return {name: getattr(self, name) for name in KubernetesExecutorConfig._fields}
class KubeConfig:
    """Snapshot of all Kubernetes-executor settings read from the airflow config."""

    core_section = 'core'
    kubernetes_section = 'kubernetes'

    def __init__(self):
        cfg = configuration.as_dict(display_sensitive=True)
        kube_section = self.kubernetes_section

        def kube_opt(key):
            # Shorthand for a string option from the [kubernetes] section.
            return conf.get(kube_section, key)

        self.core_configuration = cfg['core']
        self.kube_secrets = cfg.get('kubernetes_secrets', {})
        self.airflow_home = configuration.get(self.core_section, 'airflow_home')
        self.dags_folder = configuration.get(self.core_section, 'dags_folder')
        self.parallelism = configuration.getint(self.core_section, 'PARALLELISM')
        self.worker_container_repository = configuration.get(
            self.kubernetes_section, 'worker_container_repository')
        self.worker_container_tag = configuration.get(
            self.kubernetes_section, 'worker_container_tag')
        # Full image reference used for worker pods.
        self.kube_image = '{}:{}'.format(
            self.worker_container_repository, self.worker_container_tag)
        self.kube_image_pull_policy = configuration.get(
            self.kubernetes_section, "worker_container_image_pull_policy"
        )
        self.kube_node_selectors = cfg.get('kubernetes_node_selectors', {})
        self.delete_worker_pods = conf.getboolean(kube_section, 'delete_worker_pods')
        self.worker_service_account_name = kube_opt('worker_service_account_name')
        self.image_pull_secrets = kube_opt('image_pull_secrets')
        # NOTE: `git_repo` and `git_branch` must be specified together as a pair.
        self.git_repo = kube_opt('git_repo')
        self.git_branch = kube_opt('git_branch')
        # Optional subdirectory of the repository containing the dags.
        self.git_subpath = kube_opt('git_subpath')
        # Optional credentials for private repositories.
        self.git_user = kube_opt('git_user')
        self.git_password = kube_opt('git_password')
        # Alternatively, a volume claim may mount a PV containing DAGs directly.
        self.dags_volume_claim = kube_opt('dags_volume_claim')
        # Optional PV-claim settings for writing logs and locating DAGs/logs
        # on a SubPath.
        self.logs_volume_claim = kube_opt('logs_volume_claim')
        self.dags_volume_subpath = kube_opt('dags_volume_subpath')
        self.logs_volume_subpath = kube_opt('logs_volume_subpath')
        self.base_log_folder = configuration.get(self.core_section, 'base_log_folder')
        # Namespace of the scheduler/webserver; with RBAC enabled the scheduler
        # may need permissions to create, watch, get, and delete pods here.
        self.kube_namespace = kube_opt('namespace')
        # Namespace in which the executor creates worker pods; with RBAC the
        # workers may need permissions to interact with cluster components.
        self.executor_namespace = kube_opt('namespace')
        # Task secrets managed by KubernetesExecutor.
        self.gcp_service_account_keys = kube_opt('gcp_service_account_keys')
        # git-sync init container settings (repository, tag, container name).
        self.git_sync_container_repository = kube_opt('git_sync_container_repository')
        self.git_sync_container_tag = kube_opt('git_sync_container_tag')
        self.git_sync_container = '{}:{}'.format(
            self.git_sync_container_repository, self.git_sync_container_tag)
        self.git_sync_init_container_name = kube_opt('git_sync_init_container_name')
        # Optional configmap carrying a valid Airflow config for worker pods.
        self.airflow_configmap = kube_opt('airflow_configmap')
        self._validate()

    def _validate(self):
        # DAGs must come from either a volume claim or a git repo + branch pair
        # (condition is the De Morgan equivalent of the original check).
        if not (self.dags_volume_claim or (self.git_repo and self.git_branch)):
            raise AirflowConfigException(
                'In kubernetes mode the following must be set in the `kubernetes` '
                'config section: `dags_volume_claim` or `git_repo and git_branch`')
class KubernetesJobWatcher(multiprocessing.Process, LoggingMixin, object):
    """Child process that watches the Kubernetes pod-event stream for this
    worker's pods and forwards status changes onto ``watcher_queue``."""

    def __init__(self, namespace, watcher_queue, resource_version, worker_uuid):
        multiprocessing.Process.__init__(self)
        self.namespace = namespace
        self.worker_uuid = worker_uuid
        self.watcher_queue = watcher_queue
        self.resource_version = resource_version

    def run(self):
        """Watch forever, restarting the stream from the last seen resource version."""
        kube_client = get_kube_client()
        while True:
            try:
                self.resource_version = self._run(kube_client, self.resource_version,
                                                  self.worker_uuid)
            except Exception:
                self.log.exception('Unknown error in KubernetesJobWatcher. Failing')
                raise
            else:
                # Logger.warn is a deprecated alias for Logger.warning.
                self.log.warning('Watch died gracefully, starting back up with: '
                                 'last resource_version: %s', self.resource_version)

    def _run(self, kube_client, resource_version, worker_uuid):
        """Stream pod events for this worker's pods; return the last resource version seen."""
        self.log.info(
            'Event: and now my watch begins starting at resource_version: %s',
            resource_version
        )
        watcher = watch.Watch()
        # Only watch pods labelled as belonging to this worker.
        kwargs = {'label_selector': 'airflow-worker={}'.format(worker_uuid)}
        if resource_version:
            kwargs['resource_version'] = resource_version
        last_resource_version = None
        for event in watcher.stream(kube_client.list_namespaced_pod, self.namespace,
                                    **kwargs):
            task = event['object']
            self.log.info(
                'Event: %s had an event of type %s',
                task.metadata.name, event['type']
            )
            if event['type'] == 'ERROR':
                return self.process_error(event)
            self.process_status(
                task.metadata.name, task.status.phase, task.metadata.labels,
                task.metadata.resource_version
            )
            last_resource_version = task.metadata.resource_version
        return last_resource_version

    def process_error(self, event):
        """Handle an ERROR event; reset to resource version '0' when ours is too old."""
        self.log.error(
            'Encountered Error response from k8s list namespaced pod stream => %s',
            event
        )
        raw_object = event['raw_object']
        if raw_object['code'] == 410:
            self.log.info(
                'Kubernetes resource version is too old, must reset to 0 => %s',
                raw_object['message']
            )
            # Return resource version 0
            return '0'
        # Bug fix: AirflowException does not %-format extra positional args, so
        # the original multi-argument form never interpolated the values into
        # the message.  Format explicitly instead.
        raise AirflowException(
            'Kubernetes failure for %s with code %s and message: %s' % (
                raw_object['reason'], raw_object['code'], raw_object['message'])
        )

    def process_status(self, pod_id, status, labels, resource_version):
        """Translate a pod phase into an executor state and enqueue it."""
        if status == 'Pending':
            self.log.info('Event: %s Pending', pod_id)
        elif status == 'Failed':
            self.log.info('Event: %s Failed', pod_id)
            self.watcher_queue.put((pod_id, State.FAILED, labels, resource_version))
        elif status == 'Succeeded':
            self.log.info('Event: %s Succeeded', pod_id)
            # A None state tells the executor the task completed successfully.
            self.watcher_queue.put((pod_id, None, labels, resource_version))
        elif status == 'Running':
            self.log.info('Event: %s is Running', pod_id)
        else:
            # Logger.warn is a deprecated alias for Logger.warning.
            self.log.warning(
                'Event: Invalid state: %s on pod: %s with labels: %s with '
                'resource_version: %s', status, pod_id, labels, resource_version
            )
class AirflowKubernetesScheduler(LoggingMixin):
    """Launches one pod per task and relays pod status events back to the executor.

    Pods are launched in-process; status events arrive asynchronously via a
    KubernetesJobWatcher child process and are drained by sync().
    """

    def __init__(self, kube_config, task_queue, result_queue, session,
                 kube_client, worker_uuid):
        self.log.debug("Creating Kubernetes executor")
        self.kube_config = kube_config
        self.task_queue = task_queue
        self.result_queue = result_queue
        self.namespace = self.kube_config.kube_namespace
        self.log.debug("Kubernetes using namespace %s", self.namespace)
        self.kube_client = kube_client
        self.launcher = PodLauncher(kube_client=self.kube_client)
        self.worker_configuration = WorkerConfiguration(kube_config=self.kube_config)
        # Crosses the process boundary to the watcher, hence multiprocessing.Queue.
        self.watcher_queue = multiprocessing.Queue()
        self._session = session
        self.worker_uuid = worker_uuid
        self.kube_watcher = self._make_kube_watcher()

    def _make_kube_watcher(self):
        """Start a watcher process resuming from the last stored resource version."""
        resource_version = KubeResourceVersion.get_current_resource_version(self._session)
        watcher = KubernetesJobWatcher(self.namespace, self.watcher_queue,
                                       resource_version, self.worker_uuid)
        watcher.start()
        return watcher

    def _health_check_kube_watcher(self):
        """Restart the watcher process if it has died for any reason."""
        if not self.kube_watcher.is_alive():
            self.log.error(
                'Error while health checking kube watcher process. '
                'Process died for unknown reasons')
            self.kube_watcher = self._make_kube_watcher()

    def run_next(self, next_job):
        """
        The run_next command will check the task_queue for any un-run jobs.
        It will then create a unique job-id, launch that job in the cluster,
        and store relevant info in the current_jobs map so we can track the job's
        status
        """
        self.log.info('Kubernetes job is %s', str(next_job))
        key, command, kube_executor_config = next_job
        dag_id, task_id, execution_date = key
        self.log.debug("Kubernetes running for command %s", command)
        self.log.debug("Kubernetes launching image %s", self.kube_config.kube_image)
        pod = self.worker_configuration.make_pod(
            namespace=self.namespace, worker_uuid=self.worker_uuid,
            pod_id=self._create_pod_id(dag_id, task_id),
            dag_id=dag_id, task_id=task_id,
            execution_date=self._datetime_to_label_safe_datestring(execution_date),
            airflow_command=command, kube_executor_config=kube_executor_config
        )
        # the watcher will monitor pods, so we do not block.
        self.launcher.run_pod_async(pod)
        self.log.debug("Kubernetes Job created!")

    def delete_pod(self, pod_id):
        """Delete a worker pod if configured to; a 404 means it is already gone."""
        if self.kube_config.delete_worker_pods:
            try:
                self.kube_client.delete_namespaced_pod(
                    pod_id, self.namespace, body=client.V1DeleteOptions())
            except ApiException as e:
                # If the pod is already deleted
                if e.status != 404:
                    raise

    def sync(self):
        """
        The sync function checks the status of all currently running kubernetes jobs.
        If a job is completed, it's status is placed in the result queue to
        be sent back to the scheduler.

        :return:
        """
        self._health_check_kube_watcher()
        while not self.watcher_queue.empty():
            self.process_watcher_task()

    def process_watcher_task(self):
        """Move one pod status event from the watcher queue to the result queue."""
        pod_id, state, labels, resource_version = self.watcher_queue.get()
        self.log.info(
            'Attempting to finish pod; pod_id: %s; state: %s; labels: %s',
            pod_id, state, labels
        )
        key = self._labels_to_key(labels=labels)
        if key:
            self.log.debug('finishing job %s - %s (%s)', key, state, pod_id)
            self.result_queue.put((key, state, pod_id, resource_version))

    @staticmethod
    def _strip_unsafe_kubernetes_special_chars(string):
        """
        Kubernetes only supports lowercase alphanumeric characters and "-" and "." in
        the pod name.
        However, there are special rules about how "-" and "." can be used so let's
        only keep alphanumeric chars. See here for detail:
        https://kubernetes.io/docs/concepts/overview/working-with-objects/names/

        :param string: The requested Pod name
        :return: ``str`` Pod name stripped of any unsafe characters
        """
        # The enumerate() in the original served no purpose -- its index was unused.
        return ''.join(ch.lower() for ch in string if ch.isalnum())

    @staticmethod
    def _make_safe_pod_id(safe_dag_id, safe_task_id, safe_uuid):
        r"""
        Kubernetes pod names must be <= 253 chars and must pass the following regex for
        validation
        "^[a-z0-9]([-a-z0-9]*[a-z0-9])?(\.[a-z0-9]([-a-z0-9]*[a-z0-9])?)*$"

        :param safe_dag_id: a dag_id with only alphanumeric characters
        :param safe_task_id: a task_id with only alphanumeric characters
        :param safe_uuid: a uuid
        :return: ``str`` valid Pod name of appropriate length
        """
        MAX_POD_ID_LEN = 253
        safe_key = safe_dag_id + safe_task_id
        # Truncate the key so that key + "-" + uuid fits within the limit.
        safe_pod_id = safe_key[:MAX_POD_ID_LEN - len(safe_uuid) - 1] + "-" + safe_uuid
        return safe_pod_id

    @staticmethod
    def _create_pod_id(dag_id, task_id):
        """Build a Kubernetes-safe, unique pod id from the task identity."""
        safe_dag_id = AirflowKubernetesScheduler._strip_unsafe_kubernetes_special_chars(
            dag_id)
        safe_task_id = AirflowKubernetesScheduler._strip_unsafe_kubernetes_special_chars(
            task_id)
        safe_uuid = AirflowKubernetesScheduler._strip_unsafe_kubernetes_special_chars(
            uuid4().hex)
        return AirflowKubernetesScheduler._make_safe_pod_id(safe_dag_id, safe_task_id,
                                                            safe_uuid)

    @staticmethod
    def _label_safe_datestring_to_datetime(string):
        """
        Kubernetes doesn't permit ":" in labels. ISO datetime format uses ":" but not
        "_", so ":" was replaced with "_" (and "+" with "_plus_") when labelling;
        this undoes that substitution.

        :param string: string
        :return: datetime.datetime object
        """
        return parser.parse(string.replace('_plus_', '+').replace("_", ":"))

    @staticmethod
    def _datetime_to_label_safe_datestring(datetime_obj):
        """
        Kubernetes doesn't like ":" in labels, since ISO datetime format uses ":" but
        not "_", so replace ":" with "_" (and "+" with "_plus_").

        :param datetime_obj: datetime.datetime object
        :return: ISO-like string representing the datetime
        """
        return datetime_obj.isoformat().replace(":", "_").replace('+', '_plus_')

    def _labels_to_key(self, labels):
        """Recover the executor key (dag_id, task_id, execution_date) from pod labels."""
        try:
            return (
                labels['dag_id'], labels['task_id'],
                self._label_safe_datestring_to_datetime(labels['execution_date']))
        except Exception as e:
            # Logger.warn is a deprecated alias for Logger.warning.
            self.log.warning(
                'Error while converting labels to key; labels: %s; exception: %s',
                labels, e
            )
            return None
class KubernetesExecutor(BaseExecutor, LoggingMixin):
    """Airflow executor that runs each task instance in its own Kubernetes pod."""

    def __init__(self):
        self.kube_config = KubeConfig()
        self.task_queue = None
        self._session = None
        self.result_queue = None
        self.kube_scheduler = None
        self.kube_client = None
        self.worker_uuid = None
        super(KubernetesExecutor, self).__init__(parallelism=self.kube_config.parallelism)

    def clear_not_launched_queued_tasks(self):
        """
        Reset "Queued" task instances that never made it to a pod.

        If the airflow scheduler restarts with pending "Queued" tasks, the tasks
        may or may not have been launched. Thus, on starting up the scheduler
        let's check every "Queued" task to see if it has been launched (ie: if
        there is a corresponding pod on kubernetes).

        If it has been launched then do nothing, otherwise reset the state to
        "None" so the task will be rescheduled.

        This will not be necessary in a future version of airflow in which
        there is proper support for State.LAUNCHED.
        """
        queued_tasks = self._session.query(
            TaskInstance).filter(TaskInstance.state == State.QUEUED).all()
        self.log.info(
            'When executor started up, found %s queued task instances',
            len(queued_tasks)
        )
        for task in queued_tasks:
            # Pods are labelled with the task identity; an empty result means
            # the task was queued but its pod was never launched.
            dict_string = "dag_id={},task_id={},execution_date={},airflow-worker={}" \
                .format(task.dag_id, task.task_id,
                        AirflowKubernetesScheduler._datetime_to_label_safe_datestring(
                            task.execution_date), self.worker_uuid)
            kwargs = dict(label_selector=dict_string)
            pod_list = self.kube_client.list_namespaced_pod(
                self.kube_config.kube_namespace, **kwargs)
            if len(pod_list.items) == 0:
                self.log.info(
                    'TaskInstance: %s found in queued state but was not launched, '
                    'rescheduling', task
                )
                self._session.query(TaskInstance).filter(
                    TaskInstance.dag_id == task.dag_id,
                    TaskInstance.task_id == task.task_id,
                    TaskInstance.execution_date == task.execution_date
                ).update({TaskInstance.state: State.NONE})
            self._session.commit()

    def _inject_secrets(self):
        """Create/update one Kubernetes secret per configured GCP service account key."""
        def _create_or_update_secret(secret_name, secret_path):
            # Bug fix: read the key in binary mode so b64encode receives bytes
            # (text mode hands it str and fails on Python 3), and use a context
            # manager so the file handle is closed promptly instead of leaked.
            with open(secret_path, 'rb') as key_file:
                encoded_key = base64.b64encode(key_file.read())
            try:
                return self.kube_client.create_namespaced_secret(
                    self.kube_config.executor_namespace, kubernetes.client.V1Secret(
                        data={'key.json': encoded_key},
                        metadata=kubernetes.client.V1ObjectMeta(name=secret_name)))
            except ApiException as e:
                if e.status == 409:
                    # Secret already exists -- replace it with the new key data.
                    return self.kube_client.replace_namespaced_secret(
                        secret_name, self.kube_config.executor_namespace,
                        kubernetes.client.V1Secret(
                            data={'key.json': encoded_key},
                            metadata=kubernetes.client.V1ObjectMeta(name=secret_name)))
                self.log.exception(
                    'Exception while trying to inject secret. '
                    'Secret name: %s, error details: %s',
                    secret_name, e
                )
                raise

        # For each GCP service account key, inject it as a secret in executor
        # namespace with the specific secret name configured in the airflow.cfg.
        # We let exceptions to pass through to users.
        if self.kube_config.gcp_service_account_keys:
            name_path_pair_list = [
                {'name': account_spec.strip().split('=')[0],
                 'path': account_spec.strip().split('=')[1]}
                for account_spec in self.kube_config.gcp_service_account_keys.split(',')]
            for service_account in name_path_pair_list:
                _create_or_update_secret(service_account['name'], service_account['path'])

    def start(self):
        """Initialize queues, DB session, worker uuid and the pod scheduler."""
        self.log.info('Start Kubernetes executor')
        self._session = settings.Session()
        self.worker_uuid = KubeWorkerIdentifier.get_or_create_current_kube_worker_uuid(
            self._session)
        self.log.debug('Start with worker_uuid: %s', self.worker_uuid)
        # always need to reset resource version since we don't know
        # when we last started, note for behavior below
        # https://github.com/kubernetes-client/python/blob/master/kubernetes/docs
        # /CoreV1Api.md#list_namespaced_pod
        KubeResourceVersion.reset_resource_version(self._session)
        self.task_queue = Queue()
        self.result_queue = Queue()
        self.kube_client = get_kube_client()
        self.kube_scheduler = AirflowKubernetesScheduler(
            self.kube_config, self.task_queue, self.result_queue, self._session,
            self.kube_client, self.worker_uuid
        )
        self._inject_secrets()
        self.clear_not_launched_queued_tasks()

    def execute_async(self, key, command, queue=None, executor_config=None):
        """Queue a task for execution; the sync loop launches the pod later."""
        self.log.info(
            'Add task %s with command %s with executor_config %s',
            key, command, executor_config
        )
        kube_executor_config = KubernetesExecutorConfig.from_dict(executor_config)
        self.task_queue.put((key, command, kube_executor_config))

    def sync(self):
        """Drain watcher results, checkpoint resource versions, launch one queued task."""
        if self.running:
            self.log.info('self.running: %s', self.running)
        if self.queued_tasks:
            self.log.info('self.queued: %s', self.queued_tasks)
        self.kube_scheduler.sync()
        last_resource_version = None
        while not self.result_queue.empty():
            results = self.result_queue.get()
            key, state, pod_id, resource_version = results
            last_resource_version = resource_version
            self.log.info('Changing state of %s to %s', results, state)
            self._change_state(key, state, pod_id)
            KubeResourceVersion.checkpoint_resource_version(
                last_resource_version, session=self._session)
        if not self.task_queue.empty():
            key, command, kube_executor_config = self.task_queue.get()
            self.kube_scheduler.run_next((key, command, kube_executor_config))

    def _change_state(self, key, state, pod_id):
        """Record a task's state change, clean up its pod and update the DB."""
        if state != State.RUNNING:
            self.kube_scheduler.delete_pod(pod_id)
            try:
                self.log.info('Deleted pod: %s', str(key))
                self.running.pop(key)
            except KeyError:
                # The task may already have been removed from the running set;
                # the redundant `pass` after this log call was dropped.
                self.log.debug('Could not find key: %s', str(key))
        self.event_buffer[key] = state
        (dag_id, task_id, ex_time) = key
        item = self._session.query(TaskInstance).filter_by(
            dag_id=dag_id,
            task_id=task_id,
            execution_date=ex_time
        ).one()
        if state:
            item.state = state
            self._session.add(item)
            self._session.commit()

    def end(self):
        """Shut down the executor.

        NOTE(review): queue.Queue.join() only returns once task_done() has
        been called for every item ever put on the queue; sync() never calls
        task_done(), so this can block forever if any task was queued --
        confirm against the scheduler shutdown path.
        """
        self.log.info('Shutting down Kubernetes executor')
        self.task_queue.join()
|
apache-2.0
|
inares/edx-platform
|
common/djangoapps/external_auth/tests/test_helper.py
|
165
|
1259
|
"""
Tests for utility functions in external_auth module
"""
from django.test import TestCase
from external_auth.views import _safe_postlogin_redirect
class ExternalAuthHelperFnTest(TestCase):
    """
    Unit tests for the external_auth.views helper function
    """
    def test__safe_postlogin_redirect(self):
        """
        Tests the _safe_postlogin_redirect function with different values of next
        """
        host = 'testserver'
        onsite_targets = (
            '/dashboard',
            '/courses/org/num/name/courseware',
            'http://{}/my/custom/url'.format(host),
        )
        # Same-site targets must be redirected to verbatim.
        for target in onsite_targets:
            response = _safe_postlogin_redirect(target, host)
            self.assertEqual(response.status_code, 302)
            self.assertEqual(response['location'], target)
        # An off-site target must fall back to the site root.
        offsite_target = 'http://www.attacker.com'
        response = _safe_postlogin_redirect(offsite_target, host)
        self.assertEqual(response.status_code, 302)
        self.assertEqual("/", response['location'])
|
agpl-3.0
|
wiltonlazary/arangodb
|
3rdParty/V8/V8-5.0.71.39/tools/swarming_client/third_party/requests/auth.py
|
46
|
5665
|
# -*- coding: utf-8 -*-
"""
requests.auth
~~~~~~~~~~~~~
This module contains the authentication handlers for Requests.
"""
import os
import re
import time
import hashlib
import logging
from base64 import b64encode
from .compat import urlparse, str
from .utils import parse_dict_header
log = logging.getLogger(__name__)
CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'
def _basic_auth_str(username, password):
"""Returns a Basic Auth string."""
return 'Basic ' + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip().decode('latin1')
class AuthBase(object):
    """Base class that all auth implementations derive from."""

    def __call__(self, r):
        raise NotImplementedError('Auth hooks must be callable.')


class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __call__(self, r):
        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
        return r


class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
        return r


class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # State carried across the 401 challenge/response round trip.
        self.last_nonce = ''
        self.nonce_count = 0
        self.chal = {}

    def build_digest_header(self, method, url):
        """Build the ``Authorization`` header value from the stored challenge.

        Uses ``self.chal`` (parsed from the WWW-Authenticate header) for the
        given request method and url.  Returns None when the challenge uses
        an unsupported algorithm or an unsupported qop (only 'auth' is
        implemented).
        """
        realm = self.chal['realm']
        nonce = self.chal['nonce']
        qop = self.chal.get('qop')
        algorithm = self.chal.get('algorithm')
        opaque = self.chal.get('opaque')

        if algorithm is None:
            _algorithm = 'MD5'
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        # BUG FIX: hash_utf8 was previously left unbound for algorithms other
        # than MD5/SHA, so the "is None" guard below raised UnboundLocalError
        # instead of returning None as intended.
        hash_utf8 = None
        if _algorithm == 'MD5':
            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.md5(x).hexdigest()
            hash_utf8 = md5_utf8
        elif _algorithm == 'SHA':
            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha1(x).hexdigest()
            hash_utf8 = sha_utf8
        # XXX MD5-sess
        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))

        if hash_utf8 is None:
            # Unsupported digest algorithm: signal "cannot authenticate".
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        # The digest covers only path (+ query), never scheme/host.
        path = p_parsed.path
        if p_parsed.query:
            path += '?' + p_parsed.query

        A1 = '%s:%s:%s' % (self.username, realm, self.password)
        A2 = '%s:%s' % (method, path)

        if qop is None:
            respdig = KD(hash_utf8(A1), "%s:%s" % (nonce, hash_utf8(A2)))
        elif qop == 'auth' or 'auth' in qop.split(','):
            # Reuse of the server nonce bumps the nonce count (nc).
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
            ncvalue = '%08x' % self.nonce_count
            # Client nonce: sha1 of counter + server nonce + time + OS
            # randomness, truncated to 16 hex chars.
            s = str(self.nonce_count).encode('utf-8')
            s += nonce.encode('utf-8')
            s += time.ctime().encode('utf-8')
            s += os.urandom(8)
            cnonce = (hashlib.sha1(s).hexdigest()[:16])
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, hash_utf8(A2))
            respdig = KD(hash_utf8(A1), noncebit)
        else:
            # XXX handle auth-int.
            return None

        self.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (self.username, realm, nonce, path, respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if algorithm:
            base += ', algorithm="%s"' % algorithm
        if entdig:
            base += ', digest="%s"' % entdig
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)

        return 'Digest %s' % (base)

    def handle_401(self, r, **kwargs):
        """Takes the given response and tries digest-auth, if needed."""
        # num_401_calls limits us to a single digest retry per request.
        num_401_calls = getattr(self, 'num_401_calls', 1)
        s_auth = r.headers.get('www-authenticate', '')

        if 'digest' in s_auth.lower() and num_401_calls < 2:
            setattr(self, 'num_401_calls', num_401_calls + 1)
            pat = re.compile(r'digest ', flags=re.IGNORECASE)
            self.chal = parse_dict_header(pat.sub('', s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.raw.release_conn()
            prep = r.request.copy()
            prep.prepare_cookies(r.cookies)

            prep.headers['Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        setattr(self, 'num_401_calls', 1)
        return r

    def __call__(self, r):
        # If we have a saved nonce, skip the 401
        if self.last_nonce:
            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
        r.register_hook('response', self.handle_401)
        return r
|
apache-2.0
|
embeddedarm/android_external_chromium_org
|
tools/update_reference_build.py
|
24
|
11532
|
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Updates the Chrome reference builds.
Use -r option to update a Chromium reference build, or -b option for Chrome
official builds.
Usage:
$ cd /tmp
$ /path/to/update_reference_build.py -r <revision>
$ cd reference_builds/reference_builds
$ gcl change
$ gcl upload <change>
$ gcl commit <change>
"""
import logging
import optparse
import os
import shutil
import subprocess
import sys
import time
import urllib
import urllib2
import zipfile
# Example chromium build location:
# gs://chromium-browser-snapshots/Linux/228977/chrome-linux.zip
CHROMIUM_URL_FMT = ('http://commondatastorage.googleapis.com/'
'chromium-browser-snapshots/%s/%s/%s')
# Chrome official build storage
# https://wiki.corp.google.com/twiki/bin/view/Main/ChromeOfficialBuilds
# Internal Google archive of official Chrome builds, example:
# https://goto.google.com/chrome_official_builds/
# 32.0.1677.0/precise32bit/chrome-precise32bit.zip
CHROME_INTERNAL_URL_FMT = ('http://master.chrome.corp.google.com/'
'official_builds/%s/%s/%s')
# Google storage location (no public web URL's), example:
# gs://chrome-archive/30/30.0.1595.0/precise32bit/chrome-precise32bit.zip
CHROME_GS_URL_FMT = ('gs://chrome-archive/%s/%s/%s/%s')
class BuildUpdater(object):
_PLATFORM_FILES_MAP = {
'Win': [
'chrome-win32.zip',
'chrome-win32-syms.zip',
],
'Mac': [
'chrome-mac.zip',
],
'Linux': [
'chrome-linux.zip',
],
'Linux_x64': [
'chrome-linux.zip',
],
}
_CHROME_PLATFORM_FILES_MAP = {
'Win': [
'chrome-win32.zip',
'chrome-win32-syms.zip',
],
'Mac': [
'chrome-mac.zip',
],
'Linux': [
'chrome-precise32bit.zip',
],
'Linux_x64': [
'chrome-precise64bit.zip',
],
}
# Map of platform names to gs:// Chrome build names.
_BUILD_PLATFORM_MAP = {
'Linux': 'precise32bit',
'Linux_x64': 'precise64bit',
'Win': 'win',
'Mac': 'mac',
}
_PLATFORM_DEST_MAP = {
'Linux': 'chrome_linux',
'Linux_x64': 'chrome_linux64',
'Win': 'chrome_win',
'Mac': 'chrome_mac',
}
def __init__(self, options):
self._platforms = options.platforms.split(',')
self._revision = options.build_number or int(options.revision)
self._use_build_number = bool(options.build_number)
self._use_gs = options.use_gs
@staticmethod
def _GetCmdStatusAndOutput(args, cwd=None, shell=False):
"""Executes a subprocess and returns its exit code and output.
Args:
args: A string or a sequence of program arguments.
cwd: If not None, the subprocess's current directory will be changed to
|cwd| before it's executed.
shell: Whether to execute args as a shell command.
Returns:
The tuple (exit code, output).
"""
logging.info(str(args) + ' ' + (cwd or ''))
p = subprocess.Popen(args=args, cwd=cwd, stdout=subprocess.PIPE,
stderr=subprocess.PIPE, shell=shell)
stdout, stderr = p.communicate()
exit_code = p.returncode
if stderr:
logging.critical(stderr)
logging.info(stdout)
return (exit_code, stdout)
def _GetBuildUrl(self, platform, revision, filename):
if self._use_build_number:
# Chrome Google storage bucket.
if self._use_gs:
release = revision[:revision.find('.')]
return (CHROME_GS_URL_FMT % (
release,
revision,
self._BUILD_PLATFORM_MAP[platform],
filename))
# Chrome internal archive.
return (CHROME_INTERNAL_URL_FMT % (
revision,
self._BUILD_PLATFORM_MAP[platform],
filename))
# Chromium archive.
return CHROMIUM_URL_FMT % (urllib.quote_plus(platform), revision, filename)
def _FindBuildRevision(self, platform, revision, filename):
# TODO(shadi): Iterate over build numbers to find a valid one.
if self._use_build_number:
return (revision
if self._DoesBuildExist(platform, revision, filename) else None)
MAX_REVISIONS_PER_BUILD = 100
for revision_guess in xrange(revision, revision + MAX_REVISIONS_PER_BUILD):
if self._DoesBuildExist(platform, revision_guess, filename):
return revision_guess
else:
time.sleep(.1)
return None
def _DoesBuildExist(self, platform, build_number, filename):
url = self._GetBuildUrl(platform, build_number, filename)
if self._use_gs:
return self._DoesGSFileExist(url)
r = urllib2.Request(url)
r.get_method = lambda: 'HEAD'
try:
urllib2.urlopen(r)
return True
except urllib2.HTTPError, err:
if err.code == 404:
return False
def _DoesGSFileExist(self, gs_file_name):
exit_code = BuildUpdater._GetCmdStatusAndOutput(
['gsutil', 'ls', gs_file_name])[0]
return not exit_code
def _GetPlatformFiles(self, platform):
if self._use_build_number:
return BuildUpdater._CHROME_PLATFORM_FILES_MAP[platform]
return BuildUpdater._PLATFORM_FILES_MAP[platform]
def _DownloadBuilds(self):
for platform in self._platforms:
for f in self._GetPlatformFiles(platform):
output = os.path.join('dl', platform,
'%s_%s_%s' % (platform, self._revision, f))
if os.path.exists(output):
logging.info('%s alread exists, skipping download', output)
continue
build_revision = self._FindBuildRevision(platform, self._revision, f)
if not build_revision:
logging.critical('Failed to find %s build for r%s\n', platform,
self._revision)
sys.exit(1)
dirname = os.path.dirname(output)
if dirname and not os.path.exists(dirname):
os.makedirs(dirname)
url = self._GetBuildUrl(platform, build_revision, f)
self._DownloadFile(url, output)
def _DownloadFile(self, url, output):
logging.info('Downloading %s, saving to %s', url, output)
if self._use_build_number and self._use_gs:
BuildUpdater._GetCmdStatusAndOutput(['gsutil', 'cp', url, output])
else:
r = urllib2.urlopen(url)
with file(output, 'wb') as f:
f.write(r.read())
def _FetchSvnRepos(self):
if not os.path.exists('reference_builds'):
os.makedirs('reference_builds')
BuildUpdater._GetCmdStatusAndOutput(
['gclient', 'config',
'svn://svn.chromium.org/chrome/trunk/deps/reference_builds'],
'reference_builds')
BuildUpdater._GetCmdStatusAndOutput(
['gclient', 'sync'], 'reference_builds')
def _UnzipFile(self, dl_file, dest_dir):
if not zipfile.is_zipfile(dl_file):
return False
logging.info('Opening %s', dl_file)
with zipfile.ZipFile(dl_file, 'r') as z:
for content in z.namelist():
dest = os.path.join(dest_dir, content[content.find('/')+1:])
# Create dest parent dir if it does not exist.
if not os.path.isdir(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
# If dest is just a dir listing, do nothing.
if not os.path.basename(dest):
continue
if not os.path.isdir(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
with z.open(content) as unzipped_content:
logging.info('Extracting %s to %s (%s)', content, dest, dl_file)
with file(dest, 'wb') as dest_file:
dest_file.write(unzipped_content.read())
permissions = z.getinfo(content).external_attr >> 16
if permissions:
os.chmod(dest, permissions)
return True
def _ClearDir(self, dir):
"""Clears all files in |dir| except for hidden files and folders."""
for root, dirs, files in os.walk(dir):
# Skip hidden files and folders (like .svn and .git).
files = [f for f in files if f[0] != '.']
dirs[:] = [d for d in dirs if d[0] != '.']
for f in files:
os.remove(os.path.join(root, f))
def _ExtractBuilds(self):
for platform in self._platforms:
if os.path.exists('tmp_unzip'):
os.path.unlink('tmp_unzip')
dest_dir = os.path.join('reference_builds', 'reference_builds',
BuildUpdater._PLATFORM_DEST_MAP[platform])
self._ClearDir(dest_dir)
for root, _, dl_files in os.walk(os.path.join('dl', platform)):
for dl_file in dl_files:
dl_file = os.path.join(root, dl_file)
if not self._UnzipFile(dl_file, dest_dir):
logging.info('Copying %s to %s', dl_file, dest_dir)
shutil.copy(dl_file, dest_dir)
def _SvnAddAndRemove(self):
svn_dir = os.path.join('reference_builds', 'reference_builds')
# List all changes without ignoring any files.
stat = BuildUpdater._GetCmdStatusAndOutput(['svn', 'stat', '--no-ignore'],
svn_dir)[1]
for line in stat.splitlines():
action, filename = line.split(None, 1)
# Add new and ignored files.
if action == '?' or action == 'I':
BuildUpdater._GetCmdStatusAndOutput(
['svn', 'add', filename], svn_dir)
elif action == '!':
BuildUpdater._GetCmdStatusAndOutput(
['svn', 'delete', filename], svn_dir)
filepath = os.path.join(svn_dir, filename)
if not os.path.isdir(filepath) and os.access(filepath, os.X_OK):
BuildUpdater._GetCmdStatusAndOutput(
['svn', 'propset', 'svn:executable', 'true', filename], svn_dir)
def DownloadAndUpdateBuilds(self):
self._DownloadBuilds()
self._FetchSvnRepos()
self._ExtractBuilds()
self._SvnAddAndRemove()
def ParseOptions(argv):
  """Parse command line options, exiting with an error on invalid combos."""
  parser = optparse.OptionParser()
  parser.set_usage('usage: %prog <options>')
  parser.add_option('-b', dest='build_number',
                    help='Chrome official build number to pick up.')
  parser.add_option('--gs', dest='use_gs', action='store_true', default=False,
                    help='Use Google storage for official builds. Used with -b '
                         'option. Default is false (i.e. use internal storage.')
  parser.add_option('-p', dest='platforms',
                    default='Win,Mac,Linux,Linux_x64',
                    help='Comma separated list of platforms to download '
                         '(as defined by the chromium builders).')
  parser.add_option('-r', dest='revision',
                    help='Revision to pick up.')
  options = parser.parse_args(argv)[0]

  # Exactly one of -r / -b must be given; --gs only makes sense with -b.
  has_revision = bool(options.revision)
  has_build_number = bool(options.build_number)
  if not has_revision and not has_build_number:
    logging.critical('Must specify either -r or -b.\n')
    sys.exit(1)
  if has_revision and has_build_number:
    logging.critical('Must specify either -r or -b but not both.\n')
    sys.exit(1)
  if options.use_gs and not has_build_number:
    logging.critical('Can only use --gs with -b option.\n')
    sys.exit(1)
  return options
def main(argv):
  """Entry point: parse options, then download and install reference builds."""
  logging.getLogger().setLevel(logging.DEBUG)
  updater = BuildUpdater(ParseOptions(argv))
  updater.DownloadAndUpdateBuilds()
  logging.info('Successfully updated reference builds. Move to '
               'reference_builds/reference_builds and make a change with gcl.')


if __name__ == '__main__':
  sys.exit(main(sys.argv))
|
bsd-3-clause
|
mbrukman/libcloud
|
libcloud/test/storage/test_atmos.py
|
46
|
32319
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import os.path
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import urlparse
from libcloud.utils.py3 import b
import libcloud.utils.files
from libcloud.common.types import LibcloudError
from libcloud.storage.base import Container, Object
from libcloud.storage.types import ContainerAlreadyExistsError, \
ContainerDoesNotExistError, \
ContainerIsNotEmptyError, \
ObjectDoesNotExistError
from libcloud.storage.drivers.atmos import AtmosConnection, AtmosDriver
from libcloud.storage.drivers.dummy import DummyIterator
from libcloud.test import StorageMockHttp, MockRawResponse
from libcloud.test.file_fixtures import StorageFileFixtures
class AtmosTests(unittest.TestCase):
    """Tests for the Atmos storage driver, with HTTP traffic served by the
    AtmosMockHttp / AtmosMockRawResponse fixtures."""
    def setUp(self):
        # Route all driver connections through the mock HTTP classes and
        # reset per-test mock state (``type`` selects fixture variants).
        AtmosDriver.connectionCls.conn_classes = (None, AtmosMockHttp)
        AtmosDriver.connectionCls.rawResponseCls = AtmosMockRawResponse
        AtmosDriver.path = ''
        AtmosMockHttp.type = None
        AtmosMockHttp.upload_created = False
        AtmosMockRawResponse.type = None
        self.driver = AtmosDriver('dummy', base64.b64encode(b('dummy')))
        self._remove_test_file()
    def tearDown(self):
        self._remove_test_file()
    def _remove_test_file(self):
        # Download tests write next to this file; best-effort cleanup.
        file_path = os.path.abspath(__file__) + '.temp'
        try:
            os.unlink(file_path)
        except OSError:
            pass
    def test_list_containers(self):
        AtmosMockHttp.type = 'EMPTY'
        containers = self.driver.list_containers()
        self.assertEqual(len(containers), 0)
        AtmosMockHttp.type = None
        containers = self.driver.list_containers()
        self.assertEqual(len(containers), 6)
    def test_list_container_objects(self):
        container = Container(name='test_container', extra={},
                              driver=self.driver)
        AtmosMockHttp.type = 'EMPTY'
        objects = self.driver.list_container_objects(container=container)
        self.assertEqual(len(objects), 0)
        AtmosMockHttp.type = None
        objects = self.driver.list_container_objects(container=container)
        self.assertEqual(len(objects), 2)
        obj = [o for o in objects if o.name == 'not-a-container1'][0]
        self.assertEqual(obj.meta_data['object_id'],
                         '651eae32634bf84529c74eabd555fda48c7cead6')
        self.assertEqual(obj.container.name, 'test_container')
    def test_get_container(self):
        container = self.driver.get_container(container_name='test_container')
        self.assertEqual(container.name, 'test_container')
        self.assertEqual(container.extra['object_id'],
                         'b21cb59a2ba339d1afdd4810010b0a5aba2ab6b9')
    def test_get_container_escaped(self):
        # Container names with spaces/ampersands must be URL-escaped.
        container = self.driver.get_container(
            container_name='test & container')
        self.assertEqual(container.name, 'test & container')
        self.assertEqual(container.extra['object_id'],
                         'b21cb59a2ba339d1afdd4810010b0a5aba2ab6b9')
    def test_get_container_not_found(self):
        try:
            self.driver.get_container(container_name='not_found')
        except ContainerDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_create_container_success(self):
        container = self.driver.create_container(
            container_name='test_create_container')
        self.assertTrue(isinstance(container, Container))
        self.assertEqual(container.name, 'test_create_container')
        self.assertEqual(container.extra['object_id'],
                         '31a27b593629a3fe59f887fd973fd953e80062ce')
    def test_create_container_already_exists(self):
        AtmosMockHttp.type = 'ALREADY_EXISTS'
        try:
            self.driver.create_container(
                container_name='test_create_container')
        except ContainerAlreadyExistsError:
            pass
        else:
            self.fail(
                'Container already exists but an exception was not thrown')
    def test_delete_container_success(self):
        container = Container(name='foo_bar_container', extra={}, driver=self)
        result = self.driver.delete_container(container=container)
        self.assertTrue(result)
    def test_delete_container_not_found(self):
        AtmosMockHttp.type = 'NOT_FOUND'
        container = Container(name='foo_bar_container', extra={}, driver=self)
        try:
            self.driver.delete_container(container=container)
        except ContainerDoesNotExistError:
            pass
        else:
            self.fail(
                'Container does not exist but an exception was not thrown')
    def test_delete_container_not_empty(self):
        AtmosMockHttp.type = 'NOT_EMPTY'
        container = Container(name='foo_bar_container', extra={}, driver=self)
        try:
            self.driver.delete_container(container=container)
        except ContainerIsNotEmptyError:
            pass
        else:
            self.fail('Container is not empty but an exception was not thrown')
    def test_get_object_success(self):
        obj = self.driver.get_object(container_name='test_container',
                                     object_name='test_object')
        self.assertEqual(obj.container.name, 'test_container')
        self.assertEqual(obj.size, 555)
        self.assertEqual(obj.hash, '6b21c4a111ac178feacf9ec9d0c71f17')
        self.assertEqual(obj.extra['object_id'],
                         '322dce3763aadc41acc55ef47867b8d74e45c31d6643')
        self.assertEqual(
            obj.extra['last_modified'], 'Tue, 25 Jan 2011 22:01:49 GMT')
        self.assertEqual(obj.meta_data['foo-bar'], 'test 1')
        self.assertEqual(obj.meta_data['bar-foo'], 'test 2')
    def test_get_object_escaped(self):
        obj = self.driver.get_object(container_name='test & container',
                                     object_name='test & object')
        self.assertEqual(obj.container.name, 'test & container')
        self.assertEqual(obj.size, 555)
        self.assertEqual(obj.hash, '6b21c4a111ac178feacf9ec9d0c71f17')
        self.assertEqual(obj.extra['object_id'],
                         '322dce3763aadc41acc55ef47867b8d74e45c31d6643')
        self.assertEqual(
            obj.extra['last_modified'], 'Tue, 25 Jan 2011 22:01:49 GMT')
        self.assertEqual(obj.meta_data['foo-bar'], 'test 1')
        self.assertEqual(obj.meta_data['bar-foo'], 'test 2')
    def test_get_object_not_found(self):
        try:
            self.driver.get_object(container_name='test_container',
                                   object_name='not_found')
        except ObjectDoesNotExistError:
            pass
        else:
            self.fail('Exception was not thrown')
    def test_delete_object_success(self):
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver)
        status = self.driver.delete_object(obj=obj)
        self.assertTrue(status)
    def test_delete_object_escaped_success(self):
        container = Container(name='foo & bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo & bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver)
        status = self.driver.delete_object(obj=obj)
        self.assertTrue(status)
    def test_delete_object_not_found(self):
        AtmosMockHttp.type = 'NOT_FOUND'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver)
        try:
            self.driver.delete_object(obj=obj)
        except ObjectDoesNotExistError:
            pass
        else:
            self.fail('Object does not exist but an exception was not thrown')
    def test_download_object_success(self):
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver)
        # Downloads to a sibling temp file; removed again in tearDown.
        destination_path = os.path.abspath(__file__) + '.temp'
        result = self.driver.download_object(obj=obj,
                                             destination_path=destination_path,
                                             overwrite_existing=False,
                                             delete_on_failure=True)
        self.assertTrue(result)
    def test_download_object_escaped_success(self):
        container = Container(name='foo & bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo & bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver)
        destination_path = os.path.abspath(__file__) + '.temp'
        result = self.driver.download_object(obj=obj,
                                             destination_path=destination_path,
                                             overwrite_existing=False,
                                             delete_on_failure=True)
        self.assertTrue(result)
    def test_download_object_success_not_found(self):
        AtmosMockRawResponse.type = 'NOT_FOUND'
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container,
                     meta_data=None,
                     driver=self.driver)
        destination_path = os.path.abspath(__file__) + '.temp'
        try:
            self.driver.download_object(
                obj=obj,
                destination_path=destination_path,
                overwrite_existing=False,
                delete_on_failure=True)
        except ObjectDoesNotExistError:
            pass
        else:
            self.fail('Object does not exist but an exception was not thrown')
    def test_download_object_as_stream(self):
        container = Container(name='foo_bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo_bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver)
        stream = self.driver.download_object_as_stream(
            obj=obj, chunk_size=None)
        self.assertTrue(hasattr(stream, '__iter__'))
    def test_download_object_as_stream_escaped(self):
        container = Container(name='foo & bar_container', extra={},
                              driver=self.driver)
        obj = Object(name='foo & bar_object', size=1000, hash=None, extra={},
                     container=container, meta_data=None,
                     driver=self.driver)
        stream = self.driver.download_object_as_stream(
            obj=obj, chunk_size=None)
        self.assertTrue(hasattr(stream, '__iter__'))
    def test_upload_object_success(self):
        # Monkey-patch _upload_file so no real file transfer happens;
        # restored at the end of the test.
        def upload_file(self, response, file_path, chunked=False,
                        calculate_hash=True):
            return True, 'hash343hhash89h932439jsaa89', 1000
        old_func = AtmosDriver._upload_file
        AtmosDriver._upload_file = upload_file
        path = os.path.abspath(__file__)
        container = Container(name='fbc', extra={}, driver=self)
        object_name = 'ftu'
        extra = {'meta_data': {'some-value': 'foobar'}}
        obj = self.driver.upload_object(file_path=path, container=container,
                                        extra=extra, object_name=object_name)
        self.assertEqual(obj.name, 'ftu')
        self.assertEqual(obj.size, 1000)
        self.assertTrue('some-value' in obj.meta_data)
        AtmosDriver._upload_file = old_func
    def test_upload_object_no_content_type(self):
        # Force MIME detection to fail; the driver should fall back to a
        # default Content-Type rather than erroring out.
        def no_content_type(name):
            return None, None
        old_func = libcloud.utils.files.guess_file_mime_type
        libcloud.utils.files.guess_file_mime_type = no_content_type
        file_path = os.path.abspath(__file__)
        container = Container(name='fbc', extra={}, driver=self)
        object_name = 'ftu'
        obj = self.driver.upload_object(file_path=file_path,
                                        container=container,
                                        object_name=object_name)
        # Just check that the file was uploaded OK, as the fallback
        # Content-Type header should be set (application/octet-stream).
        self.assertEqual(obj.name, object_name)
        libcloud.utils.files.guess_file_mime_type = old_func
    def test_upload_object_error(self):
        def dummy_content_type(name):
            return 'application/zip', None
        # Make the mock connection's send() blow up to simulate a network
        # failure mid-upload.
        def send(instance):
            raise Exception('')
        old_func1 = libcloud.utils.files.guess_file_mime_type
        libcloud.utils.files.guess_file_mime_type = dummy_content_type
        old_func2 = AtmosMockHttp.send
        AtmosMockHttp.send = send
        file_path = os.path.abspath(__file__)
        container = Container(name='fbc', extra={}, driver=self)
        object_name = 'ftu'
        try:
            self.driver.upload_object(
                file_path=file_path,
                container=container,
                object_name=object_name)
        except LibcloudError:
            pass
        else:
            self.fail(
                'Timeout while uploading but an exception was not thrown')
        finally:
            libcloud.utils.files.guess_file_mime_type = old_func1
            AtmosMockHttp.send = old_func2
    def test_upload_object_nonexistent_file(self):
        def dummy_content_type(name):
            return 'application/zip', None
        old_func = libcloud.utils.files.guess_file_mime_type
        libcloud.utils.files.guess_file_mime_type = dummy_content_type
        file_path = os.path.abspath(__file__ + '.inexistent')
        container = Container(name='fbc', extra={}, driver=self)
        object_name = 'ftu'
        try:
            self.driver.upload_object(
                file_path=file_path,
                container=container,
                object_name=object_name)
        except OSError:
            pass
        else:
            self.fail('Inesitent but an exception was not thrown')
        finally:
            libcloud.utils.files.guess_file_mime_type = old_func
    def test_upload_object_via_stream_new_object(self):
        def dummy_content_type(name):
            return 'application/zip', None
        old_func = libcloud.storage.drivers.atmos.guess_file_mime_type
        libcloud.storage.drivers.atmos.guess_file_mime_type = dummy_content_type
        container = Container(name='fbc', extra={}, driver=self)
        object_name = 'ftsdn'
        iterator = DummyIterator(data=['2', '3', '5'])
        try:
            self.driver.upload_object_via_stream(container=container,
                                                 object_name=object_name,
                                                 iterator=iterator)
        finally:
            libcloud.storage.drivers.atmos.guess_file_mime_type = old_func
    def test_upload_object_via_stream_existing_object(self):
        def dummy_content_type(name):
            return 'application/zip', None
        old_func = libcloud.storage.drivers.atmos.guess_file_mime_type
        libcloud.storage.drivers.atmos.guess_file_mime_type = dummy_content_type
        container = Container(name='fbc', extra={}, driver=self)
        object_name = 'ftsde'
        iterator = DummyIterator(data=['2', '3', '5'])
        try:
            self.driver.upload_object_via_stream(container=container,
                                                 object_name=object_name,
                                                 iterator=iterator)
        finally:
            libcloud.storage.drivers.atmos.guess_file_mime_type = old_func
    def test_upload_object_via_stream_no_content_type(self):
        # Streaming uploads cannot fall back to a default Content-Type,
        # so a failed MIME guess must raise.
        def no_content_type(name):
            return None, None
        old_func = libcloud.storage.drivers.atmos.guess_file_mime_type
        libcloud.storage.drivers.atmos.guess_file_mime_type = no_content_type
        container = Container(name='fbc', extra={}, driver=self)
        object_name = 'ftsdct'
        iterator = DummyIterator(data=['2', '3', '5'])
        try:
            self.driver.upload_object_via_stream(container=container,
                                                 object_name=object_name,
                                                 iterator=iterator)
        except AttributeError:
            pass
        else:
            self.fail(
                'File content type not provided'
                ' but an exception was not thrown')
        finally:
            libcloud.storage.drivers.atmos.guess_file_mime_type = old_func
    def test_signature_algorithm(self):
        # Known-good request signatures (HMAC over canonicalized headers)
        # computed for a fixed uid/key/date, covering escaping, all verbs
        # and x-emc-meta handling.
        test_uid = 'fredsmagicuid'
        test_key = base64.b64encode(b('ssssshhhhhmysecretkey'))
        test_date = 'Mon, 04 Jul 2011 07:39:19 GMT'
        test_values = [
            ('GET', '/rest/namespace/foo', '', {},
             'WfSASIA25TuqO2n0aO9k/dtg6S0='),
            ('GET', '/rest/namespace/foo%20%26%20bar', '', {},
             'vmlqXqcInxxoP4YX5mR09BonjX4='),
            ('POST', '/rest/namespace/foo', '', {},
             'oYKdsF+1DOuUT7iX5CJCDym2EQk='),
            ('PUT', '/rest/namespace/foo', '', {},
             'JleF9dpSWhaT3B2swZI3s41qqs4='),
            ('DELETE', '/rest/namespace/foo', '', {},
             '2IX+Bd5XZF5YY+g4P59qXV1uLpo='),
            ('GET', '/rest/namespace/foo?metata/system', '', {},
             'zuHDEAgKM1winGnWn3WBsqnz4ks='),
            ('POST', '/rest/namespace/foo?metadata/user', '', {
                'x-emc-meta': 'fakemeta=fake, othermeta=faketoo'
            }, '7sLx1nxPIRAtocfv02jz9h1BjbU='),
        ]
        class FakeDriver(object):
            path = ''
        for method, action, api_path, headers, expected in test_values:
            c = AtmosConnection(test_uid, test_key)
            c.method = method
            c.action = action
            d = FakeDriver()
            d.path = api_path
            c.driver = d
            headers = c.add_default_headers(headers)
            headers['Date'] = headers['x-emc-date'] = test_date
            self.assertEqual(c._calculate_signature({}, headers),
                             b(expected).decode('utf-8'))
class AtmosMockHttp(StorageMockHttp, unittest.TestCase):
    """Canned-response HTTP mock used by the Atmos storage driver tests.

    The base mock dispatches each request to the ``_rest_namespace_*``
    method whose name matches the munged request path; the test case can
    select an error variant by appending a suffix such as ``_NOT_FOUND``.
    """
    fixtures = StorageFileFixtures('atmos')
    # Class-level flags that make the first metadata/system lookup during
    # an upload return 404 ("object does not exist yet") and subsequent
    # lookups succeed.  Stored on the class so the state is shared no
    # matter which instance handles a given request.
    upload_created = False
    upload_stream_created = False
    def __init__(self, *args, **kwargs):
        # Initialise as a TestCase so the assert* helpers used in the
        # handlers below are available.
        unittest.TestCase.__init__(self)
        if kwargs.get('host', None) and kwargs.get('port', None):
            StorageMockHttp.__init__(self, *args, **kwargs)
        self._upload_object_via_stream_first_request = True
    def runTest(self):
        # Present only so this object is a valid stand-alone TestCase.
        pass
    def request(self, method, url, body=None, headers=None, raw=False):
        headers = headers or {}
        parsed = urlparse.urlparse(url)
        # Fold a leading "metadata/..." query string into the URL path so
        # the request maps onto a single handler-method name.
        if parsed.query.startswith('metadata/'):
            parsed = list(parsed)
            parsed[2] = parsed[2] + '/' + parsed[4]
            parsed[4] = ''
            url = urlparse.urlunparse(parsed)
        return super(AtmosMockHttp, self).request(method, url, body, headers,
                                                  raw)
    # -- Container listing ------------------------------------------------
    def _rest_namespace_EMPTY(self, method, url, body, headers):
        body = self.fixtures.load('empty_directory_listing.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _rest_namespace(self, method, url, body, headers):
        body = self.fixtures.load('list_containers.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _rest_namespace_test_container_EMPTY(self, method, url, body, headers):
        body = self.fixtures.load('empty_directory_listing.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _rest_namespace_test_container(self, method, url, body, headers):
        body = self.fixtures.load('list_containers.xml')
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    # -- Container metadata -----------------------------------------------
    def _rest_namespace_test_container__metadata_system(
            self, method, url, body,
            headers):
        headers = {
            'x-emc-meta': 'objectid=b21cb59a2ba339d1afdd4810010b0a5aba2ab6b9'
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_test_20_26_20container__metadata_system(
            self, method, url, body,
            headers):
        # Same as above for a container name containing " & "
        # (URL-encoded as _20_26_20 in the dispatch method name).
        headers = {
            'x-emc-meta': 'objectid=b21cb59a2ba339d1afdd4810010b0a5aba2ab6b9'
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_not_found__metadata_system(self, method, url, body,
                                                   headers):
        body = self.fixtures.load('not_found.xml')
        return (httplib.NOT_FOUND, body, {},
                httplib.responses[httplib.NOT_FOUND])
    # -- Container creation / deletion ------------------------------------
    def _rest_namespace_test_create_container(self, method, url, body, headers):
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _rest_namespace_test_create_container__metadata_system(self, method,
                                                               url, body,
                                                               headers):
        headers = {
            'x-emc-meta': 'objectid=31a27b593629a3fe59f887fd973fd953e80062ce'
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_test_create_container_ALREADY_EXISTS(self, method, url,
                                                             body, headers):
        body = self.fixtures.load('already_exists.xml')
        return (httplib.BAD_REQUEST, body, {},
                httplib.responses[httplib.BAD_REQUEST])
    def _rest_namespace_foo_bar_container(self, method, url, body, headers):
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _rest_namespace_foo_bar_container_NOT_FOUND(self, method, url, body,
                                                    headers):
        body = self.fixtures.load('not_found.xml')
        return (httplib.NOT_FOUND, body, {},
                httplib.responses[httplib.NOT_FOUND])
    def _rest_namespace_foo_bar_container_NOT_EMPTY(self, method, url, body,
                                                    headers):
        body = self.fixtures.load('not_empty.xml')
        return (httplib.BAD_REQUEST, body, {},
                httplib.responses[httplib.BAD_REQUEST])
    # -- Object metadata --------------------------------------------------
    def _rest_namespace_test_container_test_object_metadata_system(
            self, method,
            url, body,
            headers):
        meta = {
            'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
            'size': '555',
            'mtime': '2011-01-25T22:01:49Z'
        }
        headers = {
            'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_test_20_26_20container_test_20_26_20object_metadata_system(
            self, method,
            url, body,
            headers):
        meta = {
            'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
            'size': '555',
            'mtime': '2011-01-25T22:01:49Z'
        }
        headers = {
            'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_test_container_test_object_metadata_user(self, method,
                                                                 url, body,
                                                                 headers):
        meta = {
            'md5': '6b21c4a111ac178feacf9ec9d0c71f17',
            'foo-bar': 'test 1',
            'bar-foo': 'test 2',
        }
        headers = {
            'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_test_20_26_20container_test_20_26_20object_metadata_user(
            self, method,
            url, body,
            headers):
        meta = {
            'md5': '6b21c4a111ac178feacf9ec9d0c71f17',
            'foo-bar': 'test 1',
            'bar-foo': 'test 2',
        }
        headers = {
            'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_test_container_not_found_metadata_system(self, method,
                                                                 url, body,
                                                                 headers):
        body = self.fixtures.load('not_found.xml')
        return (httplib.NOT_FOUND, body, {},
                httplib.responses[httplib.NOT_FOUND])
    # -- Object operations ------------------------------------------------
    def _rest_namespace_foo_bar_container_foo_bar_object(self, method, url,
                                                         body, headers):
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _rest_namespace_foo_20_26_20bar_container_foo_20_26_20bar_object(
            self, method, url,
            body, headers):
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _rest_namespace_foo_bar_container_foo_bar_object_NOT_FOUND(
            self, method,
            url, body,
            headers):
        body = self.fixtures.load('not_found.xml')
        return (httplib.NOT_FOUND, body, {},
                httplib.responses[httplib.NOT_FOUND])
    # -- Upload handlers.  Short names like "fbc"/"ftu" are the munged
    # container/object names used by the upload tests (presumably
    # foo_bar_container / foo_test_upload -- confirm against the test
    # case that drives them).
    def _rest_namespace_fbc_ftu_metadata_system(self, method, url, body,
                                                headers):
        # First lookup returns 404 ("object does not exist yet"); the
        # retry returns real metadata.  Flag is flipped on the class.
        if not self.upload_created:
            self.__class__.upload_created = True
            body = self.fixtures.load('not_found.xml')
            return (httplib.NOT_FOUND, body, {},
                    httplib.responses[httplib.NOT_FOUND])
        self.__class__.upload_created = False
        meta = {
            'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
            'size': '555',
            'mtime': '2011-01-25T22:01:49Z'
        }
        headers = {
            'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_fbc_ftu_metadata_user(self, method, url, body, headers):
        self.assertTrue('x-emc-meta' in headers)
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _rest_namespace_fbc_ftsdn_metadata_system(self, method, url, body,
                                                  headers):
        # Same first-404-then-OK dance as the plain upload above.
        if not self.upload_stream_created:
            self.__class__.upload_stream_created = True
            body = self.fixtures.load('not_found.xml')
            return (httplib.NOT_FOUND, body, {},
                    httplib.responses[httplib.NOT_FOUND])
        self.__class__.upload_stream_created = False
        meta = {
            'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
            'size': '555',
            'mtime': '2011-01-25T22:01:49Z'
        }
        headers = {
            'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_fbc_ftsdn(self, method, url, body, headers):
        # Streamed upload of a new object: the driver must POST the
        # first chunk and PUT (with a Range header) each later chunk.
        if self._upload_object_via_stream_first_request:
            self.assertTrue('Range' not in headers)
            self.assertEqual(method, 'POST')
            self._upload_object_via_stream_first_request = False
        else:
            self.assertTrue('Range' in headers)
            self.assertEqual(method, 'PUT')
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _rest_namespace_fbc_ftsdn_metadata_user(self, method, url, body,
                                                headers):
        self.assertTrue('x-emc-meta' in headers)
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _rest_namespace_fbc_ftsde_metadata_system(self, method, url, body,
                                                  headers):
        meta = {
            'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
            'size': '555',
            'mtime': '2011-01-25T22:01:49Z'
        }
        headers = {
            'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
    def _rest_namespace_fbc_ftsde(self, method, url, body, headers):
        # Streamed upload over an existing object: every request is PUT;
        # only the first one may omit the Range header.
        if self._upload_object_via_stream_first_request:
            self.assertTrue('Range' not in headers)
            self._upload_object_via_stream_first_request = False
        else:
            self.assertTrue('Range' in headers)
        self.assertEqual(method, 'PUT')
        return (httplib.OK, '', {}, httplib.responses[httplib.OK])
    def _rest_namespace_fbc_ftsd_metadata_system(self, method, url, body,
                                                 headers):
        meta = {
            'objectid': '322dce3763aadc41acc55ef47867b8d74e45c31d6643',
            'size': '555',
            'mtime': '2011-01-25T22:01:49Z'
        }
        headers = {
            'x-emc-meta': ', '.join([k + '=' + v for k, v in list(meta.items())])
        }
        return (httplib.OK, '', headers, httplib.responses[httplib.OK])
class AtmosMockRawResponse(MockRawResponse):
    """Raw-response counterpart of AtmosMockHttp, used by the tests that
    stream object data (downloads and uploads)."""
    fixtures = StorageFileFixtures('atmos')
    def _rest_namespace_foo_bar_container_foo_bar_object(self, method, url,
                                                         body, headers):
        # Successful download: 1000 bytes of generated payload.
        body = self._generate_random_data(1000)
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _rest_namespace_foo_20_26_20bar_container_foo_20_26_20bar_object(
            self, method, url,
            body, headers):
        # Same, for names containing " & " (URL-encoded in the method name).
        body = self._generate_random_data(1000)
        return (httplib.OK, body, {}, httplib.responses[httplib.OK])
    def _rest_namespace_foo_bar_container_foo_bar_object_NOT_FOUND(
            self, method,
            url, body,
            headers):
        # Download of a missing object: Atmos "not found" error body.
        body = self.fixtures.load('not_found.xml')
        return (httplib.NOT_FOUND, body, {},
                httplib.responses[httplib.NOT_FOUND])
    def _rest_namespace_fbc_ftu(self, method, url, body, headers):
        # Successful object creation during an upload test.
        return (httplib.CREATED, '', {}, httplib.responses[httplib.CREATED])
if __name__ == '__main__':
    # Allow running this test module directly from the command line.
    sys.exit(unittest.main())
|
apache-2.0
|
edxzw/edx-platform
|
openedx/core/djangoapps/user_api/tests/test_middleware.py
|
152
|
4415
|
"""Tests for user API middleware"""
from mock import Mock, patch
from unittest import TestCase
from django.http import HttpResponse
from django.test.client import RequestFactory
from student.tests.factories import UserFactory, AnonymousUserFactory
from ..tests.factories import UserCourseTagFactory
from ..middleware import UserTagsEventContextMiddleware
class TagsMiddlewareTest(TestCase):
    """
    Test the UserTagsEventContextMiddleware
    """
    def setUp(self):
        super(TagsMiddlewareTest, self).setUp()
        self.middleware = UserTagsEventContextMiddleware()
        self.user = UserFactory.create()
        self.other_user = UserFactory.create()
        self.course_id = 'mock/course/id'
        self.request_factory = RequestFactory()
        # TODO: Make it so we can use reverse. Appears to fail depending on the order in which tests are run
        #self.request = RequestFactory().get(reverse('courseware', kwargs={'course_id': self.course_id}))
        self.request = RequestFactory().get('/courses/{}/courseware'.format(self.course_id))
        self.request.user = self.user
        self.response = Mock(spec=HttpResponse)
        # Patch the tracker used by the middleware so the tests can
        # inspect the event context it pushes; undone at teardown.
        patcher = patch('openedx.core.djangoapps.user_api.middleware.tracker')
        self.tracker = patcher.start()
        self.addCleanup(patcher.stop)
    def process_request(self):
        """
        Execute process request using the request, and verify that it returns None
        so that the request continues.
        """
        # Middleware should pass request through
        self.assertEquals(self.middleware.process_request(self.request), None)
    def assertContextSetTo(self, context):
        """Asserts UserTagsEventContextMiddleware.CONTEXT_NAME matches ``context``"""
        self.tracker.get_tracker.return_value.enter_context.assert_called_with(  # pylint: disable=maybe-no-member
            UserTagsEventContextMiddleware.CONTEXT_NAME,
            context
        )
    def test_tag_context(self):
        # Only this user's tags for this course should land in the
        # context; other users' and other courses' tags are excluded.
        for key, value in (('int_value', 1), ('str_value', "two")):
            UserCourseTagFactory.create(
                course_id=self.course_id,
                user=self.user,
                key=key,
                value=value,
            )
        UserCourseTagFactory.create(
            course_id=self.course_id,
            user=self.other_user,
            key="other_user",
            value="other_user_value"
        )
        UserCourseTagFactory.create(
            course_id='other/course/id',
            user=self.user,
            key="other_course",
            value="other_course_value"
        )
        self.process_request()
        # Note: tag values are stringified in the context ('1', not 1).
        self.assertContextSetTo({
            'course_id': self.course_id,
            'course_user_tags': {
                'int_value': '1',
                'str_value': 'two',
            }
        })
    def test_no_tags(self):
        # A course URL with no tags still gets an (empty) tag dict.
        self.process_request()
        self.assertContextSetTo({'course_id': self.course_id, 'course_user_tags': {}})
    def test_not_course_url(self):
        # Non-course URLs get an empty context.
        self.request = self.request_factory.get('/not/a/course/url')
        self.request.user = self.user
        self.process_request()
        self.assertContextSetTo({})
    def test_invalid_course_id(self):
        # A malformed course id is treated like a non-course URL.
        self.request = self.request_factory.get('/courses/edX/101/')
        self.request.user = self.user
        self.process_request()
        self.assertContextSetTo({})
    def test_anonymous_user(self):
        # Anonymous users get the course id but no tags.
        self.request.user = AnonymousUserFactory()
        self.process_request()
        self.assertContextSetTo({'course_id': self.course_id, 'course_user_tags': {}})
    def test_remove_context(self):
        get_tracker = self.tracker.get_tracker  # pylint: disable=maybe-no-member
        exit_context = get_tracker.return_value.exit_context
        # The middleware should clean up the context when the request is done
        self.assertEquals(
            self.middleware.process_response(self.request, self.response),
            self.response
        )
        exit_context.assert_called_with(UserTagsEventContextMiddleware.CONTEXT_NAME)
        exit_context.reset_mock()
        # Even if the tracker blows up, the middleware should still return the response
        get_tracker.side_effect = Exception
        self.assertEquals(
            self.middleware.process_response(self.request, self.response),
            self.response
        )
|
agpl-3.0
|
crmccreary/openerp_server
|
openerp/addons/project_scrum/report/sprint_burndown.py
|
9
|
3896
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import StringIO
import pooler
from report.render import render
from report.interface import report_int
from datetime import datetime
import time
from pychart import *
import pychart.legend
import _burndown
class report_tasks(report_int):
    """Legacy (Python 2 / pychart) report service rendering a sprint
    burndown chart to PDF.

    An instance registered under 'report.scrum.sprint.burndown' serves
    the report; ``create`` is the entry point the report engine calls.
    """
    def create(self, cr, uid, ids, datas, context=None):
        """Render one burndown chart per sprint in ``ids``; return the
        resulting PDF as ``(pdf_bytes, 'pdf')``."""
        if context is None:
            context = {}
        io = StringIO.StringIO()
        canv = canvas.init(fname=io, format='pdf')
        canv.set_author("OpenERP")
        canv.set_title("Burndown Chart")
        pool = pooler.get_pool(cr.dbname)
        sprint_pool = pool.get('project.scrum.sprint')
        task_pool = pool.get('project.task')
        # For add the report header on the top of the report.
        tb = text_box.T(loc=(320, 500), text="/hL/15/bBurndown Chart", line_style=None)
        tb.draw()
        # X-axis tick formatter: pychart passes a raw timestamp.
        # "/a60{}" is pychart label markup (presumably a 60-degree
        # rotation -- TODO confirm against the pychart docs).
        int_to_date = lambda x: '/a60{}' + datetime(time.localtime(x).tm_year, time.localtime(x).tm_mon, time.localtime(x).tm_mday).strftime('%d %m %Y')
        for sprint in sprint_pool.browse(cr, uid, ids, context=context):
            task_ids = task_pool.search(cr, uid, [('sprint_id','=',sprint.id)], context=context)
            datas = _burndown.compute_burndown(cr, uid, task_ids, sprint.date_start, sprint.date_stop)
            # Highest remaining-hours value sets the Y-axis range.
            max_hour = reduce(lambda x,y: max(y[1],x), datas, 0) or None
            def _interval_get(*args):
                # Split the sprint's time span into up to 20 distinct
                # X-axis grid ticks (duplicate timestamps collapsed).
                result = []
                for i in range(20):
                    d = time.localtime(datas[0][0] + (((datas[-1][0]-datas[0][0])/20)*(i+1)))
                    res = time.mktime(d)
                    if (not result) or result[-1]<>res:
                        result.append(res)
                return result
            # Straight "ideal burndown" line from max hours down to zero.
            guideline__data=[(datas[0][0],max_hour), (datas[-1][0],0)]
            ar = area.T(x_grid_style=line_style.gray50_dash1,
                        x_axis=axis.X(label="Date", format=int_to_date),
                        y_axis=axis.Y(label="Burndown Chart - Planned Hours"),
                        x_grid_interval=_interval_get,
                        x_range = (datas[0][0],datas[-1][0]),
                        y_range = (0,max_hour),
                        legend = None,
                        size = (680,450))
            ar.add_plot(line_plot.T(data=guideline__data, line_style=line_style.red))
            ar.add_plot(line_plot.T(data=datas, line_style=line_style.green))
            entr1 = pychart.legend.Entry(label="guideline", line_style=line_style.red)
            entr2 = pychart.legend.Entry(label="burndownchart",line_style=line_style.green)
            legend = pychart.legend.T(nr_rows=2, inter_row_sep=5)
            legend.draw(ar,[entr1,entr2],canv)
            ar.draw(canv)
        canv.close()
        # Wrap the raw PDF bytes in the report engine's pdf holder.
        self.obj = _burndown.external_pdf(io.getvalue())
        self.obj.render()
        return (self.obj.pdf, 'pdf')
# Instantiating the class registers the report service with OpenERP
# under this name (side effect of report_int.__init__).
report_tasks('report.scrum.sprint.burndown')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
rvalyi/geraldo
|
site/newsite/site-geraldo/django/contrib/sitemaps/views.py
|
55
|
1891
|
from django.http import HttpResponse, Http404
from django.template import loader
from django.contrib.sites.models import Site
from django.core import urlresolvers
from django.utils.encoding import smart_str
from django.core.paginator import EmptyPage, PageNotAnInteger
def index(request, sitemaps):
    """Render the sitemap index: one URL per registered sitemap section,
    plus one URL per additional paginator page of that section."""
    current_site = Site.objects.get_current()
    scheme = 'https' if request.is_secure() else 'http'
    entries = []
    for section, site in sitemaps.items():
        # A section may be registered as a Sitemap class or an instance.
        instance = site() if callable(site) else site
        pages = instance.paginator.num_pages
        sitemap_url = urlresolvers.reverse(
            'django.contrib.sitemaps.views.sitemap',
            kwargs={'section': section})
        entries.append('%s://%s%s' % (scheme, current_site.domain, sitemap_url))
        # range() is empty when there is only one page, so no guard needed.
        for page in range(2, pages + 1):
            entries.append('%s://%s%s?p=%s' % (scheme, current_site.domain,
                                               sitemap_url, page))
    xml = loader.render_to_string('sitemap_index.xml', {'sitemaps': entries})
    return HttpResponse(xml, mimetype='application/xml')
def sitemap(request, sitemaps, section=None):
    """Render one sitemap page, optionally restricted to a single section.

    Raises Http404 for an unknown section, an empty page, or a
    non-integer ``p`` query parameter.
    """
    if section is not None:
        if section not in sitemaps:
            raise Http404("No sitemap available for section: %r" % section)
        maps = [sitemaps[section]]
    else:
        maps = sitemaps.values()
    page = request.GET.get("p", 1)
    urls = []
    for site in maps:
        try:
            # Sections may be registered as classes or instances.
            source = site() if callable(site) else site
            urls.extend(source.get_urls(page))
        except EmptyPage:
            raise Http404("Page %s empty" % page)
        except PageNotAnInteger:
            raise Http404("No page '%s'" % page)
    xml = smart_str(loader.render_to_string('sitemap.xml', {'urlset': urls}))
    return HttpResponse(xml, mimetype='application/xml')
|
lgpl-3.0
|
techhat/libcloud
|
libcloud/compute/drivers/ktucloud.py
|
56
|
3606
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from libcloud.compute.providers import Provider
from libcloud.compute.base import Node, NodeImage, NodeSize
from libcloud.compute.drivers.cloudstack import CloudStackNodeDriver
class KTUCloudNodeDriver(CloudStackNodeDriver):
    """Driver for the KT ucloud Compute platform (CloudStack based)."""

    # Sentinel disk-offering id meaning "no extra data disk".
    EMPTY_DISKOFFERINGID = '0'

    type = Provider.KTUCLOUD
    name = 'KTUCloud'
    website = 'https://ucloudbiz.olleh.com/'

    def list_images(self, location=None):
        """Return the available product types as NodeImage objects.

        :param location: optional zone; when given, restricts the
                         listing to that zone.
        """
        args = {
            'templatefilter': 'executable'
        }
        if location is not None:
            args['zoneid'] = location.id
        # Fix: ``args`` was previously built but never sent, so the
        # template filter and the zone restriction were silently ignored.
        imgs = self._sync_request(command='listAvailableProductTypes',
                                  params=args,
                                  method='GET')
        images = []
        for img in imgs['producttypes']:
            images.append(
                NodeImage(
                    img['serviceofferingid'],
                    img['serviceofferingdesc'],
                    self,
                    {'hypervisor': '',
                     'format': '',
                     'os': img['templatedesc'],
                     'templateid': img['templateid'],
                     'zoneid': img['zoneid']}
                )
            )
        return images

    def list_sizes(self, location=None):
        """Return the available disk offerings as NodeSize objects."""
        szs = self._sync_request('listAvailableProductTypes')
        sizes = []
        for sz in szs['producttypes']:
            # Product types with no disk offering map to the sentinel id.
            diskofferingid = sz.get('diskofferingid',
                                    self.EMPTY_DISKOFFERINGID)
            sizes.append(NodeSize(
                diskofferingid,
                sz['diskofferingdesc'],
                0, 0, 0, 0, self)
            )
        return sizes

    def create_node(self, name, size, image, location=None, **kwargs):
        """Deploy a virtual machine.

        :param name: display name for the new node.
        :param size: NodeSize whose id selects the disk offering.
        :param image: NodeImage whose id selects the service offering;
                      its ``extra`` supplies templateid/zoneid.
        :keyword usageplantype: billing plan; defaults to 'hourly'.
        """
        params = {'displayname': name,
                  'serviceofferingid': image.id,
                  'templateid': str(image.extra['templateid']),
                  'zoneid': str(image.extra['zoneid'])}
        usageplantype = kwargs.pop('usageplantype', None)
        if usageplantype is None:
            params['usageplantype'] = 'hourly'
        else:
            params['usageplantype'] = usageplantype
        # Only attach a disk offering when the size carries a real one.
        if size.id != self.EMPTY_DISKOFFERINGID:
            params['diskofferingid'] = size.id
        result = self._async_request(
            command='deployVirtualMachine',
            params=params,
            method='GET')
        node = result['virtualmachine']
        return Node(
            id=node['id'],
            name=node['displayname'],
            state=self.NODE_STATE_MAP[node['state']],
            public_ips=[],
            private_ips=[],
            driver=self,
            extra={
                'zoneid': image.extra['zoneid'],
                'ip_addresses': [],
                'forwarding_rules': [],
            }
        )
|
apache-2.0
|
rabipanda/tensorflow
|
tensorflow/contrib/distributions/python/kernel_tests/mixture_test.py
|
21
|
38286
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Mixture distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import numpy as np
from scipy import stats
from tensorflow.contrib import distributions
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
ds = distributions
def _swap_first_last_axes(array):
rank = len(array.shape)
transpose = [rank - 1] + list(range(0, rank - 1))
return array.transpose(transpose)
def _mixture_stddev_np(pi_vector, mu_vector, sigma_vector):
"""Computes the standard deviation of a univariate mixture distribution.
Acts upon `np.array`s (not `tf.Tensor`s).
Args:
pi_vector: A `np.array` of mixture weights. Shape `[batch, components]`.
mu_vector: A `np.array` of means. Shape `[batch, components]`
sigma_vector: A `np.array` of stddevs. Shape `[batch, components]`.
Returns:
A `np.array` containing the batch of standard deviations.
"""
pi_vector = np.expand_dims(pi_vector, axis=1)
mean_wa = np.matmul(pi_vector, np.expand_dims(mu_vector, axis=2))
var_wa = np.matmul(pi_vector, np.expand_dims(sigma_vector**2, axis=2))
mid_term = np.matmul(pi_vector, np.expand_dims(mu_vector**2, axis=2))
mixture_variance = (
np.squeeze(var_wa) + np.squeeze(mid_term) - np.squeeze(mean_wa**2))
return np.sqrt(mixture_variance)
@contextlib.contextmanager
def _test_capture_mvndiag_sample_outputs():
  """Use monkey-patching to capture the output of an MVNDiag _call_sample_n.

  Yields a list that accumulates every batch of samples drawn while the
  context is active; the original method is restored on exit.
  """
  data_container = []
  true_mvndiag_call_sample_n = (
      ds.MultivariateNormalDiag._call_sample_n)
  def _capturing_mvndiag_call_sample_n(
      self, sample_shape, seed, name, **kwargs):
    # Delegate to the real implementation, recording each result.
    samples = true_mvndiag_call_sample_n(
        self, sample_shape, seed, name, **kwargs)
    data_container.append(samples)
    return samples
  ds.MultivariateNormalDiag._call_sample_n = (
      _capturing_mvndiag_call_sample_n)
  # Fix: restore the patched method even if the `with` body raises;
  # previously an exception left the class permanently patched,
  # leaking state into later tests.
  try:
    yield data_container
  finally:
    ds.MultivariateNormalDiag._call_sample_n = (
        true_mvndiag_call_sample_n)
@contextlib.contextmanager
def _test_capture_normal_sample_outputs():
  """Use monkey-patching to capture the output of a Normal _call_sample_n.

  Yields a list that accumulates every batch of samples drawn while the
  context is active; the original method is restored on exit.
  """
  data_container = []
  true_normal_call_sample_n = ds.Normal._call_sample_n
  def _capturing_normal_call_sample_n(self, sample_shape, seed, name, **kwargs):
    # Delegate to the real implementation, recording each result.
    samples = true_normal_call_sample_n(
        self, sample_shape, seed, name, **kwargs)
    data_container.append(samples)
    return samples
  ds.Normal._call_sample_n = _capturing_normal_call_sample_n
  # Fix: restore the patched method even if the `with` body raises;
  # previously an exception left ds.Normal permanently patched,
  # leaking state into later tests.
  try:
    yield data_container
  finally:
    ds.Normal._call_sample_n = true_normal_call_sample_n
def make_univariate_mixture(batch_shape, num_components, use_static_graph):
  """Build a Mixture of `num_components` scalar Normal components.

  Mixture logits and component means/scales are drawn randomly.

  Args:
    batch_shape: batch shape of the mixture (list/tuple or Tensor).
    num_components: number of Normal components.
    use_static_graph: forwarded to `ds.Mixture`.

  Returns:
    A `ds.Mixture` distribution.
  """
  batch_shape = ops.convert_to_tensor(batch_shape, dtypes.int32)
  # NOTE(review): the -50. offset pushes all logits far negative;
  # presumably to exercise numerically extreme (but, after softmax,
  # still valid) mixture weights -- confirm before relying on it.
  logits = random_ops.random_uniform(
      array_ops.concat((batch_shape, [num_components]), axis=0),
      -1, 1, dtype=dtypes.float32) - 50.
  components = [
      ds.Normal(
          loc=random_ops.random_normal(batch_shape),
          scale=10 * random_ops.random_uniform(batch_shape))
      for _ in range(num_components)
  ]
  cat = ds.Categorical(logits, dtype=dtypes.int32)
  return ds.Mixture(cat, components, use_static_graph=use_static_graph)
def make_multivariate_mixture(batch_shape, num_components, event_shape,
                              use_static_graph, batch_shape_tensor=None):
  """Build a Mixture of `num_components` MultivariateNormalDiag components.

  Args:
    batch_shape: static batch shape, used for shape hints.
    num_components: number of MVNDiag components.
    event_shape: event shape of each component.
    use_static_graph: forwarded to `ds.Mixture`.
    batch_shape_tensor: optional dynamic batch shape; defaults to
      `batch_shape`.  Passing a placeholder here lets tests exercise
      partially-unknown shapes.

  Returns:
    A `ds.Mixture` distribution.
  """
  if batch_shape_tensor is None:
    batch_shape_tensor = batch_shape
  batch_shape_tensor = ops.convert_to_tensor(batch_shape_tensor, dtypes.int32)
  logits = random_ops.random_uniform(
      array_ops.concat((batch_shape_tensor, [num_components]), 0),
      -1, 1, dtype=dtypes.float32) - 50.
  # Re-attach the static shape so shape inference still works when the
  # dynamic batch shape came from a placeholder.
  logits.set_shape(
      tensor_shape.TensorShape(batch_shape).concatenate(num_components))
  static_batch_and_event_shape = (
      tensor_shape.TensorShape(batch_shape).concatenate(event_shape))
  event_shape = ops.convert_to_tensor(event_shape, dtypes.int32)
  batch_and_event_shape = array_ops.concat((batch_shape_tensor, event_shape), 0)
  def create_component():
    # One MVNDiag with random loc/scale; static shapes re-attached as above.
    loc = random_ops.random_normal(batch_and_event_shape)
    scale_diag = 10 * random_ops.random_uniform(batch_and_event_shape)
    loc.set_shape(static_batch_and_event_shape)
    scale_diag.set_shape(static_batch_and_event_shape)
    return ds.MultivariateNormalDiag(
        loc=loc, scale_diag=scale_diag)
  components = [create_component() for _ in range(num_components)]
  cat = ds.Categorical(logits, dtype=dtypes.int32)
  return ds.Mixture(cat, components, use_static_graph=use_static_graph)
class MixtureTest(test.TestCase):
use_static_graph = False
def testShapes(self):
with self.test_session():
for batch_shape in ([], [1], [2, 3, 4]):
dist = make_univariate_mixture(batch_shape, num_components=10,
use_static_graph=self.use_static_graph)
self.assertAllEqual(batch_shape, dist.batch_shape)
self.assertAllEqual(batch_shape, dist.batch_shape_tensor().eval())
self.assertAllEqual([], dist.event_shape)
self.assertAllEqual([], dist.event_shape_tensor().eval())
for event_shape in ([1], [2]):
dist = make_multivariate_mixture(
batch_shape, num_components=10, event_shape=event_shape,
use_static_graph=self.use_static_graph)
self.assertAllEqual(batch_shape, dist.batch_shape)
self.assertAllEqual(batch_shape, dist.batch_shape_tensor().eval())
self.assertAllEqual(event_shape, dist.event_shape)
self.assertAllEqual(event_shape, dist.event_shape_tensor().eval())
def testBrokenShapesStatic(self):
with self.assertRaisesWithPredicateMatch(ValueError,
r"cat.num_classes != len"):
ds.Mixture(
ds.Categorical([0.1, 0.5]), # 2 classes
[ds.Normal(loc=1.0, scale=2.0)],
use_static_graph=self.use_static_graph)
with self.assertRaisesWithPredicateMatch(
ValueError, r"\(\) and \(2,\) are not compatible"):
# The value error is raised because the batch shapes of the
# Normals are not equal. One is a scalar, the other is a
# vector of size (2,).
ds.Mixture(
ds.Categorical([-0.5, 0.5]), # scalar batch
[
ds.Normal(
loc=1.0, scale=2.0), # scalar dist
ds.Normal(
loc=[1.0, 1.0], scale=[2.0, 2.0])
],
use_static_graph=self.use_static_graph)
with self.assertRaisesWithPredicateMatch(ValueError, r"Could not infer"):
cat_logits = array_ops.placeholder(shape=[1, None], dtype=dtypes.float32)
ds.Mixture(
ds.Categorical(cat_logits),
[ds.Normal(
loc=[1.0], scale=[2.0])],
use_static_graph=self.use_static_graph)
def testBrokenShapesDynamic(self):
with self.test_session():
d0_param = array_ops.placeholder(dtype=dtypes.float32)
d1_param = array_ops.placeholder(dtype=dtypes.float32)
d = ds.Mixture(
ds.Categorical([0.1, 0.2]), [
ds.Normal(
loc=d0_param, scale=d0_param), ds.Normal(
loc=d1_param, scale=d1_param)
],
validate_args=True,
use_static_graph=self.use_static_graph)
if self.use_static_graph:
error_string = r"Shapes of all inputs must match"
else:
error_string = r"batch shape must match"
with self.assertRaisesOpError(error_string):
d.sample().eval(feed_dict={d0_param: [2.0, 3.0], d1_param: [1.0]})
with self.assertRaisesOpError(error_string):
d.sample().eval(feed_dict={d0_param: [2.0, 3.0], d1_param: 1.0})
def testBrokenTypes(self):
with self.assertRaisesWithPredicateMatch(TypeError, "Categorical"):
ds.Mixture(None, [], use_static_graph=self.use_static_graph)
cat = ds.Categorical([0.3, 0.2])
# components must be a list of distributions
with self.assertRaisesWithPredicateMatch(
TypeError, "all .* must be Distribution instances"):
ds.Mixture(cat, [None], use_static_graph=self.use_static_graph)
with self.assertRaisesWithPredicateMatch(TypeError, "same dtype"):
ds.Mixture(
cat, [
ds.Normal(loc=[1.0], scale=[2.0]),
ds.Normal(loc=[np.float16(1.0)],
scale=[np.float16(2.0)]),
], use_static_graph=self.use_static_graph)
with self.assertRaisesWithPredicateMatch(ValueError, "non-empty list"):
ds.Mixture(ds.Categorical([0.3, 0.2]), None,
use_static_graph=self.use_static_graph)
# TODO(ebrevdo): once distribution Domains have been added, add a
# test to ensure that the domains of the distributions in a
# mixture are checked for equivalence.
def testMeanUnivariate(self):
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_univariate_mixture(
batch_shape=batch_shape, num_components=2,
use_static_graph=self.use_static_graph)
mean = dist.mean()
self.assertEqual(batch_shape, mean.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_means = [d.mean() for d in dist.components]
mean_value, cat_probs_value, dist_means_value = sess.run(
[mean, cat_probs, dist_means])
self.assertEqual(batch_shape, mean_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
true_mean = sum(
[c_p * m for (c_p, m) in zip(cat_probs_value, dist_means_value)])
self.assertAllClose(true_mean, mean_value)
def testMeanMultivariate(self):
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_multivariate_mixture(
batch_shape=batch_shape, num_components=2, event_shape=(4,),
use_static_graph=self.use_static_graph)
mean = dist.mean()
self.assertEqual(batch_shape + (4,), mean.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_means = [d.mean() for d in dist.components]
mean_value, cat_probs_value, dist_means_value = sess.run(
[mean, cat_probs, dist_means])
self.assertEqual(batch_shape + (4,), mean_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
# Add a new innermost dimension for broadcasting to mvn vector shape
cat_probs_value = [np.expand_dims(c_p, -1) for c_p in cat_probs_value]
true_mean = sum(
[c_p * m for (c_p, m) in zip(cat_probs_value, dist_means_value)])
self.assertAllClose(true_mean, mean_value)
def testStddevShapeUnivariate(self):
num_components = 2
# This is the same shape test which is done in 'testMeanUnivariate'.
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_univariate_mixture(
batch_shape=batch_shape, num_components=num_components,
use_static_graph=self.use_static_graph)
dev = dist.stddev()
self.assertEqual(batch_shape, dev.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_devs = [d.stddev() for d in dist.components]
dist_means = [d.mean() for d in dist.components]
res = sess.run([dev, cat_probs, dist_devs, dist_means])
dev_value, cat_probs_values, dist_devs_values, dist_means_values = res
# Manual computation of stddev.
batch_shape_res = cat_probs_values.shape[:-1]
event_shape_res = dist_devs_values[0].shape[len(batch_shape_res):]
stacked_mean_res = np.stack(dist_means_values, -1)
stacked_dev_res = np.stack(dist_devs_values, -1)
# Broadcast cat probs over event dimensions.
for _ in range(len(event_shape_res)):
cat_probs_values = np.expand_dims(cat_probs_values, len(batch_shape))
cat_probs_values = cat_probs_values + np.zeros_like(stacked_dev_res) # pylint: disable=g-no-augmented-assignment
# Perform stddev computation on a flattened batch.
flat_batch_manual_dev = _mixture_stddev_np(
np.reshape(cat_probs_values, [-1, num_components]),
np.reshape(stacked_mean_res, [-1, num_components]),
np.reshape(stacked_dev_res, [-1, num_components]))
# Reshape to full shape.
full_shape_res = list(batch_shape_res) + list(event_shape_res)
manual_dev = np.reshape(flat_batch_manual_dev, full_shape_res)
self.assertEqual(batch_shape, dev_value.shape)
self.assertAllClose(manual_dev, dev_value)
  def testStddevShapeMultivariate(self):
    """stddev() of a vector-event mixture matches a NumPy recomputation."""
    num_components = 2
    # This is the same shape test which is done in 'testMeanMultivariate'.
    with self.test_session() as sess:
      for batch_shape in ((), (2,), (2, 3)):
        dist = make_multivariate_mixture(
            batch_shape=batch_shape,
            num_components=num_components,
            event_shape=(4,),
            use_static_graph=self.use_static_graph)
        dev = dist.stddev()
        self.assertEqual(batch_shape + (4,), dev.get_shape())
        cat_probs = nn_ops.softmax(dist.cat.logits)
        dist_devs = [d.stddev() for d in dist.components]
        dist_means = [d.mean() for d in dist.components]
        res = sess.run([dev, cat_probs, dist_devs, dist_means])
        dev_value, cat_probs_values, dist_devs_values, dist_means_values = res
        # Manual computation of stddev.
        batch_shape_res = cat_probs_values.shape[:-1]
        event_shape_res = dist_devs_values[0].shape[len(batch_shape_res):]
        # Stack per-component stats along a trailing component axis.
        stacked_mean_res = np.stack(dist_means_values, -1)
        stacked_dev_res = np.stack(dist_devs_values, -1)
        # Broadcast cat probs over event dimensions.
        for _ in range(len(event_shape_res)):
          cat_probs_values = np.expand_dims(cat_probs_values, len(batch_shape))
        cat_probs_values = cat_probs_values + np.zeros_like(stacked_dev_res) # pylint: disable=g-no-augmented-assignment
        # Perform stddev computation on a flattened batch.
        flat_batch_manual_dev = _mixture_stddev_np(
            np.reshape(cat_probs_values, [-1, num_components]),
            np.reshape(stacked_mean_res, [-1, num_components]),
            np.reshape(stacked_dev_res, [-1, num_components]))
        # Reshape to full shape.
        full_shape_res = list(batch_shape_res) + list(event_shape_res)
        manual_dev = np.reshape(flat_batch_manual_dev, full_shape_res)
        # Unlike the univariate case, the result shape includes event dims.
        self.assertEqual(tuple(full_shape_res), dev_value.shape)
        self.assertAllClose(manual_dev, dev_value)
def testSpecificStddevValue(self):
cat_probs = np.array([0.5, 0.5])
component_means = np.array([-10, 0.1])
component_devs = np.array([0.05, 2.33])
ground_truth_stddev = 5.3120805
mixture_dist = ds.Mixture(
cat=ds.Categorical(probs=cat_probs),
components=[
ds.Normal(loc=component_means[0],
scale=component_devs[0]),
ds.Normal(loc=component_means[1],
scale=component_devs[1]),
],
use_static_graph=self.use_static_graph)
mix_dev = mixture_dist.stddev()
with self.test_session() as sess:
actual_stddev = sess.run(mix_dev)
self.assertAllClose(actual_stddev, ground_truth_stddev)
  def testProbScalarUnivariate(self):
    """prob() of a scalar-batch univariate mixture matches sum_i pi_i p_i(x).

    Checked for vector, scalar, and matrix sample shapes.
    """
    with self.test_session() as sess:
      dist = make_univariate_mixture(batch_shape=[], num_components=2,
                                     use_static_graph=self.use_static_graph)
      for x in [
          np.array(
              [1.0, 2.0], dtype=np.float32), np.array(
                  1.0, dtype=np.float32),
          np.random.randn(3, 4).astype(np.float32)
      ]:
        p_x = dist.prob(x)
        self.assertEqual(x.shape, p_x.get_shape())
        # Wrap logits in a list to give softmax a batch dim, then strip it.
        cat_probs = nn_ops.softmax([dist.cat.logits])[0]
        dist_probs = [d.prob(x) for d in dist.components]
        p_x_value, cat_probs_value, dist_probs_value = sess.run(
            [p_x, cat_probs, dist_probs])
        self.assertEqual(x.shape, p_x_value.shape)
        total_prob = sum(c_p_value * d_p_value
                         for (c_p_value, d_p_value
                             ) in zip(cat_probs_value, dist_probs_value))
        self.assertAllClose(total_prob, p_x_value)
  def testProbScalarMultivariate(self):
    """prob() of a scalar-batch, 3-dim mixture matches sum_i pi_i p_i(x)."""
    with self.test_session() as sess:
      dist = make_multivariate_mixture(
          batch_shape=[], num_components=2, event_shape=[3],
          use_static_graph=self.use_static_graph)
      for x in [
          np.array(
              [[-1.0, 0.0, 1.0], [0.5, 1.0, -0.3]], dtype=np.float32), np.array(
                  [-1.0, 0.0, 1.0], dtype=np.float32),
          np.random.randn(2, 2, 3).astype(np.float32)
      ]:
        p_x = dist.prob(x)
        # The trailing event dimension is consumed by prob().
        self.assertEqual(x.shape[:-1], p_x.get_shape())
        # Wrap logits in a list to give softmax a batch dim, then strip it.
        cat_probs = nn_ops.softmax([dist.cat.logits])[0]
        dist_probs = [d.prob(x) for d in dist.components]
        p_x_value, cat_probs_value, dist_probs_value = sess.run(
            [p_x, cat_probs, dist_probs])
        self.assertEqual(x.shape[:-1], p_x_value.shape)
        total_prob = sum(c_p_value * d_p_value
                         for (c_p_value, d_p_value
                             ) in zip(cat_probs_value, dist_probs_value))
        self.assertAllClose(total_prob, p_x_value)
def testProbBatchUnivariate(self):
with self.test_session() as sess:
dist = make_univariate_mixture(batch_shape=[2, 3], num_components=2,
use_static_graph=self.use_static_graph)
for x in [
np.random.randn(2, 3).astype(np.float32),
np.random.randn(4, 2, 3).astype(np.float32)
]:
p_x = dist.prob(x)
self.assertEqual(x.shape, p_x.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_probs = [d.prob(x) for d in dist.components]
p_x_value, cat_probs_value, dist_probs_value = sess.run(
[p_x, cat_probs, dist_probs])
self.assertEqual(x.shape, p_x_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
total_prob = sum(c_p_value * d_p_value
for (c_p_value, d_p_value
) in zip(cat_probs_value, dist_probs_value))
self.assertAllClose(total_prob, p_x_value)
  def testProbBatchMultivariate(self):
    """prob() over a [2, 3] batch of 4-dim mixtures matches the manual mix."""
    with self.test_session() as sess:
      dist = make_multivariate_mixture(
          batch_shape=[2, 3], num_components=2, event_shape=[4],
          use_static_graph=self.use_static_graph)
      for x in [
          np.random.randn(2, 3, 4).astype(np.float32),
          np.random.randn(4, 2, 3, 4).astype(np.float32)
      ]:
        p_x = dist.prob(x)
        # The trailing event dimension is consumed by prob().
        self.assertEqual(x.shape[:-1], p_x.get_shape())
        cat_probs = nn_ops.softmax(dist.cat.logits)
        dist_probs = [d.prob(x) for d in dist.components]
        p_x_value, cat_probs_value, dist_probs_value = sess.run(
            [p_x, cat_probs, dist_probs])
        self.assertEqual(x.shape[:-1], p_x_value.shape)
        # Move the component axis first so we can zip over components.
        cat_probs_value = _swap_first_last_axes(cat_probs_value)
        total_prob = sum(c_p_value * d_p_value
                         for (c_p_value, d_p_value
                             ) in zip(cat_probs_value, dist_probs_value))
        self.assertAllClose(total_prob, p_x_value)
  def testSampleScalarBatchUnivariate(self):
    """Mixture samples equal the captured component draws (scalar batch).

    _test_capture_normal_sample_outputs records each component's raw
    samples; the mixture output at the indices where the categorical
    picked component c must equal that component's captured samples.
    """
    with self.test_session() as sess:
      num_components = 3
      batch_shape = []
      dist = make_univariate_mixture(
          batch_shape=batch_shape, num_components=num_components,
          use_static_graph=self.use_static_graph)
      n = 4
      with _test_capture_normal_sample_outputs() as component_samples:
        samples = dist.sample(n, seed=123)
      self.assertEqual(samples.dtype, dtypes.float32)
      self.assertEqual((4,), samples.get_shape())
      # Re-draw the categorical with the same seed used inside sample().
      cat_samples = dist.cat.sample(n, seed=123)
      sample_values, cat_sample_values, dist_sample_values = sess.run(
          [samples, cat_samples, component_samples])
      self.assertEqual((4,), sample_values.shape)
      for c in range(num_components):
        which_c = np.where(cat_sample_values == c)[0]
        size_c = which_c.size
        # Scalar Batch univariate case: batch_size == 1, rank 1
        if self.use_static_graph:
          which_dist_samples = dist_sample_values[c][which_c]
        else:
          which_dist_samples = dist_sample_values[c][:size_c]
        self.assertAllClose(which_dist_samples, sample_values[which_c])
  # Test that sampling with the same seed twice gives the same results.
  def testSampleMultipleTimes(self):
    """Two identically built and seeded mixtures produce identical samples.

    Both the graph-level seed and the op-level sample seed are reset
    before each build so the two draws can agree.
    """
    # 5 component mixture.
    logits = [-10.0, -5.0, 0.0, 5.0, 10.0]
    mus = [-5.0, 0.0, 5.0, 4.0, 20.0]
    sigmas = [0.1, 5.0, 3.0, 0.2, 4.0]
    with self.test_session():
      n = 100
      random_seed.set_random_seed(654321)
      components = [
          ds.Normal(
              loc=mu, scale=sigma) for mu, sigma in zip(mus, sigmas)
      ]
      cat = ds.Categorical(
          logits, dtype=dtypes.int32, name="cat1")
      dist1 = ds.Mixture(cat, components, name="mixture1",
                         use_static_graph=self.use_static_graph)
      samples1 = dist1.sample(n, seed=123456).eval()
      # Rebuild an identical mixture from scratch under the same seeds.
      random_seed.set_random_seed(654321)
      components2 = [
          ds.Normal(
              loc=mu, scale=sigma) for mu, sigma in zip(mus, sigmas)
      ]
      cat2 = ds.Categorical(
          logits, dtype=dtypes.int32, name="cat2")
      dist2 = ds.Mixture(cat2, components2, name="mixture2",
                         use_static_graph=self.use_static_graph)
      samples2 = dist2.sample(n, seed=123456).eval()
      self.assertAllClose(samples1, samples2)
  def testSampleScalarBatchMultivariate(self):
    """Mixture samples equal captured component draws (scalar batch, 2-dim)."""
    with self.test_session() as sess:
      num_components = 3
      dist = make_multivariate_mixture(
          batch_shape=[], num_components=num_components, event_shape=[2],
          use_static_graph=self.use_static_graph)
      n = 4
      with _test_capture_mvndiag_sample_outputs() as component_samples:
        samples = dist.sample(n, seed=123)
      self.assertEqual(samples.dtype, dtypes.float32)
      self.assertEqual((4, 2), samples.get_shape())
      # Re-draw the categorical with the same seed used inside sample().
      cat_samples = dist.cat.sample(n, seed=123)
      sample_values, cat_sample_values, dist_sample_values = sess.run(
          [samples, cat_samples, component_samples])
      self.assertEqual((4, 2), sample_values.shape)
      for c in range(num_components):
        which_c = np.where(cat_sample_values == c)[0]
        size_c = which_c.size
        # Scalar Batch multivariate case: batch_size == 1, rank 2
        if self.use_static_graph:
          which_dist_samples = dist_sample_values[c][which_c, :]
        else:
          which_dist_samples = dist_sample_values[c][:size_c, :]
        self.assertAllClose(which_dist_samples, sample_values[which_c, :])
  def testSampleBatchUnivariate(self):
    """Mixture samples equal captured component draws ([2, 3] batch)."""
    with self.test_session() as sess:
      num_components = 3
      dist = make_univariate_mixture(
          batch_shape=[2, 3], num_components=num_components,
          use_static_graph=self.use_static_graph)
      n = 4
      with _test_capture_normal_sample_outputs() as component_samples:
        samples = dist.sample(n, seed=123)
      self.assertEqual(samples.dtype, dtypes.float32)
      self.assertEqual((4, 2, 3), samples.get_shape())
      # Re-draw the categorical with the same seed used inside sample().
      cat_samples = dist.cat.sample(n, seed=123)
      sample_values, cat_sample_values, dist_sample_values = sess.run(
          [samples, cat_samples, component_samples])
      self.assertEqual((4, 2, 3), sample_values.shape)
      for c in range(num_components):
        # Indices (sample, batch0, batch1) where component c was selected.
        which_c_s, which_c_b0, which_c_b1 = np.where(cat_sample_values == c)
        size_c = which_c_s.size
        # Batch univariate case: batch_size == [2, 3], rank 3
        if self.use_static_graph:
          which_dist_samples = dist_sample_values[c][which_c_s, which_c_b0,
                                                     which_c_b1]
        else:
          which_dist_samples = dist_sample_values[c][range(size_c), which_c_b0,
                                                     which_c_b1]
        self.assertAllClose(which_dist_samples,
                            sample_values[which_c_s, which_c_b0, which_c_b1])
  def _testSampleBatchMultivariate(self, fully_known_batch_shape):
    """Shared sampling check for 4-dim mixtures over a [2, 3] batch.

    Args:
      fully_known_batch_shape: if False, the batch shape is only partially
        known statically and the concrete [2, 3] shape is fed at run time
        through a placeholder.
    """
    with self.test_session() as sess:
      num_components = 3
      if fully_known_batch_shape:
        batch_shape = [2, 3]
        batch_shape_tensor = [2, 3]
      else:
        batch_shape = [None, 3]
        batch_shape_tensor = array_ops.placeholder(dtype=dtypes.int32)
      dist = make_multivariate_mixture(
          batch_shape=batch_shape,
          num_components=num_components, event_shape=[4],
          batch_shape_tensor=batch_shape_tensor,
          use_static_graph=self.use_static_graph)
      n = 5
      with _test_capture_mvndiag_sample_outputs() as component_samples:
        samples = dist.sample(n, seed=123)
      self.assertEqual(samples.dtype, dtypes.float32)
      if fully_known_batch_shape:
        self.assertEqual((5, 2, 3, 4), samples.get_shape())
      else:
        self.assertEqual([5, None, 3, 4], samples.get_shape().as_list())
      # Re-draw the categorical with the same seed used inside sample().
      cat_samples = dist.cat.sample(n, seed=123)
      if fully_known_batch_shape:
        feed_dict = {}
      else:
        feed_dict = {batch_shape_tensor: [2, 3]}
      sample_values, cat_sample_values, dist_sample_values = sess.run(
          [samples, cat_samples, component_samples],
          feed_dict=feed_dict)
      self.assertEqual((5, 2, 3, 4), sample_values.shape)
      for c in range(num_components):
        # Indices (sample, batch0, batch1) where component c was selected.
        which_c_s, which_c_b0, which_c_b1 = np.where(cat_sample_values == c)
        size_c = which_c_s.size
        # Batch multivariate case: batch_size == [2, 3], rank 4 (multivariate)
        if self.use_static_graph:
          which_dist_samples = dist_sample_values[c][which_c_s, which_c_b0,
                                                     which_c_b1, :]
        else:
          which_dist_samples = dist_sample_values[c][range(size_c), which_c_b0,
                                                     which_c_b1, :]
        self.assertAllClose(which_dist_samples,
                            sample_values[which_c_s, which_c_b0, which_c_b1, :])
  def testSampleBatchMultivariateFullyKnownBatchShape(self):
    """Sampling check with a statically known batch shape."""
    self._testSampleBatchMultivariate(fully_known_batch_shape=True)

  def testSampleBatchMultivariateNotFullyKnownBatchShape(self):
    """Sampling check with a partially unknown (placeholder-fed) batch shape."""
    self._testSampleBatchMultivariate(fully_known_batch_shape=False)
def testEntropyLowerBoundMultivariate(self):
with self.test_session() as sess:
for batch_shape in ((), (2,), (2, 3)):
dist = make_multivariate_mixture(
batch_shape=batch_shape, num_components=2, event_shape=(4,),
use_static_graph=self.use_static_graph)
entropy_lower_bound = dist.entropy_lower_bound()
self.assertEqual(batch_shape, entropy_lower_bound.get_shape())
cat_probs = nn_ops.softmax(dist.cat.logits)
dist_entropy = [d.entropy() for d in dist.components]
entropy_lower_bound_value, cat_probs_value, dist_entropy_value = (
sess.run([entropy_lower_bound, cat_probs, dist_entropy]))
self.assertEqual(batch_shape, entropy_lower_bound_value.shape)
cat_probs_value = _swap_first_last_axes(cat_probs_value)
# entropy_lower_bound = sum_i pi_i entropy_i
# for i in num_components, batchwise.
true_entropy_lower_bound = sum(
[c_p * m for (c_p, m) in zip(cat_probs_value, dist_entropy_value)])
self.assertAllClose(true_entropy_lower_bound, entropy_lower_bound_value)
  def testCdfScalarUnivariate(self):
    """Tests CDF against scipy for a mixture of seven gaussians."""
    # Construct a mixture of gaussians with seven components.
    n_components = 7
    # pre-softmax mixture probabilities.
    mixture_weight_logits = np.random.uniform(
        low=-1, high=1, size=(n_components,)).astype(np.float32)
    def _scalar_univariate_softmax(x):
      # Numerically stable softmax: shift by the max before exponentiating.
      e_x = np.exp(x - np.max(x))
      return e_x / e_x.sum()
    # Construct the ds.Mixture object.
    mixture_weights = _scalar_univariate_softmax(mixture_weight_logits)
    means = [np.random.uniform(low=-10, high=10, size=()).astype(np.float32)
             for _ in range(n_components)]
    # All components use unit scale.
    sigmas = [np.ones(shape=(), dtype=np.float32) for _ in range(n_components)]
    cat_tf = ds.Categorical(probs=mixture_weights)
    components_tf = [ds.Normal(loc=mu, scale=sigma)
                     for (mu, sigma) in zip(means, sigmas)]
    mixture_tf = ds.Mixture(cat=cat_tf, components=components_tf,
                            use_static_graph=self.use_static_graph)
    x_tensor = array_ops.placeholder(shape=(), dtype=dtypes.float32)
    # These are two test cases to verify.
    xs_to_check = [
        np.array(1.0, dtype=np.float32),
        np.array(np.random.randn()).astype(np.float32)
    ]
    # Carry out the test for both d.cdf and exp(d.log_cdf).
    x_cdf_tf = mixture_tf.cdf(x_tensor)
    x_log_cdf_tf = mixture_tf.log_cdf(x_tensor)
    with self.test_session() as sess:
      for x_feed in xs_to_check:
        x_cdf_tf_result, x_log_cdf_tf_result = sess.run(
            [x_cdf_tf, x_log_cdf_tf], feed_dict={x_tensor: x_feed})
        # Compute the cdf with scipy.
        scipy_component_cdfs = [stats.norm.cdf(x=x_feed, loc=mu, scale=sigma)
                                for (mu, sigma) in zip(means, sigmas)]
        # Mixture CDF is the weight-vector dot the per-component CDFs.
        scipy_cdf_result = np.dot(mixture_weights,
                                  np.array(scipy_component_cdfs))
        self.assertAllClose(x_cdf_tf_result, scipy_cdf_result)
        self.assertAllClose(np.exp(x_log_cdf_tf_result), scipy_cdf_result)
  def testCdfBatchUnivariate(self):
    """Tests against scipy for a (batch of) mixture(s) of seven gaussians."""
    n_components = 7
    batch_size = 5
    # Pre-softmax mixture weights, one row per batch member.
    mixture_weight_logits = np.random.uniform(
        low=-1, high=1, size=(batch_size, n_components)).astype(np.float32)
    def _batch_univariate_softmax(x):
      # Row-wise softmax (logits are in [-1, 1], so no max-shift is applied).
      e_x = np.exp(x)
      e_x_sum = np.expand_dims(np.sum(e_x, axis=1), axis=1)
      return e_x / np.tile(e_x_sum, reps=[1, x.shape[1]])
    psize = (batch_size,)
    mixture_weights = _batch_univariate_softmax(mixture_weight_logits)
    means = [np.random.uniform(low=-10, high=10, size=psize).astype(np.float32)
             for _ in range(n_components)]
    # All components use unit scale.
    sigmas = [np.ones(shape=psize, dtype=np.float32)
              for _ in range(n_components)]
    cat_tf = ds.Categorical(probs=mixture_weights)
    components_tf = [ds.Normal(loc=mu, scale=sigma)
                     for (mu, sigma) in zip(means, sigmas)]
    mixture_tf = ds.Mixture(cat=cat_tf, components=components_tf,
                            use_static_graph=self.use_static_graph)
    x_tensor = array_ops.placeholder(shape=psize, dtype=dtypes.float32)
    xs_to_check = [
        np.array([1.0, 5.9, -3, 0.0, 0.0], dtype=np.float32),
        np.random.randn(batch_size).astype(np.float32)
    ]
    # Carry out the test for both d.cdf and exp(d.log_cdf).
    x_cdf_tf = mixture_tf.cdf(x_tensor)
    x_log_cdf_tf = mixture_tf.log_cdf(x_tensor)
    with self.test_session() as sess:
      for x_feed in xs_to_check:
        x_cdf_tf_result, x_log_cdf_tf_result = sess.run(
            [x_cdf_tf, x_log_cdf_tf],
            feed_dict={x_tensor: x_feed})
        # Compute the cdf with scipy.
        scipy_component_cdfs = [stats.norm.cdf(x=x_feed, loc=mu, scale=sigma)
                                for (mu, sigma) in zip(means, sigmas)]
        # Pair each component's weight column with its per-batch cdf values.
        weights_and_cdfs = zip(np.transpose(mixture_weights, axes=[1, 0]),
                               scipy_component_cdfs)
        final_cdf_probs_per_component = [
            np.multiply(c_p_value, d_cdf_value)
            for (c_p_value, d_cdf_value) in weights_and_cdfs]
        scipy_cdf_result = np.sum(final_cdf_probs_per_component, axis=0)
        self.assertAllClose(x_cdf_tf_result, scipy_cdf_result)
        self.assertAllClose(np.exp(x_log_cdf_tf_result), scipy_cdf_result)
def testSampleBimixGamma(self):
"""Tests a bug in the underlying tf.Gamma op.
Mixture's use of dynamic partition requires `random_gamma` correctly returns
an empty `Tensor`.
"""
with self.test_session():
gm = ds.Mixture(
cat=ds.Categorical(probs=[.3, .7]),
components=[ds.Gamma(1., 2.),
ds.Gamma(2., 1.)],
use_static_graph=self.use_static_graph)
x_ = gm.sample().eval()
self.assertAllEqual([], x_.shape)
class MixtureStaticSampleTest(MixtureTest):
  """Re-runs every MixtureTest case using the static-graph sampling path."""
  use_static_graph = True
class MixtureBenchmark(test.Benchmark):
  """Benchmarks Mixture.sample over grids of component/batch/feature sizes."""
  # Flipped to True by the MixtureStaticSampleBenchmark subclass.
  use_static_graph = False

  def _runSamplingBenchmark(self, name, create_distribution, use_gpu,
                            num_components, batch_size, num_features,
                            sample_size):
    """Builds one mixture via create_distribution and benchmarks sample().

    Results go through run_op_benchmark and are also vlog'd as a
    tab-separated row.
    """
    config = config_pb2.ConfigProto()
    config.allow_soft_placement = True
    np.random.seed(127)
    with session.Session(config=config, graph=ops.Graph()) as sess:
      random_seed.set_random_seed(0)
      with ops.device("/device:GPU:0" if use_gpu else "/cpu:0"):
        mixture = create_distribution(
            num_components=num_components,
            batch_size=batch_size,
            num_features=num_features)
        sample_op = mixture.sample(sample_size).op
        sess.run(variables.global_variables_initializer())
        reported = self.run_op_benchmark(
            sess,
            sample_op,
            min_iters=10,
            name=("%s_%s_components_%d_batch_%d_features_%d_sample_%d" %
                  (name, use_gpu, num_components, batch_size, num_features,
                   sample_size)))
        logging.vlog(2, "\t".join(["%s", "%d", "%d", "%d", "%d", "%g"]) % (
            use_gpu, num_components, batch_size, num_features, sample_size,
            reported["wall_time"]))

  def benchmarkSamplingMVNDiag(self):
    """Benchmarks sampling from mixtures of diagonal-covariance MVNs."""
    logging.vlog(
        2, "mvn_diag\tuse_gpu\tcomponents\tbatch\tfeatures\tsample\twall_time")

    def create_distribution(batch_size, num_components, num_features):
      # Random categorical logits and per-component mean/scale variables.
      cat = ds.Categorical(
          logits=np.random.randn(batch_size, num_components))
      mus = [
          variables.Variable(np.random.randn(batch_size, num_features))
          for _ in range(num_components)
      ]
      sigmas = [
          variables.Variable(np.random.rand(batch_size, num_features))
          for _ in range(num_components)
      ]
      components = list(
          ds.MultivariateNormalDiag(
              loc=mu, scale_diag=sigma) for (mu, sigma) in zip(mus, sigmas))
      return ds.Mixture(cat, components, use_static_graph=self.use_static_graph)

    for use_gpu in False, True:
      if use_gpu and not test.is_gpu_available():
        continue
      for num_components in 1, 8, 16:
        for batch_size in 1, 32:
          for num_features in 1, 64, 512:
            for sample_size in 1, 32, 128:
              self._runSamplingBenchmark(
                  "mvn_diag",
                  create_distribution=create_distribution,
                  use_gpu=use_gpu,
                  num_components=num_components,
                  batch_size=batch_size,
                  num_features=num_features,
                  sample_size=sample_size)

  def benchmarkSamplingMVNFull(self):
    """Benchmarks sampling from mixtures of full-covariance MVNs."""
    logging.vlog(
        2, "mvn_full\tuse_gpu\tcomponents\tbatch\tfeatures\tsample\twall_time")

    def psd(x):
      """Construct batch-wise PSD matrices."""
      return np.stack([np.dot(np.transpose(z), z) for z in x])

    def create_distribution(batch_size, num_components, num_features):
      cat = ds.Categorical(
          logits=np.random.randn(batch_size, num_components))
      mus = [
          variables.Variable(np.random.randn(batch_size, num_features))
          for _ in range(num_components)
      ]
      sigmas = [
          variables.Variable(
              psd(np.random.rand(batch_size, num_features, num_features)))
          for _ in range(num_components)
      ]
      components = list(
          ds.MultivariateNormalTriL(
              loc=mu, scale_tril=linalg_ops.cholesky(sigma))
          for (mu, sigma) in zip(mus, sigmas))
      return ds.Mixture(cat, components, use_static_graph=self.use_static_graph)

    for use_gpu in False, True:
      if use_gpu and not test.is_gpu_available():
        continue
      for num_components in 1, 8, 16:
        for batch_size in 1, 32:
          for num_features in 1, 64, 512:
            for sample_size in 1, 32, 128:
              self._runSamplingBenchmark(
                  "mvn_full",
                  create_distribution=create_distribution,
                  use_gpu=use_gpu,
                  num_components=num_components,
                  batch_size=batch_size,
                  num_features=num_features,
                  sample_size=sample_size)
class MixtureStaticSampleBenchmark(MixtureBenchmark):
  """Runs the sampling benchmarks using the static-graph sampling path."""
  use_static_graph = True
if __name__ == "__main__":
  # Run the tests (and any requested benchmarks) when executed directly.
  test.main()
|
apache-2.0
|
jmw7912/wat-0016-kernel-2.6.37
|
Documentation/networking/cxacru-cf.py
|
14668
|
1626
|
#!/usr/bin/env python
# Copyright 2009 Simon Arlott
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 2 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59
# Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
# Usage: cxacru-cf.py < cxacru-cf.bin
# Output: values string suitable for the sysfs adsl_config attribute
#
# Warning: cxacru-cf.bin with MD5 hash cdbac2689969d5ed5d4850f117702110
# contains mis-aligned values which will stop the modem from being able
# to make a connection. If the first and last two bytes are removed then
# the values become valid, but the modulation will be forced to ANSI
# T1.413 only which may not be appropriate.
#
# The original binary format is a packed list of le32 values.
import sys
import struct


def convert(in_stream, out_stream, err_stream):
    """Convert a packed list of le32 values to an adsl_config string.

    Reads 4-byte little-endian unsigned integers from in_stream until EOF
    and writes them to out_stream as space-separated "<index-hex>=<value>"
    pairs, terminated by a newline -- the format expected by the cxacru
    sysfs adsl_config attribute.

    Returns 0 on success, or 1 if the input ends with a partial
    (non-4-byte) record, in which case the output line is terminated and
    the error detail is written to err_stream.
    """
    index = 0
    while True:
        buf = in_stream.read(4)
        if len(buf) == 0:
            break
        elif len(buf) != 4:
            # Terminate the partial output line before reporting the error.
            out_stream.write("\n")
            err_stream.write("Error: read {0} not 4 bytes\n".format(len(buf)))
            return 1
        if index > 0:
            out_stream.write(" ")
        out_stream.write("{0:x}={1}".format(index, struct.unpack("<I", buf)[0]))
        index += 1
    out_stream.write("\n")
    return 0


if __name__ == "__main__":
    # struct.unpack needs bytes: on Python 3 read the raw binary stdin
    # (sys.stdin.buffer); on Python 2 sys.stdin already yields bytes.
    status = convert(getattr(sys.stdin, "buffer", sys.stdin),
                     sys.stdout, sys.stderr)
    if status:
        sys.exit(status)
|
gpl-2.0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.