| repo_name | path | copies | size | content | license |
|---|---|---|---|---|---|
amygdala/tensorflow-workshop | workshop_sections/getting_started/xor/xor/xor_summaries_softmax.py | 1 | 3586 | # Copyright 2016 Google Inc. All Rights Reserved.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import math
import numpy as np
import tensorflow as tf
try:
    # Python 2: use the lazy izip; Python 3's built-in zip is already lazy.
    from itertools import izip as zip
except ImportError:
    pass
tf.logging.set_verbosity(tf.logging.INFO)
def make_graph(features, labels, num_hidden=8):
hidden_weights = tf.Variable(tf.truncated_normal(
[2, num_hidden],
stddev=1/math.sqrt(2)
))
tf.summary.image('hidden_weights', tf.expand_dims([hidden_weights], -1))
# Shape [4, num_hidden]
hidden_activations = tf.nn.relu(tf.matmul(features, hidden_weights))
output_weights = tf.Variable(tf.truncated_normal(
[num_hidden, 2],
stddev=1/math.sqrt(num_hidden)
))
# Shape [4, 2]
logits = tf.matmul(hidden_activations, output_weights)
cross_entropy = tf.nn.sparse_softmax_cross_entropy_with_logits(logits=logits, labels=labels)
loss = tf.reduce_mean(cross_entropy)
tf.summary.scalar('loss', loss)
# Shape [4]
predictions = tf.argmax(tf.nn.softmax(logits), 1)
accuracy, update_acc = tf.contrib.metrics.streaming_accuracy(predictions, labels)
tf.summary.scalar('accuracy', accuracy)
gs = tf.Variable(0, trainable=False)
optimizer = tf.train.GradientDescentOptimizer(0.2)
grads_and_vars = optimizer.compute_gradients(loss)
    # Unzip the (gradient, variable) pairs; element 0 is the gradient tensors.
    gradients = list(zip(*grads_and_vars))[0]
tf.summary.histogram('gradients', gradients)
train_op = optimizer.apply_gradients(grads_and_vars, global_step=gs)
return train_op, loss, gs, update_acc
def main(output_dir, summaries_every, num_steps):
graph = tf.Graph()
with graph.as_default():
features = tf.placeholder(tf.float32, shape=[4, 2])
labels = tf.placeholder(tf.int32, shape=[4])
train_op, loss, gs, update_acc = make_graph(features, labels)
init = tf.global_variables_initializer()
init_local = tf.local_variables_initializer()
summary_op = tf.summary.merge_all()
writer = tf.summary.FileWriter(output_dir, graph=graph, flush_secs=1)
with tf.Session(graph=graph) as sess:
init.run()
init_local.run()
step = 0
xy = np.array([
[True, False],
[True, True],
[False, False],
[False, True]
], dtype=np.float)
y_ = np.array([True, False, False, True], dtype=np.int32)
while step < num_steps:
_, _, step, loss_value, summaries = sess.run(
[train_op, update_acc, gs, loss, summary_op],
feed_dict={features: xy, labels: y_}
)
if step % summaries_every == 0:
writer.add_summary(summaries, global_step=step)
# tf.logging.info('Wrote summaries at step {}'.format(step))
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--num-steps', type=int, default=5000)
parser.add_argument(
'--output-dir',
help='GCS or local path for summary writing',
required=True
)
parser.add_argument('--summaries-every', type=int, default=5)
args = parser.parse_args()
print("Training for %s steps" % args.num_steps)
main(args.output_dir, args.summaries_every, args.num_steps)
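# A minimal invocation sketch (the log directory below is hypothetical):
#   python xor_summaries_softmax.py --output-dir /tmp/xor_logs --summaries-every 10
#   tensorboard --logdir /tmp/xor_logs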
| apache-2.0 |
ltilve/ChromiumGStreamerBackend | tools/telemetry/third_party/gsutilz/third_party/boto/tests/unit/ec2/test_instance.py | 114 | 11050 | #!/usr/bin/env python
import base64
from tests.compat import unittest, mock
from tests.unit import AWSMockServiceTestCase
from boto.ec2.connection import EC2Connection
DESCRIBE_INSTANCE_VPC = br"""<?xml version="1.0" encoding="UTF-8"?>
<DescribeInstancesResponse xmlns="http://ec2.amazonaws.com/doc/2012-10-01/">
<requestId>c6132c74-b524-4884-87f5-0f4bde4a9760</requestId>
<reservationSet>
<item>
<reservationId>r-72ef4a0a</reservationId>
<ownerId>184906166255</ownerId>
<groupSet/>
<instancesSet>
<item>
<instanceId>i-instance</instanceId>
<imageId>ami-1624987f</imageId>
<instanceState>
<code>16</code>
<name>running</name>
</instanceState>
<privateDnsName/>
<dnsName/>
<reason/>
<keyName>mykeypair</keyName>
<amiLaunchIndex>0</amiLaunchIndex>
<productCodes/>
<instanceType>m1.small</instanceType>
<launchTime>2012-12-14T23:48:37.000Z</launchTime>
<placement>
<availabilityZone>us-east-1d</availabilityZone>
<groupName/>
<tenancy>default</tenancy>
</placement>
<kernelId>aki-88aa75e1</kernelId>
<monitoring>
<state>disabled</state>
</monitoring>
<subnetId>subnet-0dc60667</subnetId>
<vpcId>vpc-id</vpcId>
<privateIpAddress>10.0.0.67</privateIpAddress>
<sourceDestCheck>true</sourceDestCheck>
<groupSet>
<item>
<groupId>sg-id</groupId>
<groupName>WebServerSG</groupName>
</item>
</groupSet>
<architecture>x86_64</architecture>
<rootDeviceType>ebs</rootDeviceType>
<rootDeviceName>/dev/sda1</rootDeviceName>
<blockDeviceMapping>
<item>
<deviceName>/dev/sda1</deviceName>
<ebs>
<volumeId>vol-id</volumeId>
<status>attached</status>
<attachTime>2012-12-14T23:48:43.000Z</attachTime>
<deleteOnTermination>true</deleteOnTermination>
</ebs>
</item>
</blockDeviceMapping>
<virtualizationType>paravirtual</virtualizationType>
<clientToken>foo</clientToken>
<tagSet>
<item>
<key>Name</key>
<value/>
</item>
</tagSet>
<hypervisor>xen</hypervisor>
<networkInterfaceSet>
<item>
<networkInterfaceId>eni-id</networkInterfaceId>
<subnetId>subnet-id</subnetId>
<vpcId>vpc-id</vpcId>
<description>Primary network interface</description>
<ownerId>ownerid</ownerId>
<status>in-use</status>
<privateIpAddress>10.0.0.67</privateIpAddress>
<sourceDestCheck>true</sourceDestCheck>
<groupSet>
<item>
<groupId>sg-id</groupId>
<groupName>WebServerSG</groupName>
</item>
</groupSet>
<attachment>
<attachmentId>eni-attach-id</attachmentId>
<deviceIndex>0</deviceIndex>
<status>attached</status>
<attachTime>2012-12-14T23:48:37.000Z</attachTime>
<deleteOnTermination>true</deleteOnTermination>
</attachment>
<privateIpAddressesSet>
<item>
<privateIpAddress>10.0.0.67</privateIpAddress>
<primary>true</primary>
</item>
<item>
<privateIpAddress>10.0.0.54</privateIpAddress>
<primary>false</primary>
</item>
<item>
<privateIpAddress>10.0.0.55</privateIpAddress>
<primary>false</primary>
</item>
</privateIpAddressesSet>
</item>
</networkInterfaceSet>
<ebsOptimized>false</ebsOptimized>
</item>
</instancesSet>
</item>
</reservationSet>
</DescribeInstancesResponse>
"""
RUN_INSTANCE_RESPONSE = br"""
<RunInstancesResponse xmlns="http://ec2.amazonaws.com/doc/2012-06-01/">
<requestId>ad4b83c2-f606-4c39-90c6-5dcc5be823e1</requestId>
<reservationId>r-c5cef7a7</reservationId>
<ownerId>ownerid</ownerId>
<groupSet>
<item>
<groupId>sg-id</groupId>
<groupName>SSH</groupName>
</item>
</groupSet>
<instancesSet>
<item>
<instanceId>i-ff0f1299</instanceId>
<imageId>ami-ed65ba84</imageId>
<instanceState>
<code>0</code>
<name>pending</name>
</instanceState>
<privateDnsName/>
<dnsName/>
<reason/>
<keyName>awskeypair</keyName>
<amiLaunchIndex>0</amiLaunchIndex>
<productCodes/>
<instanceType>t1.micro</instanceType>
<launchTime>2012-05-30T19:21:18.000Z</launchTime>
<placement>
<availabilityZone>us-east-1a</availabilityZone>
<groupName/>
<tenancy>default</tenancy>
</placement>
<kernelId>aki-b6aa75df</kernelId>
<monitoring>
<state>disabled</state>
</monitoring>
<groupSet>
<item>
<groupId>sg-99a710f1</groupId>
<groupName>SSH</groupName>
</item>
</groupSet>
<stateReason>
<code>pending</code>
<message>pending</message>
</stateReason>
<architecture>i386</architecture>
<rootDeviceType>ebs</rootDeviceType>
<rootDeviceName>/dev/sda1</rootDeviceName>
<blockDeviceMapping/>
<virtualizationType>paravirtual</virtualizationType>
<clientToken/>
<hypervisor>xen</hypervisor>
<networkInterfaceSet/>
<iamInstanceProfile>
<arn>arn:aws:iam::ownerid:instance-profile/myinstanceprofile</arn>
<id>iamid</id>
</iamInstanceProfile>
</item>
</instancesSet>
</RunInstancesResponse>
"""
class TestRunInstanceResponseParsing(unittest.TestCase):
def testIAMInstanceProfileParsedCorrectly(self):
ec2 = EC2Connection(aws_access_key_id='aws_access_key_id',
aws_secret_access_key='aws_secret_access_key')
mock_response = mock.Mock()
mock_response.read.return_value = RUN_INSTANCE_RESPONSE
mock_response.status = 200
ec2.make_request = mock.Mock(return_value=mock_response)
reservation = ec2.run_instances(image_id='ami-12345')
self.assertEqual(len(reservation.instances), 1)
instance = reservation.instances[0]
self.assertEqual(instance.image_id, 'ami-ed65ba84')
# iamInstanceProfile has an ID element, so we want to make sure
# that this does not map to instance.id (which should be the
# id of the ec2 instance).
self.assertEqual(instance.id, 'i-ff0f1299')
self.assertDictEqual(
instance.instance_profile,
{'arn': ('arn:aws:iam::ownerid:'
'instance-profile/myinstanceprofile'),
'id': 'iamid'})
class TestRunInstances(AWSMockServiceTestCase):
connection_class = EC2Connection
def default_body(self):
# This is a dummy response
return b"""
<DescribeLaunchConfigurationsResponse>
</DescribeLaunchConfigurationsResponse>
"""
def test_run_instances_user_data(self):
self.set_http_response(status_code=200)
response = self.service_connection.run_instances(
image_id='123456',
instance_type='m1.large',
security_groups=['group1', 'group2'],
user_data='#!/bin/bash'
)
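        # Note: run_instances base64-encodes user_data before sending, which
        # is why the expected UserData parameter below is the b64 encoding of
        # '#!/bin/bash'.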
self.assert_request_parameters({
'Action': 'RunInstances',
'ImageId': '123456',
'InstanceType': 'm1.large',
'UserData': base64.b64encode(b'#!/bin/bash').decode('utf-8'),
'MaxCount': 1,
'MinCount': 1,
'SecurityGroup.1': 'group1',
'SecurityGroup.2': 'group2',
}, ignore_params_values=[
'Version', 'AWSAccessKeyId', 'SignatureMethod', 'SignatureVersion',
'Timestamp'
])
class TestDescribeInstances(AWSMockServiceTestCase):
connection_class = EC2Connection
def default_body(self):
return DESCRIBE_INSTANCE_VPC
def test_multiple_private_ip_addresses(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.get_all_reservations()
self.assertEqual(len(api_response), 1)
instances = api_response[0].instances
self.assertEqual(len(instances), 1)
instance = instances[0]
self.assertEqual(len(instance.interfaces), 1)
interface = instance.interfaces[0]
self.assertEqual(len(interface.private_ip_addresses), 3)
addresses = interface.private_ip_addresses
self.assertEqual(addresses[0].private_ip_address, '10.0.0.67')
self.assertTrue(addresses[0].primary)
self.assertEqual(addresses[1].private_ip_address, '10.0.0.54')
self.assertFalse(addresses[1].primary)
self.assertEqual(addresses[2].private_ip_address, '10.0.0.55')
self.assertFalse(addresses[2].primary)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause |
buqing2009/MissionPlanner | Lib/warnings.py | 80 | 14444 | """Python part of the warnings subsystem."""
# Note: function level imports should *not* be used
# in this module as it may cause import lock deadlock.
# See bug 683658.
import linecache
import sys
import types
__all__ = ["warn", "showwarning", "formatwarning", "filterwarnings",
"resetwarnings", "catch_warnings"]
def warnpy3k(message, category=None, stacklevel=1):
"""Issue a deprecation warning for Python 3.x related changes.
Warnings are omitted unless Python is started with the -3 option.
"""
if sys.py3kwarning:
if category is None:
category = DeprecationWarning
warn(message, category, stacklevel+1)
def _show_warning(message, category, filename, lineno, file=None, line=None):
"""Hook to write a warning to a file; replace if you like."""
if file is None:
file = sys.stderr
try:
file.write(formatwarning(message, category, filename, lineno, line))
except IOError:
pass # the file (probably stderr) is invalid - this warning gets lost.
# Keep a working version around in case the deprecation of the old API is
# triggered.
showwarning = _show_warning
def formatwarning(message, category, filename, lineno, line=None):
"""Function to format a warning the standard way."""
s = "%s:%s: %s: %s\n" % (filename, lineno, category.__name__, message)
line = linecache.getline(filename, lineno) if line is None else line
if line:
line = line.strip()
s += " %s\n" % line
return s
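# For example, formatwarning("bad value", UserWarning, "foo.py", 10) (a
# hypothetical call) yields "foo.py:10: UserWarning: bad value\n", plus the
# stripped source line if linecache can find foo.py.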
def filterwarnings(action, message="", category=Warning, module="", lineno=0,
append=0):
"""Insert an entry into the list of warnings filters (at the front).
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'message' -- a regex that the warning message must match
'category' -- a class that the warning must be a subclass of
'module' -- a regex that the module name must match
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
import re
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(message, basestring), "message must be a string"
assert isinstance(category, (type, types.ClassType)), \
"category must be a class"
assert issubclass(category, Warning), "category must be a Warning subclass"
assert isinstance(module, basestring), "module must be a string"
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, re.compile(message, re.I), category,
re.compile(module), lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
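# Example (a sketch; the module regex is hypothetical):
#   filterwarnings("error", category=DeprecationWarning, module=r"mypkg\.")
# turns DeprecationWarnings raised from modules matching "mypkg." into
# exceptions, by inserting the new filter tuple at the front of `filters`.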
def simplefilter(action, category=Warning, lineno=0, append=0):
"""Insert a simple entry into the list of warnings filters (at the front).
A simple filter matches all modules and messages.
'action' -- one of "error", "ignore", "always", "default", "module",
or "once"
'category' -- a class that the warning must be a subclass of
'lineno' -- an integer line number, 0 matches all warnings
'append' -- if true, append to the list of filters
"""
assert action in ("error", "ignore", "always", "default", "module",
"once"), "invalid action: %r" % (action,)
assert isinstance(lineno, int) and lineno >= 0, \
"lineno must be an int >= 0"
item = (action, None, category, None, lineno)
if append:
filters.append(item)
else:
filters.insert(0, item)
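# Example (sketch): simplefilter("ignore", DeprecationWarning) silences
# DeprecationWarning (and its subclasses) from any module, for any message.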
def resetwarnings():
"""Clear the list of warning filters, so that no filters are active."""
filters[:] = []
class _OptionError(Exception):
"""Exception used by option processing helpers."""
pass
# Helper to process -W options passed via sys.warnoptions
def _processoptions(args):
for arg in args:
try:
_setoption(arg)
except _OptionError, msg:
print >>sys.stderr, "Invalid -W option ignored:", msg
# Helper for _processoptions()
def _setoption(arg):
import re
parts = arg.split(':')
if len(parts) > 5:
raise _OptionError("too many fields (max 5): %r" % (arg,))
while len(parts) < 5:
parts.append('')
action, message, category, module, lineno = [s.strip()
for s in parts]
action = _getaction(action)
message = re.escape(message)
category = _getcategory(category)
module = re.escape(module)
if module:
module = module + '$'
if lineno:
try:
lineno = int(lineno)
if lineno < 0:
raise ValueError
except (ValueError, OverflowError):
raise _OptionError("invalid lineno %r" % (lineno,))
else:
lineno = 0
filterwarnings(action, message, category, module, lineno)
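# For example, the -W string "ignore::DeprecationWarning:mymodule:10" (module
# name hypothetical) splits into five fields and ends up calling
# filterwarnings("ignore", "", DeprecationWarning, "mymodule$", 10).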
# Helper for _setoption()
def _getaction(action):
if not action:
return "default"
if action == "all": return "always" # Alias
for a in ('default', 'always', 'ignore', 'module', 'once', 'error'):
if a.startswith(action):
return a
raise _OptionError("invalid action: %r" % (action,))
# Helper for _setoption()
def _getcategory(category):
import re
if not category:
return Warning
if re.match("^[a-zA-Z0-9_]+$", category):
try:
cat = eval(category)
except NameError:
raise _OptionError("unknown warning category: %r" % (category,))
else:
i = category.rfind(".")
module = category[:i]
klass = category[i+1:]
try:
m = __import__(module, None, None, [klass])
except ImportError:
raise _OptionError("invalid module name: %r" % (module,))
try:
cat = getattr(m, klass)
except AttributeError:
raise _OptionError("unknown warning category: %r" % (category,))
if not issubclass(cat, Warning):
raise _OptionError("invalid warning category: %r" % (category,))
return cat
# Code typically replaced by _warnings
def warn(message, category=None, stacklevel=1):
"""Issue a warning, or maybe ignore it or raise an exception."""
# Check if message is already a Warning object
if isinstance(message, Warning):
category = message.__class__
# Check category argument
if category is None:
category = UserWarning
assert issubclass(category, Warning)
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
if '__name__' in globals:
module = globals['__name__']
else:
module = "<string>"
filename = globals.get('__file__')
if filename:
fnl = filename.lower()
if fnl.endswith((".pyc", ".pyo")):
filename = filename[:-1]
else:
if module == "__main__":
try:
filename = sys.argv[0]
except AttributeError:
# embedded interpreters don't have sys.argv, see bug #839151
filename = '__main__'
if not filename:
filename = module
registry = globals.setdefault("__warningregistry__", {})
warn_explicit(message, category, filename, lineno, module, registry,
globals)
def warn_explicit(message, category, filename, lineno,
module=None, registry=None, module_globals=None):
lineno = int(lineno)
if module is None:
module = filename or "<unknown>"
if module[-3:].lower() == ".py":
module = module[:-3] # XXX What about leading pathname?
if registry is None:
registry = {}
if isinstance(message, Warning):
text = str(message)
category = message.__class__
else:
text = message
message = category(message)
key = (text, category, lineno)
# Quick test for common case
if registry.get(key):
return
# Search the filters
for item in filters:
action, msg, cat, mod, ln = item
if ((msg is None or msg.match(text)) and
issubclass(category, cat) and
(mod is None or mod.match(module)) and
(ln == 0 or lineno == ln)):
break
else:
action = defaultaction
# Early exit actions
if action == "ignore":
registry[key] = 1
return
# Prime the linecache for formatting, in case the
# "file" is actually in a zipfile or something.
linecache.getlines(filename, module_globals)
if action == "error":
raise message
# Other actions
if action == "once":
registry[key] = 1
oncekey = (text, category)
if onceregistry.get(oncekey):
return
onceregistry[oncekey] = 1
elif action == "always":
pass
elif action == "module":
registry[key] = 1
altkey = (text, category, 0)
if registry.get(altkey):
return
registry[altkey] = 1
elif action == "default":
registry[key] = 1
else:
# Unrecognized actions are errors
raise RuntimeError(
"Unrecognized action (%r) in warnings.filters:\n %s" %
(action, item))
# Print message and context
showwarning(message, category, filename, lineno)
class WarningMessage(object):
"""Holds the result of a single showwarning() call."""
_WARNING_DETAILS = ("message", "category", "filename", "lineno", "file",
"line")
def __init__(self, message, category, filename, lineno, file=None,
line=None):
local_values = locals()
for attr in self._WARNING_DETAILS:
setattr(self, attr, local_values[attr])
self._category_name = category.__name__ if category else None
def __str__(self):
return ("{message : %r, category : %r, filename : %r, lineno : %s, "
"line : %r}" % (self.message, self._category_name,
self.filename, self.lineno, self.line))
class catch_warnings(object):
"""A context manager that copies and restores the warnings filter upon
exiting the context.
The 'record' argument specifies whether warnings should be captured by a
custom implementation of warnings.showwarning() and be appended to a list
returned by the context manager. Otherwise None is returned by the context
manager. The objects appended to the list are arguments whose attributes
mirror the arguments to showwarning().
The 'module' argument is to specify an alternative module to the module
named 'warnings' and imported under that name. This argument is only useful
when testing the warnings module itself.
"""
def __init__(self, record=False, module=None):
"""Specify whether to record warnings and if an alternative module
should be used other than sys.modules['warnings'].
For compatibility with Python 3.0, please consider all arguments to be
keyword-only.
"""
self._record = record
self._module = sys.modules['warnings'] if module is None else module
self._entered = False
def __repr__(self):
args = []
if self._record:
args.append("record=True")
if self._module is not sys.modules['warnings']:
args.append("module=%r" % self._module)
name = type(self).__name__
return "%s(%s)" % (name, ", ".join(args))
def __enter__(self):
if self._entered:
raise RuntimeError("Cannot enter %r twice" % self)
self._entered = True
self._filters = self._module.filters
self._module.filters = self._filters[:]
self._showwarning = self._module.showwarning
if self._record:
log = []
def showwarning(*args, **kwargs):
log.append(WarningMessage(*args, **kwargs))
self._module.showwarning = showwarning
return log
else:
return None
def __exit__(self, *exc_info):
if not self._entered:
raise RuntimeError("Cannot exit %r without entering first" % self)
self._module.filters = self._filters
self._module.showwarning = self._showwarning
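# Example usage (a sketch):
#   with catch_warnings(record=True) as w:
#       simplefilter("always")
#       warn("deprecated", DeprecationWarning)
#       assert issubclass(w[0].category, DeprecationWarning)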
# filters contains a sequence of filter 5-tuples
# The components of the 5-tuple are:
# - an action: error, ignore, always, default, module, or once
# - a compiled regex that must match the warning message
# - a class representing the warning category
# - a compiled regex that must match the module that is being warned
# - a line number for the line being warned about, or 0 to mean any line
# If either of the compiled regexes is None, it matches anything.
_warnings_defaults = False
try:
from _warnings import (filters, default_action, once_registry,
warn, warn_explicit)
defaultaction = default_action
onceregistry = once_registry
_warnings_defaults = True
except ImportError:
filters = []
defaultaction = "default"
onceregistry = {}
# Module initialization
_processoptions(sys.warnoptions)
if not _warnings_defaults:
silence = [ImportWarning, PendingDeprecationWarning]
# Don't silence DeprecationWarning if -3 or -Q was used.
if not sys.py3kwarning and not sys.flags.division_warning:
silence.append(DeprecationWarning)
for cls in silence:
simplefilter("ignore", category=cls)
bytes_warning = sys.flags.bytes_warning
if bytes_warning > 1:
bytes_action = "error"
elif bytes_warning:
bytes_action = "default"
else:
bytes_action = "ignore"
simplefilter(bytes_action, category=BytesWarning, append=1)
del _warnings_defaults
| gpl-3.0 |
loriab/qcdb | qcdb/periodictable.py | 2 | 78250 | #
# @BEGIN LICENSE
#
# QCDB: quantum chemistry common driver and databases
#
# Copyright (c) 2011-2017 The QCDB Developers.
#
# The copyrights for code used from other parties are included in
# the corresponding files.
#
# This file is part of QCDB.
#
# QCDB is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3.
#
# QCDB is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with QCDB; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# @END LICENSE
#
"""Elemental masses (most common isotope), symbols, and atomic numbers from psi4.
"""
_temp_element = ["GHOST", "HYDROGEN", "HELIUM", "LITHIUM", "BERYLLIUM",
"BORON", "CARBON", "NITROGEN", "OXYGEN", "FLUORINE",
"NEON", "SODIUM", "MAGNESIUM", "ALUMINUM", "SILICON",
"PHOSPHORUS", "SULFUR", "CHLORINE", "ARGON", "POTASSIUM",
"CALCIUM", "SCANDIUM", "TITANIUM", "VANADIUM", "CHROMIUM",
"MANGANESE", "IRON", "COBALT", "NICKEL", "COPPER",
"ZINC", "GALLIUM", "GERMANIUM", "ARSENIC", "SELENIUM",
"BROMINE", "KRYPTON", "RUBIDIUM", "STRONTIUM", "YTTRIUM",
"ZIRCONIUM", "NIOBIUM", "MOLYBDENUM", "TECHNETIUM", "RUTHENIUM",
"RHODIUM", "PALLADIUM", "SILVER", "CADMIUM", "INDIUM",
"TIN", "ANTIMONY", "TELLURIUM", "IODINE", "XENON",
"CESIUM", "BARIUM", "LANTHANUM", "CERIUM", "PRASEODYMIUM",
"NEODYMIUM", "PROMETHIUM", "SAMARIUM", "EUROPIUM", "GADOLINIUM",
"TERBIUM", "DYSPROSIUM", "HOLMIUM", "ERBIUM", "THULIUM",
"YTTERBIUM", "LUTETIUM", "HAFNIUM", "TANTALUM", "TUNGSTEN",
"RHENIUM", "OSMIUM", "IRIDIUM", "PLATINUM", "GOLD",
"MERCURY", "THALLIUM", "LEAD", "BISMUTH", "POLONIUM",
"ASTATINE", "RADON", "FRANCIUM", "RADIUM", "ACTINIUM",
"THORIUM", "PROTACTINIUM", "URANIUM", "NEPTUNIUM", "PLUTONIUM",
"AMERICIUM", "CURIUM", "BERKELIUM", "CALIFORNIUM", "EINSTEINIUM",
"FERMIUM", "MENDELEVIUM", "NOBELIUM", "LAWRENCIUM" "RUTHERFORDIUM",
"DUBNIUM", "SEABORGIUM", "BOHRIUM"]
_temp_symbol = ["X", "H", "HE", "LI", "BE", "B", "C", "N", "O", "F", "NE", "NA", "MG",
"AL", "SI", "P", "S", "CL", "AR", "K", "CA", "SC", "TI", "V", "CR", "MN", "FE", "CO",
"NI", "CU", "ZN", "GA", "GE", "AS", "SE", "BR", "KR", "RB", "SR", "Y", "ZR", "NB",
"MO", "TC", "RU", "RH", "PD", "AG", "CD", "IN", "SN", "SB", "TE", "I", "XE", "CS",
"BA", "LA", "CE", "PR", "ND", "PM", "SM", "EU", "GD", "TB", "DY", "HO", "ER", "TM",
"YB", "LU", "HF", "TA", "W", "RE", "OS", "IR", "PT", "AU", "HG", "TL", "PB", "BI",
"PO", "AT", "RN", "FR", "RA", "AC", "TH", "PA", "U", "NP", "PU", "AM", "CM", "BK",
"CF", "ES", "FM", "MD", "NO", "LR", "RF", "DB", "SG", "BH", "HS", "MT", "DS", "RG",
"UUB", "UUT", "UUQ", "UUP", "UUH", "UUS", "UUO"]
_temp_z = list(range(0, 108))
_temp_mass = [
0., 1.00782503207, 4.00260325415, 7.016004548, 9.012182201, 11.009305406,
12, 14.00307400478, 15.99491461956, 18.998403224, 19.99244017542,
22.98976928087, 23.985041699, 26.981538627, 27.97692653246, 30.973761629,
31.972070999, 34.968852682, 39.96238312251, 38.963706679, 39.962590983,
44.955911909, 47.947946281, 50.943959507, 51.940507472, 54.938045141,
55.934937475, 58.933195048, 57.935342907, 62.929597474, 63.929142222,
68.925573587, 73.921177767, 74.921596478, 79.916521271, 78.918337087,
85.910610729, 84.911789737, 87.905612124, 88.905848295, 89.904704416,
92.906378058, 97.905408169, 98.906254747, 101.904349312, 102.905504292,
105.903485715, 106.90509682, 113.90335854, 114.903878484, 119.902194676,
120.903815686, 129.906224399, 126.904472681, 131.904153457, 132.905451932,
137.905247237, 138.906353267, 139.905438706, 140.907652769, 141.907723297,
144.912749023, 151.919732425, 152.921230339, 157.924103912, 158.925346757,
163.929174751, 164.93032207, 165.930293061, 168.93421325, 173.938862089,
174.940771819, 179.946549953, 180.947995763, 183.950931188, 186.955753109,
191.96148069, 192.96292643, 194.964791134, 196.966568662, 201.970643011,
204.974427541, 207.976652071, 208.980398734, 208.982430435, 210.987496271,
222.017577738, 222.01755173, 228.031070292, 227.027752127, 232.038055325,
231.03588399, 238.050788247, 237.048173444, 242.058742611, 243.06138108,
247.07035354, 247.07030708, 251.079586788, 252.082978512, 257.095104724,
258.098431319, 255.093241131, 260.105504, 263.112547, 255.107398, 259.114500,
262.122892, 263.128558, 265.136151, 281.162061, 272.153615, 283.171792, 283.176451,
285.183698, 287.191186, 292.199786, 291.206564, 293.214670]
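# A minimal sketch of turning these parallel lists into lookup tables (the
# dict names here are illustrative, not necessarily the module's own):
#   el2z = dict(zip(_temp_symbol, _temp_z))
#   el2mass = dict(zip(_temp_symbol, _temp_mass))
#   el2mass["C"]  # -> 12 (most-common-isotope mass, per the docstring above)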
_temp_iso_symbol = [
"H", "H1", "H2", "D", "H3", "T", "H4", "H5", "H6", "H7", "HE", "HE3", "HE4",
"HE5", "HE6", "HE7", "HE8", "HE9", "HE10", "LI", "LI3", "LI4", "LI5", "LI6",
"LI7", "LI8", "LI9", "LI10", "LI11", "LI12", "BE", "BE5", "BE6", "BE7", "BE8",
"BE9", "BE10", "BE11", "BE12", "BE13", "BE14", "BE15", "BE16", "B", "B6", "B7",
"B8", "B9", "B10", "B11", "B12", "B13", "B14", "B15", "B16", "B17", "B18", "B19",
"C", "C8", "C9", "C10", "C11", "C12", "C13", "C14", "C15", "C16", "C17", "C18",
"C19", "C20", "C21", "C22", "N", "N10", "N11", "N12", "N13", "N14", "N15", "N16",
"N17", "N18", "N19", "N20", "N21", "N22", "N23", "N24", "N25", "O", "O12", "O13",
"O14", "O15", "O16", "O17", "O18", "O19", "O20", "O21", "O22", "O23", "O24",
"O25", "O26", "O27", "O28", "F", "F14", "F15", "F16", "F17", "F18", "F19", "F20",
"F21", "F22", "F23", "F24", "F25", "F26", "F27", "F28", "F29", "F30", "F31",
"NE", "NE16", "NE17", "NE18", "NE19", "NE20", "NE21", "NE22", "NE23", "NE24",
"NE25", "NE26", "NE27", "NE28", "NE29", "NE30", "NE31", "NE32", "NE33", "NE34",
"NA", "NA18", "NA19", "NA20", "NA21", "NA22", "NA23", "NA24", "NA25", "NA26",
"NA27", "NA28", "NA29", "NA30", "NA31", "NA32", "NA33", "NA34", "NA35", "NA36",
"NA37", "MG", "MG19", "MG20", "MG21", "MG22", "MG23", "MG24", "MG25", "MG26",
"MG27", "MG28", "MG29", "MG30", "MG31", "MG32", "MG33", "MG34", "MG35", "MG36",
"MG37", "MG38", "MG39", "MG40", "AL", "AL21", "AL22", "AL23", "AL24", "AL25",
"AL26", "AL27", "AL28", "AL29", "AL30", "AL31", "AL32", "AL33", "AL34", "AL35",
"AL36", "AL37", "AL38", "AL39", "AL40", "AL41", "AL42", "SI", "SI22", "SI23",
"SI24", "SI25", "SI26", "SI27", "SI28", "SI29", "SI30", "SI31", "SI32", "SI33",
"SI34", "SI35", "SI36", "SI37", "SI38", "SI39", "SI40", "SI41", "SI42", "SI43",
"SI44", "P", "P24", "P25", "P26", "P27", "P28", "P29", "P30", "P31", "P32",
"P33", "P34", "P35", "P36", "P37", "P38", "P39", "P40", "P41", "P42", "P43",
"P44", "P45", "P46", "S", "S26", "S27", "S28", "S29", "S30", "S31", "S32", "S33",
"S34", "S35", "S36", "S37", "S38", "S39", "S40", "S41", "S42", "S43", "S44",
"S45", "S46", "S47", "S48", "S49", "CL", "CL28", "CL29", "CL30", "CL31", "CL32",
"CL33", "CL34", "CL35", "CL36", "CL37", "CL38", "CL39", "CL40", "CL41", "CL42",
"CL43", "CL44", "CL45", "CL46", "CL47", "CL48", "CL49", "CL50", "CL51", "AR",
"AR30", "AR31", "AR32", "AR33", "AR34", "AR35", "AR36", "AR37", "AR38", "AR39",
"AR40", "AR41", "AR42", "AR43", "AR44", "AR45", "AR46", "AR47", "AR48", "AR49",
"AR50", "AR51", "AR52", "AR53", "K", "K32", "K33", "K34", "K35", "K36", "K37",
"K38", "K39", "K40", "K41", "K42", "K43", "K44", "K45", "K46", "K47", "K48",
"K49", "K50", "K51", "K52", "K53", "K54", "K55", "CA", "CA34", "CA35", "CA36",
"CA37", "CA38", "CA39", "CA40", "CA41", "CA42", "CA43", "CA44", "CA45", "CA46",
"CA47", "CA48", "CA49", "CA50", "CA51", "CA52", "CA53", "CA54", "CA55", "CA56",
"CA57", "SC", "SC36", "SC37", "SC38", "SC39", "SC40", "SC41", "SC42", "SC43",
"SC44", "SC45", "SC46", "SC47", "SC48", "SC49", "SC50", "SC51", "SC52", "SC53",
"SC54", "SC55", "SC56", "SC57", "SC58", "SC59", "SC60", "TI", "TI38", "TI39",
"TI40", "TI41", "TI42", "TI43", "TI44", "TI45", "TI46", "TI47", "TI48", "TI49",
"TI50", "TI51", "TI52", "TI53", "TI54", "TI55", "TI56", "TI57", "TI58", "TI59",
"TI60", "TI61", "TI62", "TI63", "V", "V40", "V41", "V42", "V43", "V44", "V45",
"V46", "V47", "V48", "V49", "V50", "V51", "V52", "V53", "V54", "V55", "V56",
"V57", "V58", "V59", "V60", "V61", "V62", "V63", "V64", "V65", "CR", "CR42",
"CR43", "CR44", "CR45", "CR46", "CR47", "CR48", "CR49", "CR50", "CR51", "CR52",
"CR53", "CR54", "CR55", "CR56", "CR57", "CR58", "CR59", "CR60", "CR61", "CR62",
"CR63", "CR64", "CR65", "CR66", "CR67", "MN", "MN44", "MN45", "MN46", "MN47",
"MN48", "MN49", "MN50", "MN51", "MN52", "MN53", "MN54", "MN55", "MN56", "MN57",
"MN58", "MN59", "MN60", "MN61", "MN62", "MN63", "MN64", "MN65", "MN66", "MN67",
"MN68", "MN69", "FE", "FE45", "FE46", "FE47", "FE48", "FE49", "FE50", "FE51",
"FE52", "FE53", "FE54", "FE55", "FE56", "FE57", "FE58", "FE59", "FE60", "FE61",
"FE62", "FE63", "FE64", "FE65", "FE66", "FE67", "FE68", "FE69", "FE70", "FE71",
"FE72", "CO", "CO47", "CO48", "CO49", "CO50", "CO51", "CO52", "CO53", "CO54",
"CO55", "CO56", "CO57", "CO58", "CO59", "CO60", "CO61", "CO62", "CO63", "CO64",
"CO65", "CO66", "CO67", "CO68", "CO69", "CO70", "CO71", "CO72", "CO73", "CO74",
"CO75", "NI", "NI48", "NI49", "NI50", "NI51", "NI52", "NI53", "NI54", "NI55",
"NI56", "NI57", "NI58", "NI59", "NI60", "NI61", "NI62", "NI63", "NI64", "NI65",
"NI66", "NI67", "NI68", "NI69", "NI70", "NI71", "NI72", "NI73", "NI74", "NI75",
"NI76", "NI77", "NI78", "CU", "CU52", "CU53", "CU54", "CU55", "CU56", "CU57",
"CU58", "CU59", "CU60", "CU61", "CU62", "CU63", "CU64", "CU65", "CU66", "CU67",
"CU68", "CU69", "CU70", "CU71", "CU72", "CU73", "CU74", "CU75", "CU76", "CU77",
"CU78", "CU79", "CU80", "ZN", "ZN54", "ZN55", "ZN56", "ZN57", "ZN58", "ZN59",
"ZN60", "ZN61", "ZN62", "ZN63", "ZN64", "ZN65", "ZN66", "ZN67", "ZN68", "ZN69",
"ZN70", "ZN71", "ZN72", "ZN73", "ZN74", "ZN75", "ZN76", "ZN77", "ZN78", "ZN79",
"ZN80", "ZN81", "ZN82", "ZN83", "GA", "GA56", "GA57", "GA58", "GA59", "GA60",
"GA61", "GA62", "GA63", "GA64", "GA65", "GA66", "GA67", "GA68", "GA69", "GA70",
"GA71", "GA72", "GA73", "GA74", "GA75", "GA76", "GA77", "GA78", "GA79", "GA80",
"GA81", "GA82", "GA83", "GA84", "GA85", "GA86", "GE", "GE58", "GE59", "GE60",
"GE61", "GE62", "GE63", "GE64", "GE65", "GE66", "GE67", "GE68", "GE69", "GE70",
"GE71", "GE72", "GE73", "GE74", "GE75", "GE76", "GE77", "GE78", "GE79", "GE80",
"GE81", "GE82", "GE83", "GE84", "GE85", "GE86", "GE87", "GE88", "GE89", "AS",
"AS60", "AS61", "AS62", "AS63", "AS64", "AS65", "AS66", "AS67", "AS68", "AS69",
"AS70", "AS71", "AS72", "AS73", "AS74", "AS75", "AS76", "AS77", "AS78", "AS79",
"AS80", "AS81", "AS82", "AS83", "AS84", "AS85", "AS86", "AS87", "AS88", "AS89",
"AS90", "AS91", "AS92", "SE", "SE65", "SE66", "SE67", "SE68", "SE69", "SE70",
"SE71", "SE72", "SE73", "SE74", "SE75", "SE76", "SE77", "SE78", "SE79", "SE80",
"SE81", "SE82", "SE83", "SE84", "SE85", "SE86", "SE87", "SE88", "SE89", "SE90",
"SE91", "SE92", "SE93", "SE94", "BR", "BR67", "BR68", "BR69", "BR70", "BR71",
"BR72", "BR73", "BR74", "BR75", "BR76", "BR77", "BR78", "BR79", "BR80", "BR81",
"BR82", "BR83", "BR84", "BR85", "BR86", "BR87", "BR88", "BR89", "BR90", "BR91",
"BR92", "BR93", "BR94", "BR95", "BR96", "BR97", "KR", "KR69", "KR70", "KR71",
"KR72", "KR73", "KR74", "KR75", "KR76", "KR77", "KR78", "KR79", "KR80", "KR81",
"KR82", "KR83", "KR84", "KR85", "KR86", "KR87", "KR88", "KR89", "KR90", "KR91",
"KR92", "KR93", "KR94", "KR95", "KR96", "KR97", "KR98", "KR99", "KR100", "RB",
"RB71", "RB72", "RB73", "RB74", "RB75", "RB76", "RB77", "RB78", "RB79", "RB80",
"RB81", "RB82", "RB83", "RB84", "RB85", "RB86", "RB87", "RB88", "RB89", "RB90",
"RB91", "RB92", "RB93", "RB94", "RB95", "RB96", "RB97", "RB98", "RB99",
"RB100", "RB101", "RB102", "SR", "SR73", "SR74", "SR75", "SR76", "SR77",
"SR78", "SR79", "SR80", "SR81", "SR82", "SR83", "SR84", "SR85", "SR86", "SR87",
"SR88", "SR89", "SR90", "SR91", "SR92", "SR93", "SR94", "SR95", "SR96", "SR97",
"SR98", "SR99", "SR100", "SR101", "SR102", "SR103", "SR104", "SR105", "Y",
"Y76", "Y77", "Y78", "Y79", "Y80", "Y81", "Y82", "Y83", "Y84", "Y85", "Y86",
"Y87", "Y88", "Y89", "Y90", "Y91", "Y92", "Y93", "Y94", "Y95", "Y96", "Y97",
"Y98", "Y99", "Y100", "Y101", "Y102", "Y103", "Y104", "Y105", "Y106", "Y107",
"Y108", "ZR", "ZR78", "ZR79", "ZR80", "ZR81", "ZR82", "ZR83", "ZR84", "ZR85",
"ZR86", "ZR87", "ZR88", "ZR89", "ZR90", "ZR91", "ZR92", "ZR93", "ZR94", "ZR95",
"ZR96", "ZR97", "ZR98", "ZR99", "ZR100", "ZR101", "ZR102", "ZR103", "ZR104",
"ZR105", "ZR106", "ZR107", "ZR108", "ZR109", "ZR110", "NB", "NB81", "NB82",
"NB83", "NB84", "NB85", "NB86", "NB87", "NB88", "NB89", "NB90", "NB91", "NB92",
"NB93", "NB94", "NB95", "NB96", "NB97", "NB98", "NB99", "NB100", "NB101",
"NB102", "NB103", "NB104", "NB105", "NB106", "NB107", "NB108", "NB109",
"NB110", "NB111", "NB112", "NB113", "MO", "MO83", "MO84", "MO85", "MO86",
"MO87", "MO88", "MO89", "MO90", "MO91", "MO92", "MO93", "MO94", "MO95", "MO96",
"MO97", "MO98", "MO99", "MO100", "MO101", "MO102", "MO103", "MO104", "MO105",
"MO106", "MO107", "MO108", "MO109", "MO110", "MO111", "MO112", "MO113",
"MO114", "MO115", "TC", "TC85", "TC86", "TC87", "TC88", "TC89", "TC90", "TC91",
"TC92", "TC93", "TC94", "TC95", "TC96", "TC97", "TC98", "TC99", "TC100",
"TC101", "TC102", "TC103", "TC104", "TC105", "TC106", "TC107", "TC108",
"TC109", "TC110", "TC111", "TC112", "TC113", "TC114", "TC115", "TC116",
"TC117", "TC118", "RU", "RU87", "RU88", "RU89", "RU90", "RU91", "RU92", "RU93",
"RU94", "RU95", "RU96", "RU97", "RU98", "RU99", "RU100", "RU101", "RU102",
"RU103", "RU104", "RU105", "RU106", "RU107", "RU108", "RU109", "RU110",
"RU111", "RU112", "RU113", "RU114", "RU115", "RU116", "RU117", "RU118",
"RU119", "RU120", "RH", "RH89", "RH90", "RH91", "RH92", "RH93", "RH94", "RH95",
"RH96", "RH97", "RH98", "RH99", "RH100", "RH101", "RH102", "RH103", "RH104",
"RH105", "RH106", "RH107", "RH108", "RH109", "RH110", "RH111", "RH112",
"RH113", "RH114", "RH115", "RH116", "RH117", "RH118", "RH119", "RH120",
"RH121", "RH122", "PD", "PD91", "PD92", "PD93", "PD94", "PD95", "PD96", "PD97",
"PD98", "PD99", "PD100", "PD101", "PD102", "PD103", "PD104", "PD105", "PD106",
"PD107", "PD108", "PD109", "PD110", "PD111", "PD112", "PD113", "PD114",
"PD115", "PD116", "PD117", "PD118", "PD119", "PD120", "PD121", "PD122",
"PD123", "PD124", "AG", "AG93", "AG94", "AG95", "AG96", "AG97", "AG98", "AG99",
"AG100", "AG101", "AG102", "AG103", "AG104", "AG105", "AG106", "AG107",
"AG108", "AG109", "AG110", "AG111", "AG112", "AG113", "AG114", "AG115",
"AG116", "AG117", "AG118", "AG119", "AG120", "AG121", "AG122", "AG123",
"AG124", "AG125", "AG126", "AG127", "AG128", "AG129", "AG130", "CD", "CD95",
"CD96", "CD97", "CD98", "CD99", "CD100", "CD101", "CD102", "CD103", "CD104",
"CD105", "CD106", "CD107", "CD108", "CD109", "CD110", "CD111", "CD112",
"CD113", "CD114", "CD115", "CD116", "CD117", "CD118", "CD119", "CD120",
"CD121", "CD122", "CD123", "CD124", "CD125", "CD126", "CD127", "CD128",
"CD129", "CD130", "CD131", "CD132", "IN", "IN97", "IN98", "IN99", "IN100",
"IN101", "IN102", "IN103", "IN104", "IN105", "IN106", "IN107", "IN108",
"IN109", "IN110", "IN111", "IN112", "IN113", "IN114", "IN115", "IN116",
"IN117", "IN118", "IN119", "IN120", "IN121", "IN122", "IN123", "IN124",
"IN125", "IN126", "IN127", "IN128", "IN129", "IN130", "IN131", "IN132",
"IN133", "IN134", "IN135", "SN", "SN99", "SN100", "SN101", "SN102", "SN103",
"SN104", "SN105", "SN106", "SN107", "SN108", "SN109", "SN110", "SN111",
"SN112", "SN113", "SN114", "SN115", "SN116", "SN117", "SN118", "SN119",
"SN120", "SN121", "SN122", "SN123", "SN124", "SN125", "SN126", "SN127",
"SN128", "SN129", "SN130", "SN131", "SN132", "SN133", "SN134", "SN135",
"SN136", "SN137", "SB", "SB103", "SB104", "SB105", "SB106", "SB107", "SB108",
"SB109", "SB110", "SB111", "SB112", "SB113", "SB114", "SB115", "SB116",
"SB117", "SB118", "SB119", "SB120", "SB121", "SB122", "SB123", "SB124",
"SB125", "SB126", "SB127", "SB128", "SB129", "SB130", "SB131", "SB132",
"SB133", "SB134", "SB135", "SB136", "SB137", "SB138", "SB139", "TE", "TE105",
"TE106", "TE107", "TE108", "TE109", "TE110", "TE111", "TE112", "TE113",
"TE114", "TE115", "TE116", "TE117", "TE118", "TE119", "TE120", "TE121",
"TE122", "TE123", "TE124", "TE125", "TE126", "TE127", "TE128", "TE129",
"TE130", "TE131", "TE132", "TE133", "TE134", "TE135", "TE136", "TE137",
"TE138", "TE139", "TE140", "TE141", "TE142", "I", "I108", "I109", "I110",
"I111", "I112", "I113", "I114", "I115", "I116", "I117", "I118", "I119", "I120",
"I121", "I122", "I123", "I124", "I125", "I126", "I127", "I128", "I129", "I130",
"I131", "I132", "I133", "I134", "I135", "I136", "I137", "I138", "I139", "I140",
"I141", "I142", "I143", "I144", "XE", "XE110", "XE111", "XE112", "XE113",
"XE114", "XE115", "XE116", "XE117", "XE118", "XE119", "XE120", "XE121",
"XE122", "XE123", "XE124", "XE125", "XE126", "XE127", "XE128", "XE129",
"XE130", "XE131", "XE132", "XE133", "XE134", "XE135", "XE136", "XE137",
"XE138", "XE139", "XE140", "XE141", "XE142", "XE143", "XE144", "XE145",
"XE146", "XE147", "CS", "CS112", "CS113", "CS114", "CS115", "CS116", "CS117",
"CS118", "CS119", "CS120", "CS121", "CS122", "CS123", "CS124", "CS125",
"CS126", "CS127", "CS128", "CS129", "CS130", "CS131", "CS132", "CS133",
"CS134", "CS135", "CS136", "CS137", "CS138", "CS139", "CS140", "CS141",
"CS142", "CS143", "CS144", "CS145", "CS146", "CS147", "CS148", "CS149",
"CS150", "CS151", "BA", "BA114", "BA115", "BA116", "BA117", "BA118", "BA119",
"BA120", "BA121", "BA122", "BA123", "BA124", "BA125", "BA126", "BA127",
"BA128", "BA129", "BA130", "BA131", "BA132", "BA133", "BA134", "BA135",
"BA136", "BA137", "BA138", "BA139", "BA140", "BA141", "BA142", "BA143",
"BA144", "BA145", "BA146", "BA147", "BA148", "BA149", "BA150", "BA151",
"BA152", "BA153", "LA", "LA117", "LA118", "LA119", "LA120", "LA121", "LA122",
"LA123", "LA124", "LA125", "LA126", "LA127", "LA128", "LA129", "LA130",
"LA131", "LA132", "LA133", "LA134", "LA135", "LA136", "LA137", "LA138",
"LA139", "LA140", "LA141", "LA142", "LA143", "LA144", "LA145", "LA146",
"LA147", "LA148", "LA149", "LA150", "LA151", "LA152", "LA153", "LA154",
"LA155", "CE", "CE119", "CE120", "CE121", "CE122", "CE123", "CE124", "CE125",
"CE126", "CE127", "CE128", "CE129", "CE130", "CE131", "CE132", "CE133",
"CE134", "CE135", "CE136", "CE137", "CE138", "CE139", "CE140", "CE141",
"CE142", "CE143", "CE144", "CE145", "CE146", "CE147", "CE148", "CE149",
"CE150", "CE151", "CE152", "CE153", "CE154", "CE155", "CE156", "CE157", "PR",
"PR121", "PR122", "PR123", "PR124", "PR125", "PR126", "PR127", "PR128",
"PR129", "PR130", "PR131", "PR132", "PR133", "PR134", "PR135", "PR136",
"PR137", "PR138", "PR139", "PR140", "PR141", "PR142", "PR143", "PR144",
"PR145", "PR146", "PR147", "PR148", "PR149", "PR150", "PR151", "PR152",
"PR153", "PR154", "PR155", "PR156", "PR157", "PR158", "PR159", "ND", "ND124",
"ND125", "ND126", "ND127", "ND128", "ND129", "ND130", "ND131", "ND132",
"ND133", "ND134", "ND135", "ND136", "ND137", "ND138", "ND139", "ND140",
"ND141", "ND142", "ND143", "ND144", "ND145", "ND146", "ND147", "ND148",
"ND149", "ND150", "ND151", "ND152", "ND153", "ND154", "ND155", "ND156",
"ND157", "ND158", "ND159", "ND160", "ND161", "PM", "PM126", "PM127", "PM128",
"PM129", "PM130", "PM131", "PM132", "PM133", "PM134", "PM135", "PM136",
"PM137", "PM138", "PM139", "PM140", "PM141", "PM142", "PM143", "PM144",
"PM145", "PM146", "PM147", "PM148", "PM149", "PM150", "PM151", "PM152",
"PM153", "PM154", "PM155", "PM156", "PM157", "PM158", "PM159", "PM160",
"PM161", "PM162", "PM163", "SM", "SM128", "SM129", "SM130", "SM131", "SM132",
"SM133", "SM134", "SM135", "SM136", "SM137", "SM138", "SM139", "SM140",
"SM141", "SM142", "SM143", "SM144", "SM145", "SM146", "SM147", "SM148",
"SM149", "SM150", "SM151", "SM152", "SM153", "SM154", "SM155", "SM156",
"SM157", "SM158", "SM159", "SM160", "SM161", "SM162", "SM163", "SM164",
"SM165", "EU", "EU130", "EU131", "EU132", "EU133", "EU134", "EU135", "EU136",
"EU137", "EU138", "EU139", "EU140", "EU141", "EU142", "EU143", "EU144",
"EU145", "EU146", "EU147", "EU148", "EU149", "EU150", "EU151", "EU152",
"EU153", "EU154", "EU155", "EU156", "EU157", "EU158", "EU159", "EU160",
"EU161", "EU162", "EU163", "EU164", "EU165", "EU166", "EU167", "GD", "GD134",
"GD135", "GD136", "GD137", "GD138", "GD139", "GD140", "GD141", "GD142",
"GD143", "GD144", "GD145", "GD146", "GD147", "GD148", "GD149", "GD150",
"GD151", "GD152", "GD153", "GD154", "GD155", "GD156", "GD157", "GD158",
"GD159", "GD160", "GD161", "GD162", "GD163", "GD164", "GD165", "GD166",
"GD167", "GD168", "GD169", "TB", "TB136", "TB137", "TB138", "TB139", "TB140",
"TB141", "TB142", "TB143", "TB144", "TB145", "TB146", "TB147", "TB148",
"TB149", "TB150", "TB151", "TB152", "TB153", "TB154", "TB155", "TB156",
"TB157", "TB158", "TB159", "TB160", "TB161", "TB162", "TB163", "TB164",
"TB165", "TB166", "TB167", "TB168", "TB169", "TB170", "TB171", "DY", "DY138",
"DY139", "DY140", "DY141", "DY142", "DY143", "DY144", "DY145", "DY146",
"DY147", "DY148", "DY149", "DY150", "DY151", "DY152", "DY153", "DY154",
"DY155", "DY156", "DY157", "DY158", "DY159", "DY160", "DY161", "DY162",
"DY163", "DY164", "DY165", "DY166", "DY167", "DY168", "DY169", "DY170",
"DY171", "DY172", "DY173", "HO", "HO140", "HO141", "HO142", "HO143", "HO144",
"HO145", "HO146", "HO147", "HO148", "HO149", "HO150", "HO151", "HO152",
"HO153", "HO154", "HO155", "HO156", "HO157", "HO158", "HO159", "HO160",
"HO161", "HO162", "HO163", "HO164", "HO165", "HO166", "HO167", "HO168",
"HO169", "HO170", "HO171", "HO172", "HO173", "HO174", "HO175", "ER", "ER143",
"ER144", "ER145", "ER146", "ER147", "ER148", "ER149", "ER150", "ER151",
"ER152", "ER153", "ER154", "ER155", "ER156", "ER157", "ER158", "ER159",
"ER160", "ER161", "ER162", "ER163", "ER164", "ER165", "ER166", "ER167",
"ER168", "ER169", "ER170", "ER171", "ER172", "ER173", "ER174", "ER175",
"ER176", "ER177", "TM", "TM145", "TM146", "TM147", "TM148", "TM149", "TM150",
"TM151", "TM152", "TM153", "TM154", "TM155", "TM156", "TM157", "TM158",
"TM159", "TM160", "TM161", "TM162", "TM163", "TM164", "TM165", "TM166",
"TM167", "TM168", "TM169", "TM170", "TM171", "TM172", "TM173", "TM174",
"TM175", "TM176", "TM177", "TM178", "TM179", "YB", "YB148", "YB149", "YB150",
"YB151", "YB152", "YB153", "YB154", "YB155", "YB156", "YB157", "YB158",
"YB159", "YB160", "YB161", "YB162", "YB163", "YB164", "YB165", "YB166",
"YB167", "YB168", "YB169", "YB170", "YB171", "YB172", "YB173", "YB174",
"YB175", "YB176", "YB177", "YB178", "YB179", "YB180", "YB181", "LU", "LU150",
"LU151", "LU152", "LU153", "LU154", "LU155", "LU156", "LU157", "LU158",
"LU159", "LU160", "LU161", "LU162", "LU163", "LU164", "LU165", "LU166",
"LU167", "LU168", "LU169", "LU170", "LU171", "LU172", "LU173", "LU174",
"LU175", "LU176", "LU177", "LU178", "LU179", "LU180", "LU181", "LU182",
"LU183", "LU184", "HF", "HF153", "HF154", "HF155", "HF156", "HF157", "HF158",
"HF159", "HF160", "HF161", "HF162", "HF163", "HF164", "HF165", "HF166",
"HF167", "HF168", "HF169", "HF170", "HF171", "HF172", "HF173", "HF174",
"HF175", "HF176", "HF177", "HF178", "HF179", "HF180", "HF181", "HF182",
"HF183", "HF184", "HF185", "HF186", "HF187", "HF188", "TA", "TA155", "TA156",
"TA157", "TA158", "TA159", "TA160", "TA161", "TA162", "TA163", "TA164",
"TA165", "TA166", "TA167", "TA168", "TA169", "TA170", "TA171", "TA172",
"TA173", "TA174", "TA175", "TA176", "TA177", "TA178", "TA179", "TA180",
"TA181", "TA182", "TA183", "TA184", "TA185", "TA186", "TA187", "TA188",
"TA189", "TA190", "W", "W158", "W159", "W160", "W161", "W162", "W163", "W164",
"W165", "W166", "W167", "W168", "W169", "W170", "W171", "W172", "W173", "W174",
"W175", "W176", "W177", "W178", "W179", "W180", "W181", "W182", "W183", "W184",
"W185", "W186", "W187", "W188", "W189", "W190", "W191", "W192", "RE", "RE160",
"RE161", "RE162", "RE163", "RE164", "RE165", "RE166", "RE167", "RE168",
"RE169", "RE170", "RE171", "RE172", "RE173", "RE174", "RE175", "RE176",
"RE177", "RE178", "RE179", "RE180", "RE181", "RE182", "RE183", "RE184",
"RE185", "RE186", "RE187", "RE188", "RE189", "RE190", "RE191", "RE192",
"RE193", "RE194", "OS", "OS162", "OS163", "OS164", "OS165", "OS166", "OS167",
"OS168", "OS169", "OS170", "OS171", "OS172", "OS173", "OS174", "OS175",
"OS176", "OS177", "OS178", "OS179", "OS180", "OS181", "OS182", "OS183",
"OS184", "OS185", "OS186", "OS187", "OS188", "OS189", "OS190", "OS191",
"OS192", "OS193", "OS194", "OS195", "OS196", "IR", "IR164", "IR165", "IR166",
"IR167", "IR168", "IR169", "IR170", "IR171", "IR172", "IR173", "IR174",
"IR175", "IR176", "IR177", "IR178", "IR179", "IR180", "IR181", "IR182",
"IR183", "IR184", "IR185", "IR186", "IR187", "IR188", "IR189", "IR190",
"IR191", "IR192", "IR193", "IR194", "IR195", "IR196", "IR197", "IR198",
"IR199", "PT", "PT166", "PT167", "PT168", "PT169", "PT170", "PT171", "PT172",
"PT173", "PT174", "PT175", "PT176", "PT177", "PT178", "PT179", "PT180",
"PT181", "PT182", "PT183", "PT184", "PT185", "PT186", "PT187", "PT188",
"PT189", "PT190", "PT191", "PT192", "PT193", "PT194", "PT195", "PT196",
"PT197", "PT198", "PT199", "PT200", "PT201", "PT202", "AU", "AU169", "AU170",
"AU171", "AU172", "AU173", "AU174", "AU175", "AU176", "AU177", "AU178",
"AU179", "AU180", "AU181", "AU182", "AU183", "AU184", "AU185", "AU186",
"AU187", "AU188", "AU189", "AU190", "AU191", "AU192", "AU193", "AU194",
"AU195", "AU196", "AU197", "AU198", "AU199", "AU200", "AU201", "AU202",
"AU203", "AU204", "AU205", "HG", "HG171", "HG172", "HG173", "HG174", "HG175",
"HG176", "HG177", "HG178", "HG179", "HG180", "HG181", "HG182", "HG183",
"HG184", "HG185", "HG186", "HG187", "HG188", "HG189", "HG190", "HG191",
"HG192", "HG193", "HG194", "HG195", "HG196", "HG197", "HG198", "HG199",
"HG200", "HG201", "HG202", "HG203", "HG204", "HG205", "HG206", "HG207",
"HG208", "HG209", "HG210", "TL", "TL176", "TL177", "TL178", "TL179", "TL180",
"TL181", "TL182", "TL183", "TL184", "TL185", "TL186", "TL187", "TL188",
"TL189", "TL190", "TL191", "TL192", "TL193", "TL194", "TL195", "TL196",
"TL197", "TL198", "TL199", "TL200", "TL201", "TL202", "TL203", "TL204",
"TL205", "TL206", "TL207", "TL208", "TL209", "TL210", "TL211", "TL212", "PB",
"PB178", "PB179", "PB180", "PB181", "PB182", "PB183", "PB184", "PB185",
"PB186", "PB187", "PB188", "PB189", "PB190", "PB191", "PB192", "PB193",
"PB194", "PB195", "PB196", "PB197", "PB198", "PB199", "PB200", "PB201",
"PB202", "PB203", "PB204", "PB205", "PB206", "PB207", "PB208", "PB209",
"PB210", "PB211", "PB212", "PB213", "PB214", "PB215", "BI", "BI184", "BI185",
"BI186", "BI187", "BI188", "BI189", "BI190", "BI191", "BI192", "BI193",
"BI194", "BI195", "BI196", "BI197", "BI198", "BI199", "BI200", "BI201",
"BI202", "BI203", "BI204", "BI205", "BI206", "BI207", "BI208", "BI209",
"BI210", "BI211", "BI212", "BI213", "BI214", "BI215", "BI216", "BI217",
"BI218", "PO", "PO188", "PO189", "PO190", "PO191", "PO192", "PO193", "PO194",
"PO195", "PO196", "PO197", "PO198", "PO199", "PO200", "PO201", "PO202",
"PO203", "PO204", "PO205", "PO206", "PO207", "PO208", "PO209", "PO210",
"PO211", "PO212", "PO213", "PO214", "PO215", "PO216", "PO217", "PO218",
"PO219", "PO220", "AT", "AT193", "AT194", "AT195", "AT196", "AT197", "AT198",
"AT199", "AT200", "AT201", "AT202", "AT203", "AT204", "AT205", "AT206",
"AT207", "AT208", "AT209", "AT210", "AT211", "AT212", "AT213", "AT214",
"AT215", "AT216", "AT217", "AT218", "AT219", "AT220", "AT221", "AT222",
"AT223", "RN", "RN195", "RN196", "RN197", "RN198", "RN199", "RN200", "RN201",
"RN202", "RN203", "RN204", "RN205", "RN206", "RN207", "RN208", "RN209",
"RN210", "RN211", "RN212", "RN213", "RN214", "RN215", "RN216", "RN217",
"RN218", "RN219", "RN220", "RN221", "RN222", "RN223", "RN224", "RN225",
"RN226", "RN227", "RN228", "FR", "FR199", "FR200", "FR201", "FR202", "FR203",
"FR204", "FR205", "FR206", "FR207", "FR208", "FR209", "FR210", "FR211",
"FR212", "FR213", "FR214", "FR215", "FR216", "FR217", "FR218", "FR219",
"FR220", "FR221", "FR222", "FR223", "FR224", "FR225", "FR226", "FR227",
"FR228", "FR229", "FR230", "FR231", "FR232", "RA", "RA202", "RA203", "RA204",
"RA205", "RA206", "RA207", "RA208", "RA209", "RA210", "RA211", "RA212",
"RA213", "RA214", "RA215", "RA216", "RA217", "RA218", "RA219", "RA220",
"RA221", "RA222", "RA223", "RA224", "RA225", "RA226", "RA227", "RA228",
"RA229", "RA230", "RA231", "RA232", "RA233", "RA234", "AC", "AC206", "AC207",
"AC208", "AC209", "AC210", "AC211", "AC212", "AC213", "AC214", "AC215",
"AC216", "AC217", "AC218", "AC219", "AC220", "AC221", "AC222", "AC223",
"AC224", "AC225", "AC226", "AC227", "AC228", "AC229", "AC230", "AC231",
"AC232", "AC233", "AC234", "AC235", "AC236", "TH", "TH209", "TH210", "TH211",
"TH212", "TH213", "TH214", "TH215", "TH216", "TH217", "TH218", "TH219",
"TH220", "TH221", "TH222", "TH223", "TH224", "TH225", "TH226", "TH227",
"TH228", "TH229", "TH230", "TH231", "TH232", "TH233", "TH234", "TH235",
"TH236", "TH237", "TH238", "PA", "PA212", "PA213", "PA214", "PA215", "PA216",
"PA217", "PA218", "PA219", "PA220", "PA221", "PA222", "PA223", "PA224",
"PA225", "PA226", "PA227", "PA228", "PA229", "PA230", "PA231", "PA232",
"PA233", "PA234", "PA235", "PA236", "PA237", "PA238", "PA239", "PA240", "U",
"U217", "U218", "U219", "U220", "U221", "U222", "U223", "U224", "U225", "U226",
"U227", "U228", "U229", "U230", "U231", "U232", "U233", "U234", "U235", "U236",
"U237", "U238", "U239", "U240", "U241", "U242", "NP", "NP225", "NP226",
"NP227", "NP228", "NP229", "NP230", "NP231", "NP232", "NP233", "NP234",
"NP235", "NP236", "NP237", "NP238", "NP239", "NP240", "NP241", "NP242",
"NP243", "NP244", "PU", "PU228", "PU229", "PU230", "PU231", "PU232", "PU233",
"PU234", "PU235", "PU236", "PU237", "PU238", "PU239", "PU240", "PU241",
"PU242", "PU243", "PU244", "PU245", "PU246", "PU247", "AM", "AM231", "AM232",
"AM233", "AM234", "AM235", "AM236", "AM237", "AM238", "AM239", "AM240",
"AM241", "AM242", "AM243", "AM244", "AM245", "AM246", "AM247", "AM248",
"AM249", "CM", "CM233", "CM234", "CM235", "CM236", "CM237", "CM238", "CM239",
"CM240", "CM241", "CM242", "CM243", "CM244", "CM245", "CM246", "CM247",
"CM248", "CM249", "CM250", "CM251", "CM252", "BK", "BK235", "BK236", "BK237",
"BK238", "BK239", "BK240", "BK241", "BK242", "BK243", "BK244", "BK245",
"BK246", "BK247", "BK248", "BK249", "BK250", "BK251", "BK252", "BK253",
"BK254", "CF", "CF237", "CF238", "CF239", "CF240", "CF241", "CF242", "CF243",
"CF244", "CF245", "CF246", "CF247", "CF248", "CF249", "CF250", "CF251",
"CF252", "CF253", "CF254", "CF255", "CF256", "ES", "ES240", "ES241", "ES242",
"ES243", "ES244", "ES245", "ES246", "ES247", "ES248", "ES249", "ES250",
"ES251", "ES252", "ES253", "ES254", "ES255", "ES256", "ES257", "ES258", "FM",
"FM242", "FM243", "FM244", "FM245", "FM246", "FM247", "FM248", "FM249",
"FM250", "FM251", "FM252", "FM253", "FM254", "FM255", "FM256", "FM257",
"FM258", "FM259", "FM260", "MD", "MD245", "MD246", "MD247", "MD248", "MD249",
"MD250", "MD251", "MD252", "MD253", "MD254", "MD255", "MD256", "MD257",
"MD258", "MD259", "MD260", "MD261", "MD262", "NO", "NO248", "NO249", "NO250",
"NO251", "NO252", "NO253", "NO254", "NO255", "NO256", "NO257", "NO258",
"NO259", "NO260", "NO261", "NO262", "NO263", "NO264", "LR", "LR251", "LR252",
"LR253", "LR254", "LR255", "LR256", "LR257", "LR258", "LR259", "LR260",
"LR261", "LR262", "LR263", "LR264", "LR265", "LR266", "RF", "RF253", "RF254",
"RF255", "RF256", "RF257", "RF258", "RF259", "RF260", "RF261", "RF262",
"RF263", "RF264", "RF265", "RF266", "RF267", "RF268", "DB", "DB255", "DB256",
"DB257", "DB258", "DB259", "DB260", "DB261", "DB262", "DB263", "DB264",
"DB265", "DB266", "DB267", "DB268", "DB269", "DB270", "SG", "SG258", "SG259",
"SG260", "SG261", "SG262", "SG263", "SG264", "SG265", "SG266", "SG267",
"SG268", "SG269", "SG270", "SG271", "SG272", "SG273", "BH", "BH260", "BH261",
"BH262", "BH263", "BH264", "BH265", "BH266", "BH267", "BH268", "BH269",
"BH270", "BH271", "BH272", "BH273", "BH274", "BH275", "HS", "HS263", "HS264",
"HS265", "HS266", "HS267", "HS268", "HS269", "HS270", "HS271", "HS272",
"HS273", "HS274", "HS275", "HS276", "HS277", "MT", "MT265", "MT266", "MT267",
"MT268", "MT269", "MT270", "MT271", "MT272", "MT273", "MT274", "MT275",
"MT276", "MT277", "MT278", "MT279", "DS", "DS267", "DS268", "DS269", "DS270",
"DS271", "DS272", "DS273", "DS274", "DS275", "DS276", "DS277", "DS278",
"DS279", "DS280", "DS281", "RG", "RG272", "RG273", "RG274", "RG275", "RG276",
"RG277", "RG278", "RG279", "RG280", "RG281", "RG282", "RG283", "UUB",
"UUB277", "UUB278", "UUB279", "UUB280", "UUB281", "UUB282", "UUB283",
"UUB284", "UUB285", "UUT", "UUT283", "UUT284", "UUT285", "UUT286", "UUT287",
"UUQ", "UUQ285", "UUQ286", "UUQ287", "UUQ288", "UUQ289", "UUP", "UUP287",
"UUP288", "UUP289", "UUP290", "UUP291", "UUH", "UUH289", "UUH290", "UUH291",
"UUH292", "UUS", "UUS291", "UUS292", "UUO", "UUO293"]
_temp_iso_mass = [
1.00782503207, 1.00782503207, 2.01410177785, 2.01410177785, 3.01604927767,
3.01604927767, 4.027806424, 5.035311488, 6.044942594, 7.052749,
4.00260325415, 3.01602931914, 4.00260325415, 5.012223624, 6.018889124,
7.028020618, 8.033921897, 9.043950286, 10.052398837, 7.016004548, 3.030775,
4.027185558, 5.0125378, 6.015122794, 7.016004548, 8.022487362, 9.026789505,
10.035481259, 11.043797715, 12.053780, 9.012182201, 5.040790, 6.019726317,
7.016929828, 8.005305103, 9.012182201, 10.013533818, 11.021657749,
12.026920737, 13.035693007, 14.04289292, 15.053460, 16.061920, 11.009305406,
6.046810, 7.029917901, 8.024607233, 9.013328782, 10.012936992, 11.009305406,
12.014352104, 13.017780217, 14.025404009, 15.031103021, 16.039808829,
17.046989906, 18.056170, 19.063730, 12, 8.037675025, 9.031036689,
10.016853228, 11.011433613, 12, 13.00335483778, 14.0032419887, 15.010599256,
16.014701252, 17.022586116, 18.026759354, 19.034805018, 20.040319754,
21.049340, 22.057200, 14.00307400478, 10.041653674, 11.026090956,
12.018613197, 13.005738609, 14.00307400478, 15.00010889823, 16.006101658,
17.008450261, 18.014078959, 19.017028697, 20.023365807, 21.02710824,
22.034394934, 23.041220, 24.051040, 25.060660, 15.99491461956,
12.034404895, 13.024812213, 14.00859625, 15.003065617, 15.99491461956,
16.999131703, 17.999161001, 19.00358013, 20.004076742, 21.008655886,
22.009966947, 23.015687659, 24.020472917, 25.029460, 26.038340, 27.048260,
28.057810, 18.998403224, 14.035060, 15.018009103, 16.011465724,
17.002095237, 18.000937956, 18.998403224, 19.999981315, 20.999948951,
22.002998815, 23.003574631, 24.008115485, 25.012101747, 26.019615555,
27.026760086, 28.035670, 29.043260, 30.052500, 31.060429, 19.99244017542,
16.025761262, 17.017671504, 18.005708213, 19.001880248, 19.99244017542,
20.993846684, 21.991385113, 22.994466904, 23.993610779, 24.997736888,
26.000461206, 27.007589903, 28.012071575, 29.019385933, 30.024801045,
31.033110, 32.040020, 33.049380, 34.057028, 22.98976928087, 18.025969,
19.013877499, 20.007351328, 20.997655206, 21.994436425, 22.98976928087,
23.990962782, 24.989953968, 25.992633, 26.994076788, 27.998938, 29.002861,
30.008976, 31.013585452, 32.02046656, 33.026719756, 34.035170, 35.042493,
36.051480, 37.059340, 23.985041699, 19.03547, 20.018862545, 21.01171291,
21.999573843, 22.994123669, 23.985041699, 24.985836917, 25.982592929,
26.984340585, 27.983876825, 28.9886, 29.990434, 30.996546, 31.998975,
33.005254, 34.009456424, 35.017340, 36.023000, 37.031400, 38.037570,
39.046772, 40.053930, 26.981538627, 21.028040, 22.019520, 23.007267432,
23.999938865, 24.990428095, 25.986891692, 26.981538627, 27.981910306,
28.980445046, 29.982960256, 30.983946619, 31.988124489, 32.990843336,
33.996851837, 34.999860235, 36.006207204, 37.01067782, 38.017231021,
39.02297, 40.031450, 41.038330, 42.046890, 27.97692653246, 22.034530,
23.025520, 24.011545616, 25.004105574, 25.992329921, 26.986704905,
27.97692653246, 28.9764947, 29.973770171, 30.975363226999998,
31.974148082, 32.97800022, 33.978575524, 34.984583575, 35.986599477,
36.99293608, 37.995633601, 39.002070013, 40.005869121, 41.01456,
42.019790, 43.028660, 44.035260, 30.973761629, 24.034350, 25.020260,
26.011780, 26.999230236, 27.992314761, 28.981800606, 29.978313789,
30.973761629, 31.973907274, 32.971725543, 33.973636257, 34.973314117,
35.97825968, 36.979608946, 37.984156827, 38.986179475, 39.991296951,
40.994335435, 42.001007913, 43.00619, 44.012990, 45.019220, 46.027380,
31.972070999, 26.027880, 27.018833, 28.004372763, 28.996608049,
29.984903249, 30.979554728, 31.972070999, 32.971458759, 33.967866902,
34.969032161, 35.96708076, 36.971125567, 37.971163317, 38.975134306,
39.975451728, 40.979582149, 41.981022419, 42.98715479, 43.99021339,
44.996508112, 46.000750, 47.008590, 48.014170, 49.023619, 34.968852682,
28.028510, 29.014110, 30.004770, 30.992413086, 31.985689901, 32.977451887,
33.973762819, 34.968852682, 35.968306981, 36.965902591, 37.968010425,
38.968008164, 39.970415472, 40.970684525, 41.973254804, 42.974054403,
43.978281071, 44.980286886, 45.98421004, 46.988710, 47.994950, 49.000320,
50.007840, 51.014490, 39.96238312251, 30.021560, 31.012123, 31.997637984,
32.989925709, 33.980271244, 34.975257585, 35.967545105, 36.96677632,
37.962732394, 38.964313231, 39.96238312251, 40.964500611, 41.963045736,
42.965636056, 43.964924033, 44.968039956, 45.968094129, 46.972186792,
47.974540, 48.980520, 49.984430, 50.991630, 51.996780, 53.004940,
38.963706679, 32.021920, 33.007260, 33.998410, 34.988009692, 35.981292235,
36.973375889, 37.969081184, 38.963706679, 39.963998475, 40.961825762,
41.96240281, 42.96071554, 43.961556804, 44.960699493, 45.961976864,
46.961678473, 47.965513535, 48.967450928, 49.972783355, 50.976380,
51.982610, 52.987120, 53.994200, 54.999710, 39.962590983, 34.014120,
35.004940, 35.993087063, 36.985870269, 37.976318452, 38.970719725,
39.962590983, 40.962278062, 41.958618014, 42.958766628, 43.955481754,
44.956186566, 45.953692587, 46.954546006, 47.952534177, 48.955674148,
49.957518962, 50.961499214, 51.9651, 52.970050, 53.974350, 54.980550,
55.985570, 56.992356, 44.955911909, 36.014920, 37.003050, 37.994700,
38.984790002, 39.977967407, 40.969251125, 41.965516429, 42.961150658,
43.959402752, 44.955911909, 45.95517189, 46.952407508, 47.952231468,
48.950023975, 49.952187685, 50.953603368, 51.956675468, 52.959610,
53.963264561, 54.968243949, 55.972870, 56.977790, 57.983710, 58.989220,
59.995710, 47.947946281, 38.009770, 39.001610, 39.990498838, 40.983145,
41.973030902, 42.968522499, 43.959690069, 44.958125616, 45.952631555,
46.951763088, 47.947946281, 48.947869982, 49.944791194, 50.946614955,
51.946897311, 52.949727171, 53.951052401, 54.955265056, 55.958199639,
56.963989137, 57.966970, 58.972930, 59.976760, 60.983200, 61.987490,
62.994420, 50.943959507, 40.011090, 40.999780, 41.991230, 42.980650,
43.97411, 44.965775808, 45.960200481, 46.95490894, 47.952253707,
48.948516101, 49.947158485, 50.943959507, 51.944775479, 52.944337979,
53.946439854, 54.947233701, 55.950530966, 56.952561432, 57.956834136,
58.960207407, 59.965026862, 60.968480, 61.973780, 62.977550, 63.983470,
64.987920, 51.940507472, 42.006430, 42.997710, 43.985549, 44.97964,
45.968358635, 46.962900046, 47.954031716, 48.951335721, 49.946044205,
50.944767431, 51.940507472, 52.940649386, 53.938880395, 54.940839672,
55.940653139, 56.943613013, 57.944353129, 58.948586367, 59.950076033,
60.954717204, 61.95661319, 62.961860, 63.964410, 64.970160, 65.973380,
66.979550, 54.938045141, 44.006870, 44.994510, 45.986720, 46.976100,
47.96852, 48.959618005, 49.95423823, 50.948210787, 51.945565464,
52.941290117, 53.940358854, 54.938045141, 55.93890491, 56.938285378,
57.939981549, 58.940440237, 59.942911246, 60.944652638, 61.94842822,
62.95023999, 63.95424909, 64.956336065, 65.961080, 66.964140, 67.969300,
68.972840, 55.934937475, 45.014578, 46.000810, 46.992890, 47.980504,
48.973610, 49.962988982, 50.956819538, 51.948113875, 52.945307942,
53.939610501, 54.938293357, 55.934937475, 56.935393969, 57.933275558,
58.934875464, 59.934071683, 60.936745281, 61.936767442, 62.940369091,
63.941201265, 64.94538027, 65.946780638, 66.950947244, 67.9537, 68.958780,
69.961460, 70.966720, 71.969620, 58.933195048, 47.011490, 48.001760,
48.989720, 49.981540, 50.970720, 51.963590, 52.954218896, 53.948459635,
54.941999029, 55.939839278, 56.936291373, 57.935752814, 58.933195048,
59.933817059, 60.932475763, 61.934050563, 62.933611611, 63.935809908,
64.93647846, 65.939762004, 66.940889529, 67.944873058, 68.94632, 69.951,
70.9529, 71.957810, 72.960240, 73.965380, 74.968330, 57.935342907,
48.019750, 49.009660, 49.995930, 50.987720, 51.975680, 52.968470,
53.957905495, 54.951330251, 55.942132022, 56.939793526, 57.935342907,
58.934346705, 59.930786372, 60.931056033, 61.928345115, 62.929669374,
63.927965959, 64.930084304, 65.929139334, 66.931569414, 67.931868789,
68.935610269, 69.9365, 70.940736283, 71.942092682, 72.946470, 73.948070,
74.952870, 75.955330, 76.960550, 77.963180, 62.929597474, 51.997180,
52.985550, 53.976710, 54.966050, 55.958560, 56.949211078, 57.944538499,
58.939498028, 59.93736503, 60.933457821, 61.932583745, 62.929597474,
63.929764183, 64.927789485, 65.928868813, 66.927730314, 67.929610889,
68.929429269, 69.932392343, 70.932676833, 71.935820307, 72.936675282,
73.939874862, 74.9419, 75.945275026, 76.947850, 77.951960, 78.954560,
79.960870, 63.929142222, 53.992950, 54.983980, 55.972380, 56.964788,
57.954591555, 58.949263764, 59.941827035, 60.939510635, 61.934329764,
62.933211566, 63.929142222, 64.929240984, 65.926033419, 66.927127345,
67.924844154, 68.926550281, 69.925319274, 70.927721599, 71.926857951,
72.929779104, 73.929458609, 74.932936741, 75.93329357, 76.936958967,
77.938440216, 78.942652, 79.944342348, 80.950480, 81.954420, 82.961030,
68.925573587, 55.994910, 56.982930, 57.974250, 58.963370, 59.957060,
60.949446287, 61.944175238, 62.939294196, 63.936838747, 64.932734754,
65.93158901, 66.928201703, 67.927980084, 68.925573587, 69.926021972,
70.924701349, 71.926366268, 72.925174682, 73.926945762, 74.926500246,
75.928827626, 76.9291543, 77.93160818, 78.93289326, 79.936515781,
80.937752355, 81.942990, 82.946980, 83.952650, 84.957000, 85.963120,
73.921177767, 57.991010, 58.981750, 59.970190, 60.963790, 61.954650,
62.949640, 63.941653, 64.939436406, 65.933843453, 66.93273407,
67.92809424, 68.927964533, 69.924247381, 70.924950954, 71.922075815,
72.923458945, 73.921177767, 74.922858948, 75.921402557, 76.923548591,
77.922852739, 78.925400995, 79.925372392, 80.928820467, 81.929549725,
82.934620, 83.937470, 84.943030, 85.946490, 86.952510, 87.956910,
88.963830, 74.921596478, 59.993130, 60.980620, 61.973200, 62.963690,
63.957572, 64.949564, 65.94471, 66.939186071, 67.936769069, 68.932273675,
69.930924826, 70.927112428, 71.926752283, 72.923824844, 73.923928692,
74.921596478, 75.922394021, 76.920647286, 77.921827281, 78.920947934,
79.922533816, 80.922132287, 81.924504067, 82.924980024, 83.929058,
84.932020, 85.936500, 86.939900, 87.944940, 88.949390, 89.955500,
90.960430, 91.966800, 79.916521271, 64.964660, 65.955210, 66.950090,
67.941798, 68.939557817, 69.933390644, 70.932241822, 71.927112352,
72.926765345, 73.922476436, 74.922523368, 75.919213597, 76.919914038,
77.91730909, 78.918499098, 79.916521271, 80.917992474, 81.916699401,
82.919118473, 83.918462354, 84.922245053, 85.924271579, 86.928521358,
87.931423998, 88.936450, 89.939960, 90.945960, 91.949920, 92.956290,
93.960490, 78.918337087, 66.964790, 67.958516, 68.950106, 69.944792,
70.93874, 71.936644572, 72.931691524, 73.929891034, 74.925776207,
75.924541469, 76.921379082, 77.921145706, 78.918337087, 79.918529296,
80.916290563, 81.916804119, 82.915180421, 83.916478974, 84.915608403,
85.918797577, 86.920711324, 87.924065926, 88.926385334, 89.930627737,
90.933968095, 91.939258714, 92.943050, 93.948680, 94.952870, 95.958530,
96.962800, 85.910610729, 68.965180, 69.955259, 70.949625738, 71.942092038,
72.939289195, 73.933084369, 74.930945746, 75.925910078, 76.92467,
77.920364783, 78.920082431, 79.916378965, 80.916592015, 81.9134836,
82.914136099, 83.911506687, 84.912527331, 85.910610729, 86.913354862,
87.914446969, 88.917630581, 89.919516555, 90.923445215, 91.92615621,
92.931274357, 93.934360, 94.939840, 95.943070, 96.948560, 97.951910,
98.957600, 99.961140, 84.911789737, 70.965320, 71.959080, 72.950561,
73.944264751, 74.93857, 75.935072226, 76.930408, 77.928141, 78.92398946,
79.92251925, 80.918995913, 81.918208598, 82.915109701, 83.914384821,
84.911789737, 85.911167419, 86.909180526, 87.911315588, 88.912278016,
89.914801694, 90.916536958, 91.9197289, 92.922041876, 93.926404946,
94.929302889, 95.934272637, 96.937351916, 97.941790668, 98.945379283,
99.949870, 100.953196445, 101.958870, 87.905612124, 72.965970,
73.956310, 74.949949568, 75.941766782, 76.937944782, 77.93218,
78.929708, 79.924521013, 80.923211846, 81.918401639, 82.917556701,
83.913425275, 84.912932803, 85.909260204, 86.908877124, 87.905612124,
88.907450675, 89.907737888, 90.910203095, 91.911037858, 92.914025634,
93.915361312, 94.919358766, 95.921696802, 96.926152923, 97.928452934,
98.933240926, 99.935351911, 100.940517888, 101.943018987, 102.948950,
103.952330, 104.958580, 88.905848295, 75.958450, 76.949645, 77.943610,
78.937351634, 79.93428, 80.929127468, 81.926792451, 82.922354243,
83.920388264, 84.916433039, 85.914885576, 86.91087573, 87.909501146,
88.905848295, 89.907151886, 90.907304791, 91.908949143, 92.909582713,
93.911595245, 94.912820621, 95.915891343, 96.918133995, 97.92220302,
98.924636204, 99.927756586, 100.93031385, 101.933555695, 102.936730,
103.941050, 104.944870, 105.949790, 106.954140, 107.959480,
89.904704416, 77.955230, 78.949160, 79.9404, 80.937210026, 81.931087,
82.928653801, 83.923250, 84.921471182, 85.916473591, 86.914816252,
87.910226904, 88.9088895, 89.904704416, 90.905645767, 91.905040847,
92.906476006, 93.906315192, 94.9080426, 95.908273386, 96.910953109,
97.912734892, 98.916512106, 99.917761889, 100.921140415, 101.922981285,
102.926599606, 103.928780, 104.933050, 105.935910, 106.940750,
107.943960, 108.949240, 109.952870, 92.906378058, 80.949030,
81.943130, 82.936705382, 83.933570, 84.927912447, 85.925038326,
86.920361108, 87.918332163, 88.913418245, 89.911264845,
90.906996243, 91.907193888, 92.906378058, 93.907283888, 94.906835792,
95.908100647, 96.908098556, 97.910328412, 98.911618375, 99.914181619,
100.915252025, 101.918037614, 102.919143842, 103.922464701,
104.923936545, 105.927970, 106.930310, 107.934840, 108.937630,
109.942440, 110.945650, 111.950830, 112.954700, 97.905408169, 82.948740,
83.940090, 84.936550, 85.930695904, 86.927326502, 87.921953241,
88.919480009, 89.913936896, 90.911750194, 91.906810991, 92.90681261,
93.905088269, 94.905842129, 95.904679477, 96.906021465, 97.905408169,
98.90771187, 99.907477336, 100.910347001, 101.91029736, 102.913207142,
103.913763625, 104.91697461, 105.918136802, 106.921692604, 107.923453,
108.927810, 109.929730, 110.934410, 111.936840, 112.941880, 113.944920,
114.950290, 98.906254747, 84.948830, 85.942880, 86.936530, 87.932678,
88.927167, 89.923556564, 90.918427639, 91.915260166, 92.910248984,
93.909657002, 94.907657084, 95.907871383, 96.906365358, 97.907215966,
98.906254747, 99.90765778, 100.907314659, 101.909215019, 102.909181351,
103.911447454, 104.911660566, 105.914357927, 106.915079572, 107.918461226,
108.919982665, 109.923820483, 110.92569283, 111.929146493, 112.931590,
113.935880, 114.938690, 115.943370, 116.946480, 117.951480, 101.904349312,
86.949180, 87.940260, 88.936110, 89.929890, 90.926292, 91.920120,
92.917052034, 93.911359711, 94.910412929, 95.907597835, 96.9075547,
97.905287132, 98.905939302, 99.904219476, 100.905582087, 101.904349312,
102.906323847, 103.905432701, 104.907752866, 105.907329433,
106.909905089, 107.910173465, 108.913203233, 109.914136041, 110.917696,
111.918965, 112.922487194, 113.924281, 114.928686173, 115.930810,
116.935580, 117.937820, 118.942840, 119.945310, 102.905504292,
88.948837, 89.942870, 90.936550, 91.931980, 92.925740, 93.921698,
94.91589874, 95.914460631, 96.911336797, 97.910708158, 98.908132104,
99.90812155, 100.906163625, 101.906843196, 102.905504292, 103.906655518,
104.905693821, 105.907287135, 106.906748423, 107.908728018, 108.908737289,
109.911136411, 110.911585913, 111.914394159, 112.915530627, 113.918806,
114.920334, 115.924062, 116.925980, 117.930070, 118.932110, 119.936410,
120.938720, 121.943210, 105.903485715, 90.949110, 91.940420, 92.935910,
93.928770, 94.924690, 95.918164359, 96.916479073, 97.912720902,
98.911767833, 99.908505886, 100.908289242, 101.905608544, 102.906087307,
103.904035834, 104.90508492, 105.903485715, 106.905133481, 107.903891701,
108.905950451, 109.905153254, 110.907670734, 111.907314058, 112.910152908,
113.910362638, 114.913683824, 115.914158662, 116.917841338, 117.9189843,
118.923110, 119.924691878, 120.928870, 121.930550, 122.934930, 123.936880,
106.90509682, 92.949780, 93.942780, 94.935480, 95.930680, 96.923972412,
97.921566201, 98.917597178, 99.916104255, 100.912802233, 101.911685,
102.90897272, 103.908629157, 104.906528661, 105.906668921, 106.90509682,
107.905955556, 108.904752292, 109.906107231, 110.905291157, 111.907004814,
112.906566579, 113.908803704, 114.908762698, 115.911359933, 116.911684562,
117.914582768, 118.915665059, 119.918787384, 120.919848046, 121.923530,
122.924900, 123.928640, 124.930430, 125.934500, 126.936770, 127.941170,
128.943690, 129.950448, 113.90335854, 94.949870, 95.939770, 96.934940,
97.927395546, 98.925010, 99.920289525, 100.918681538, 101.914462258,
102.913419246, 103.909849475, 104.909467905, 105.90645941, 106.906617928,
107.904183683, 108.904982293, 109.90300207, 110.904178107, 111.902757809,
112.904401662, 113.90335854, 114.905430969, 115.904755809, 116.907218618,
117.90691453, 118.909921597, 119.909850129, 120.912977363, 121.913332432,
122.917002999, 123.917647616, 124.92124637, 125.922353321, 126.926443864,
127.927762285, 128.932150, 129.933901937, 130.940670, 131.945550,
114.903878484, 96.949540, 97.942140, 98.934220, 99.931110851,
100.926340, 101.924090238, 102.919914188, 103.918296171, 104.91467354,
105.913465411, 106.9102951, 107.90969818, 108.907150507, 109.907165274,
110.905103278, 111.905532331, 112.904057761, 113.904913876,
114.903878484, 115.905259703, 116.904513564, 117.906354367, 118.90584535,
119.907959608, 120.907845822, 121.91027601, 122.910438276, 123.913175231,
124.913600588, 125.916463857, 126.917353091, 127.920172328, 128.92169698,
129.924970049, 130.926851767, 131.93299026, 132.937810, 133.944150,
134.949330, 119.902194676, 98.949330, 99.939044343, 100.936060,
101.930295324, 102.928100, 103.923143223, 104.921349437, 105.91688062,
106.915644329, 107.911925378, 108.911283214, 109.907842791, 110.90773446,
111.904818207, 112.905170577, 113.902778869, 114.903342397, 115.90174053,
116.902951656, 117.901603167, 118.90330763, 119.902194676, 120.90423548,
121.903439046, 122.905720838, 123.905273946, 124.907784125, 125.90765328,
126.910360024, 127.910536624, 128.913479, 129.913967295, 130.916999769,
131.917815713, 132.923829249, 133.928291765, 134.934730, 135.939340,
136.945990, 120.903815686, 102.939690, 103.936472, 104.931486348,
105.928791, 106.924150, 107.922160, 108.918132426, 109.916753, 110.913163,
111.912398009, 112.909371672, 113.909269, 114.906598, 115.906793629,
116.904835941, 117.905528731, 118.903942009, 119.905072427, 120.903815686,
121.905173651, 122.90421397, 123.905935743, 124.905253818, 125.90724748,
126.906923609, 127.909169001, 128.909148442, 129.911656324, 130.911982275,
131.914466896, 132.91525163, 133.920379744, 134.925165771, 135.930350,
136.935310, 137.940790, 138.945980, 129.906224399, 104.943640,
105.937504237, 106.935006, 107.929444597, 108.927415515, 109.922407316,
110.921110692, 111.917013672, 112.915891, 113.912089, 114.911902,
115.90846, 116.908644719, 117.905827581, 118.906403645, 119.904020222,
120.904936424, 121.903043898, 122.904270029, 123.902817896, 124.904430731,
125.903311696, 126.905226336, 127.904463056, 128.906598238, 129.906224399,
130.908523864, 131.90855316, 132.910955306, 133.911368737, 134.916448592,
135.920101246, 136.925322954, 137.929220, 138.934730, 139.938850,
140.944650, 141.949080, 126.904472681, 107.943475, 108.938149417,
109.935242, 110.930276, 111.927970, 112.923640583, 113.921850, 114.918048,
115.916808633, 116.91365, 117.913074, 118.910074, 119.910048173,
120.907366811, 121.907589284, 122.905588965, 123.906209852, 124.904630164,
125.905624153, 126.904472681, 127.905809443, 128.904987722, 129.906674247,
130.906124609, 131.907997381, 132.907796939, 133.909744465, 134.910048121,
135.914653993, 136.91787084, 137.922349591, 138.926099478, 139.931000,
140.935030, 141.940180, 142.944560, 143.949990, 131.904153457, 109.944278068,
110.941602, 111.935623112, 112.933341174, 113.927980306, 114.92629392,
115.921581087, 116.920358735, 117.916178655, 118.915410688, 119.911784244,
120.911461829, 121.908367632, 122.90848191, 123.905893003, 124.906395464,
125.904273634, 126.905183723, 127.903531275, 128.904779435, 129.903508007,
130.905082362, 131.904153457, 132.905910722, 133.905394464, 134.907227495,
135.907218794, 136.911562125, 137.913954475, 138.918792936, 139.921640943,
140.926648049, 141.92970959, 142.935110, 143.938510, 144.944070, 145.947750,
146.953560, 132.905451932, 111.950301, 112.944493274, 113.941450, 114.935910,
115.933367, 116.928670701, 117.926559494, 118.922377304, 119.920677253,
120.917229209, 121.916113434, 122.912996036, 123.912257798, 124.90972827,
125.909451977, 126.907417525, 127.907748866, 128.906064426, 129.906708552,
130.905463926, 131.90643426, 132.905451932, 133.906718475, 134.905977008,
135.907311576, 136.907089473, 137.911016704, 138.913363999, 139.917282354,
140.920045752, 141.924298927, 142.92735175, 143.932076914, 144.93552617,
145.940289423, 146.944155008, 147.949218153, 148.952930, 149.958170,
150.962190, 137.905247237, 113.950675405, 114.947370, 115.941380,
116.938499, 117.933040, 118.930659661, 119.926044974, 120.924054499,
121.919904, 122.918781036, 123.915093603, 124.914472912, 125.911250177,
126.911093797, 127.908317698, 128.908679439, 129.906320811, 130.906941118,
131.905061288, 132.90600749, 133.904508383, 134.905688591, 135.904575945,
136.905827384, 137.905247237, 138.908841341, 139.910604505, 140.914411009,
141.91645341, 142.920626719, 143.922952853, 144.927627032, 145.930219572,
146.934945, 147.937720047, 148.942580, 149.945680, 150.950810, 151.954270,
152.959610, 138.906353267, 116.950068, 117.946730, 118.940990, 119.938070,
120.933010, 121.930710, 122.926240, 123.924574275, 124.920816034,
125.919512667, 126.916375448, 127.915585177, 128.912692815, 129.912368724,
130.91007, 131.910101145, 132.908218, 133.908514011, 134.906976844,
135.907635536, 136.906493598, 137.90711193, 138.906353267, 139.909477645,
140.910962152, 141.91407913, 142.91606272, 143.919599647, 144.921645401,
145.92579346, 146.928235284, 147.932228868, 148.934734, 149.938770,
150.941720, 151.946250, 152.949620, 153.954500, 154.958350, 139.905438706,
118.952760, 119.946640, 120.943420, 121.937910, 122.935400, 123.930410,
124.928440, 125.923971, 126.922731, 127.918911, 128.918102, 129.914736,
130.914422, 131.911460487, 132.91151502, 133.908924821, 134.909151396,
135.907172422, 136.907805577, 137.905991321, 138.906652651, 139.905438706,
140.90827627, 141.909244205, 142.91238591, 143.913647336, 144.917233135,
145.918759009, 146.922673954, 147.92443241, 148.928399883, 149.930408931,
150.933976196, 151.936540, 152.940580, 153.943420, 154.948040, 155.951260,
156.956340, 140.907652769, 120.955364, 121.951810, 122.945960, 123.942960,
124.937830, 125.935310, 126.930830, 127.928791, 128.925095, 129.92359,
130.920259, 131.919255, 132.916330532, 133.915711737, 134.913111745,
135.912691611, 136.910705455, 137.910754636, 138.908938399, 139.909075874,
140.907652769, 141.910044806, 142.910816926, 143.913305245, 144.9145117,
145.917644336, 146.918995992, 147.922135026, 148.923717651, 149.926672997,
150.928318618, 151.931499225, 152.933838905, 153.937518153, 154.940120,
155.944270, 156.947430, 157.951980, 158.955500, 141.907723297, 123.952230,
124.948880, 125.943220, 126.940500, 127.935390, 128.933188, 129.928506,
130.927247, 131.923321237, 132.922348, 133.918790181, 134.91818116,
135.914976035, 136.914567137, 137.911949961, 138.911978288, 139.909552,
140.909609854, 141.907723297, 142.90981429, 143.910087274, 144.912573636,
145.913116939, 146.916100441, 147.916893288, 148.920148842, 149.920890888,
150.923828929, 151.924682219, 152.927698232, 153.929477307, 154.932932,
155.935018114, 156.939030, 157.941600, 158.946090, 159.949090, 160.953880,
144.912749023, 125.957520, 126.951630, 127.948420, 128.943160, 129.940450,
130.935870, 131.933750, 132.929782, 133.928353, 134.924876, 135.923565829,
136.920479493, 137.919548281, 138.916804082, 139.916041789, 140.913555054,
141.912874471, 142.910932616, 143.912590843, 144.912749023, 145.914696305,
146.915138545, 147.917474618, 148.918334155, 149.920983561, 150.921206973,
151.923496795, 152.924116889, 153.926463943, 154.928101267, 155.931056736,
156.933039369, 157.936561407, 158.938970, 159.942990, 160.945860,
161.950290, 162.953680, 151.919732425, 127.958080, 128.954640, 129.948920,
130.946110, 131.940690, 132.938670, 133.933970, 134.93252, 135.928275527,
136.926971746, 137.923243961, 138.922296605, 139.918994687, 140.918476488,
141.915197641, 142.914628338, 143.911999478, 144.913410353, 145.9130409,
146.914897923, 147.914822674, 148.917184735, 149.917275539, 150.919932409,
151.919732425, 152.922097356, 153.922209273, 154.924640161, 155.925527887,
156.928358717, 157.929991317, 158.933211271, 159.935140, 160.938830,
161.941220, 162.945360, 163.948280, 164.952980, 152.921230339, 129.963569,
130.957753, 131.954370, 132.949240, 133.946510, 134.941820, 135.939600,
136.935570, 137.933709, 138.92979228, 139.928087607, 140.92493072,
141.923434945, 142.920297509, 143.918816823, 144.916265237, 145.917205817,
146.916746111, 147.918085895, 148.917931238, 149.919701819, 150.919850161,
151.921744534, 152.921230339, 153.922979237, 154.92289326, 155.924752249,
156.925423647, 157.927845302, 158.929088861, 159.931971, 160.933680,
161.937040, 162.939210, 163.942990, 164.945720, 165.949970, 166.953210,
157.924103912, 133.955370, 134.952570, 135.947340, 136.945020, 137.940120,
138.938240, 139.933674, 140.932126, 141.928116, 142.92674951, 143.922963,
144.921709252, 145.918310608, 146.91909442, 147.918114524, 148.919340915,
149.918658876, 150.920348482, 151.919790996, 152.921749543, 153.920865598,
154.922622022, 155.922122743, 156.923960135, 157.924103912, 158.926388658,
159.927054146, 160.929669211, 161.930984751, 162.933990, 163.935860,
164.939380, 165.941600, 166.945570, 167.948360, 168.952870, 158.925346757,
135.961380, 136.955980, 137.953160, 138.948290, 139.945805049, 140.941448,
141.938744, 142.935121, 143.933045, 144.929274, 145.927246584, 146.924044585,
147.924271701, 148.923245909, 149.923659686, 150.923102543, 151.924074438,
152.923434588, 153.924678019, 154.923505236, 155.924747213, 156.924024604,
157.925413137, 158.925346757, 159.927167606, 160.927569919, 161.929488234,
162.930647536, 163.933350838, 164.934880, 165.937991959, 166.940050,
167.943640, 168.946220, 169.950250, 170.953300, 163.929174751, 137.962490,
138.959540, 139.954010, 140.951350, 141.946366, 142.943830, 143.939254,
144.937425, 145.932845369, 146.9310915, 147.927149831, 148.927304787,
149.925585184, 150.926184601, 151.9247183, 152.92576467, 153.924424457,
154.925753775, 155.92428311, 156.925466095, 157.924409487, 158.925739214,
159.925197517, 160.926933364, 161.926798447, 162.928731159, 163.929174751,
164.931703333, 165.932806741, 166.935655462, 167.937128769, 168.940307614,
169.942390, 170.946200, 171.948760, 172.953000, 164.93032207, 139.968539,
140.963098, 141.959770, 142.954610, 143.951480, 144.947200, 145.944640,
146.940056, 147.937718, 148.933774771, 149.933496182, 150.931688142,
151.931713714, 152.930198789, 153.930601579, 154.929103491, 155.929839,
156.928256188, 157.928941007, 158.927711959, 159.928729478, 160.927854776,
161.929095504, 162.928733903, 163.930233507, 164.93032207, 165.932284162,
166.933132633, 167.935515708, 168.936872273, 169.939618929, 170.94146515,
171.944820, 172.947290, 173.951150, 174.954050, 165.930293061, 142.966340,
143.960380, 144.957390, 145.952000, 146.949490, 147.944550, 148.942306,
149.937913839, 150.937448903, 151.935050389, 152.935063492, 153.932783081,
154.933208949, 155.931064698, 156.931916, 157.929893474, 158.930684066,
159.929083292, 160.929995309, 161.928778264, 162.930032749, 163.929200229,
164.930726003, 165.930293061, 166.932048159, 167.932370224, 168.934590364,
169.935464312, 170.938029808, 171.939356113, 172.942400, 173.944230,
174.947770, 175.950080, 176.954050, 168.93421325, 144.970073, 145.966425,
146.960961, 147.957840, 148.952720, 149.949960, 150.94548349, 151.944422,
152.942012112, 153.941567808, 154.939199459, 155.938979933, 156.936973,
157.936979525, 158.934975, 159.935262801, 160.933549, 161.933994682,
162.932651124, 163.93356, 164.932435492, 165.933554131, 166.932851622,
167.934172776, 168.93421325, 169.935801397, 170.93642944, 171.938400044,
172.939603607, 173.942168605, 174.943836853, 175.946994685, 176.949040,
177.952640, 178.955340, 173.938862089, 147.967420, 148.964040, 149.958420,
150.955400769, 151.950288919, 152.949480, 153.946393928, 154.945782332,
155.942818215, 156.942627848, 157.939865617, 158.940050099, 159.937552344,
160.937901678, 161.93576821, 162.936334305, 163.934489416, 164.935279,
165.933882042, 166.934949605, 167.933896895, 168.935189802, 169.934761837,
170.936325799, 171.936381469, 172.938210787, 173.938862089, 174.94127645,
175.942571683, 176.945260822, 177.94664668, 178.950170, 179.952330,
180.956150, 174.940771819, 149.973228, 150.967577, 151.964120,
152.958767331, 153.957522, 154.954316216, 155.953032523, 156.9500983,
157.949313283, 158.946628776, 159.946033, 160.943572, 161.943277288,
162.941179, 163.941339, 164.939406724, 165.939859, 166.93827,
167.938739111, 168.937651439, 169.938474968, 170.937913136, 171.939085669,
172.938930602, 173.94033748, 174.940771819, 175.94268631, 176.943758055,
177.945954559, 178.947327443, 179.94988116, 180.951970, 181.955040,
182.957570, 183.960910, 179.946549953, 152.970690, 153.964860, 154.963390,
155.959364025, 156.958396, 157.954799366, 158.95399487, 159.950684379,
160.950274844, 161.947210498, 162.947089, 163.944367284, 164.944567,
165.94218, 166.9426, 167.940568, 168.941259, 169.939609, 170.940492,
171.939448301, 172.940513, 173.940046178, 174.941509181, 175.941408631,
176.943220651, 177.943698766, 178.945816145, 179.946549953, 180.949101246,
181.950554096, 182.953530439, 183.955446515, 184.958820, 185.960890,
186.964590, 187.966850, 180.947995763, 154.974592, 155.972303,
156.968192445, 157.966699, 158.963018173, 159.961486056, 160.958417,
161.957291859, 162.954330271, 163.953534, 164.950772514, 165.950512,
166.948093, 167.948047, 168.946011, 169.946175, 170.944476, 171.944895,
172.94375, 173.944454, 174.943737, 175.944857, 176.944472403,
177.945778221, 178.945929535, 179.947464831, 180.947995763, 181.950151849,
182.951372616, 183.954007966, 184.955559375, 185.958552023, 186.960530,
187.963700, 188.965830, 189.969230, 183.950931188, 157.974562, 158.972918,
159.968478805, 160.967357, 161.963497417, 162.962523542, 163.958954382,
164.958279949, 165.955027253, 166.954816014, 167.951808394, 168.95177879,
169.949228482, 170.949451, 171.947292, 172.947689, 173.946079, 174.946717,
175.945634, 176.946643, 177.945876236, 178.947070447, 179.946704459,
180.948197248, 181.948204156, 182.950222951, 183.950931188, 184.953419264,
185.954364127, 186.957160466, 187.958489105, 188.961912868, 189.963181378,
190.966600, 191.968170, 186.955753109, 159.982115, 160.977589119,
161.976002, 162.972080535, 163.970323, 164.967088557, 165.965808,
166.962601, 167.961572608, 168.958791096, 169.958220071, 170.955716,
171.955422961, 172.953243, 173.953115, 174.951381, 175.951623, 176.950328,
177.950989, 178.949987641, 179.950789084, 180.950067916, 181.95121008,
182.950819841, 183.952520756, 184.952954982, 185.954986084, 186.955753109,
187.958114438, 188.959229007, 189.961817977, 190.963125242, 191.965960,
192.967470, 193.970420, 191.96148069, 161.984431, 162.982690,
163.978035649, 164.976762, 165.972690753, 166.971547969, 167.967803678,
168.96701927, 169.963577028, 170.963184819, 171.960023303, 172.959808409,
173.957062202, 174.956945835, 175.954806, 176.954965324, 177.953251241,
178.953816017, 179.952378803, 180.953244, 181.952110186, 182.953126102,
183.952489071, 184.954042265, 185.953838158, 186.955750458, 187.955838228,
188.95814747, 189.958447048, 190.960929718, 191.96148069, 192.964151563,
193.965182083, 194.968126661, 195.969639333, 192.96292643, 163.992201,
164.987520, 165.985824, 166.981665156, 167.979881, 168.976294942, 169.974965,
170.971626042, 171.970456, 172.967501739, 173.966861045, 174.964112895,
175.963648688, 176.9613015, 177.961082, 178.959122266, 179.959229446,
180.957625297, 181.958076296, 182.956846458, 183.957476, 184.956698,
185.957946104, 186.957363361, 187.958853121, 188.958718935, 189.960545968,
190.960594046, 191.962605012, 192.96292643, 193.965078378, 194.965979573,
195.968396542, 196.969653285, 197.972280, 198.973804583, 194.964791134,
165.994855, 166.992979, 167.988150742, 168.986715, 169.982495289,
170.981244542, 171.977347128, 172.976444754, 173.972818767, 174.972420552,
175.968944622, 176.968469481, 177.965648724, 178.965363404, 179.963031477,
180.963097285, 181.961170656, 182.961596703, 183.959922251, 184.960619,
185.959350813, 186.960587, 187.959395391, 188.960833686, 189.959931655,
190.961676661, 191.961038005, 192.962987401, 193.962680253, 194.964791134,
195.964951521, 196.967340182, 197.96789279, 198.970593094, 199.971440677,
200.974512868, 201.975740, 196.966568662, 168.998080, 169.996122,
170.991878881, 171.990035, 172.98623738, 173.984761, 174.981274107,
175.980099, 176.976864908, 177.97603192, 178.973212812, 179.972521124,
180.970079048, 181.969617874, 182.967593034, 183.967451524, 184.965789411,
185.965952703, 186.964567541, 187.965323661, 188.963948286, 189.964700339,
190.963704225, 191.964812953, 192.964149715, 193.96536525, 194.96503464,
195.966569813, 196.966568662, 197.968242303, 198.968765193, 199.970725647,
200.97165724, 201.973805838, 202.975154542, 203.977724, 204.979870,
201.970643011, 171.003760, 171.998832686, 172.997242, 173.992863695,
174.99142327, 175.98735458, 176.986279158, 177.982483143, 178.981833861,
179.978266394, 180.977819311, 181.974689964, 182.974449841, 183.971713051,
184.971899086, 185.96936179, 186.969814236, 187.967577049, 188.968190034,
189.966322449, 190.967157105, 191.965634327, 192.966665421, 193.965439409,
194.966720113, 195.965832649, 196.967212908, 197.966769032, 198.968279932,
199.968326004, 200.970302268, 201.970643011, 202.972872484, 203.973493933,
204.976073386, 205.977514066, 206.982588545, 207.985940, 208.991040,
209.994510, 204.974427541, 176.000590, 176.996427286, 177.994897,
178.991089082, 179.989906, 180.986257447, 181.985667104, 182.982192802,
183.981873122, 184.978791305, 185.978325, 186.975905897, 187.976009782,
188.973588428, 189.973877149, 190.971786154, 191.972225, 192.970672,
193.9712, 194.969774335, 195.970481151, 196.969574511, 197.970483495,
198.969877, 199.970962672, 200.970818891, 201.972105808, 202.97234422,
203.973863522, 204.974427541, 205.97611032, 206.977419429, 207.9820187,
208.985358952, 209.990073689, 210.993477, 211.998228, 207.976652071,
178.003830191, 179.002150, 179.997918173, 180.996623958, 181.992671842,
182.991874629, 183.988142339, 184.987609944, 185.984238945, 186.98391837,
187.980874338, 188.980807, 189.978081517, 190.978265, 191.975785171,
192.976173234, 193.97401207, 194.97454205, 195.972774109, 196.973431124,
197.972033959, 198.97291665, 199.971826675, 200.972884511, 201.972159133,
202.973390521, 203.973043589, 204.974481755, 205.974465278, 206.975896887,
207.976652071, 208.98109012, 209.984188527, 210.988736964, 211.991897543,
212.996581499, 213.999805408, 215.004807, 208.980398734, 184.001124,
184.997625, 185.996597625, 186.993157835, 187.992265154, 188.989199012,
189.988295129, 190.985786119, 191.985457954, 192.982959771, 193.98283396,
194.980650737, 195.980666509, 196.978864454, 197.979206, 198.977671961,
199.978131829, 200.977009036, 201.977742324, 202.976876001, 203.977812736,
204.977389366, 205.97849913, 206.978470679, 207.979742196, 208.980398734,
209.984120371, 210.98726946, 211.991285724, 212.994384666, 213.998711539,
215.001769776, 216.006305943, 217.009470, 218.014316, 208.982430435,
187.999422048, 188.998480562, 189.995101185, 190.994574485, 191.991335149,
192.991025275, 193.988185606, 194.988110728, 195.98553458, 196.98565963,
197.983388616, 198.983666063, 199.981798604, 200.982259764, 201.980757541,
202.981420103, 203.980318121, 204.981203322, 205.980481099, 206.981593173,
207.981245702, 208.982430435, 209.982873673, 210.986653154, 211.988867969,
212.99285728, 213.99520135, 214.999419988, 216.001915035, 217.006334796,
218.008973037, 219.013744, 220.016602, 210.987496271, 192.999843112,
193.998725085, 194.996268098, 195.995788077, 196.993189215, 197.992837202,
198.990532254, 199.990351264, 200.988416999, 201.988630236, 202.986941984,
203.987251326, 204.986074483, 205.986667036, 206.985783502, 207.986589977,
208.986173143, 209.98714771, 210.987496271, 211.990744771, 212.992936646,
213.996371733, 214.99865257, 216.002423257, 217.004718822, 218.008694336,
219.011161691, 220.015407682, 221.018050, 222.022330, 223.025190,
222.017577738, 195.005437696, 196.002115223, 197.001584351, 197.998678663,
198.998370297, 199.9956993, 200.995628335, 201.993263492, 202.993386687,
203.99142874, 204.991718799, 205.990214104, 206.990734225, 207.98964247,
208.990414742, 209.989696216, 210.990600523, 211.990703529, 212.993882668,
213.995362554, 214.998745483, 216.00027437, 217.003927675, 218.005601256,
219.009480204, 220.011393981, 221.015536782, 222.017577738, 223.021790,
224.024090, 225.028440, 226.030890, 227.035407, 228.037986, 222.01755173,
199.007258147, 200.00657249, 201.003860867, 202.003372847, 203.000924647,
204.000653204, 204.99859396, 205.998666066, 206.996949414, 207.997138783,
208.995953555, 209.996407738, 210.995536544, 211.996202244, 212.996189081,
213.998971145, 215.000341497, 216.00319799, 217.004631951, 218.007578322,
219.009252149, 220.012327405, 221.014254762, 222.01755173, 223.019735857,
224.023249951, 225.025565414, 226.029386231, 227.031835938, 228.035729,
229.038450228, 230.042510, 231.045440, 232.049772, 228.031070292,
202.009890686, 203.009271619, 204.006499668, 205.00626857, 206.00382727,
207.003798105, 208.00183994, 209.001991373, 210.000494978, 211.000897987,
211.999794499, 213.000383959, 214.000107894, 215.002719834, 216.003533035,
217.006320327, 218.00714023, 219.010085078, 220.011028384, 221.013917338,
222.01537453, 223.018502171, 224.020211821, 225.023611564, 226.025409823,
227.029177842, 228.031070292, 229.034957577, 230.037056394, 231.041220,
232.043638, 233.048060, 234.050704, 227.027752127, 206.01450498,
207.011949748, 208.011551551, 209.009494863, 210.009435986, 211.007734835,
212.007813822, 213.006607643, 214.006901798, 215.006453625, 216.008720075,
217.009346914, 218.011641453, 219.012420389, 220.014762979, 221.015591248,
222.017843851, 223.019137468, 224.021722866, 225.023229585, 226.026098089,
227.027752127, 228.031021112, 229.033015243, 230.036294178, 231.038558786,
232.042027438, 233.044550, 234.048420, 235.051232, 236.055296,
232.038055325, 209.017715682, 210.015075342, 211.014928413, 212.012980288,
213.01301014, 214.01149977, 215.01173033, 216.011062115, 217.013114328,
218.013284499, 219.015536895, 220.015747762, 221.018183674, 222.018468121,
223.020811448, 224.021466895, 225.023951021, 226.024903069, 227.02770407,
228.028741127, 229.03176243, 230.033133843, 231.036304343, 232.038055325,
233.041581843, 234.04360123, 235.047510074, 236.049870, 237.053894,
238.056496, 231.03588399, 212.023204138, 213.02110934, 214.020918417,
215.019185865, 216.019109564, 217.018323986, 218.020041889, 219.019883143,
220.021875303, 221.021877983, 222.023742, 223.023962273, 224.025625738,
225.026130678, 226.027947753, 227.028805072, 228.031051376, 229.032096793,
230.034540754, 231.03588399, 232.038591592, 233.040247277, 234.043308058,
235.045443615, 236.048681284, 237.051145659, 238.05450271, 239.057260,
240.060980, 238.050788247, 217.024368791, 218.023535671, 219.02491916,
220.024723, 221.026399, 222.026086, 223.0277386, 224.027604778,
225.029390717, 226.029338702, 227.031156367, 228.031374006, 229.033505939,
230.033939784, 231.036293704, 232.037156152, 233.039635207, 234.040952088,
235.043929918, 236.045568006, 237.048730184, 238.050788247, 239.054293299,
240.056591988, 241.060330, 242.062931, 237.048173444, 225.033913933,
226.035145, 227.034956789, 228.036180, 229.036263808, 230.037827597,
231.038245085, 232.040108, 233.040740546, 234.042895038, 235.044063267,
236.0465696, 237.048173444, 238.050946405, 239.052939025, 240.056162182,
241.058252431, 242.06164118, 243.064279, 244.067850, 242.058742611,
228.038742328, 229.040150212, 230.039649886, 231.041101107, 232.041187097,
233.042997375, 234.043317076, 235.04528605, 236.046057964, 237.048409658,
238.049559894, 239.052163381, 240.053813545, 241.056851456, 242.058742611,
243.062003092, 244.064203907, 245.067747154, 246.070204627, 247.074070,
243.06138108, 231.045560, 232.046590, 233.046348, 234.047809, 235.047946,
236.049579, 237.049996, 238.051984324, 239.053024479, 240.055300179,
241.056829144, 242.059549159, 243.06138108, 244.064284847, 245.066452114,
246.069774619, 247.072093, 248.075752, 249.078480, 247.07035354,
233.050771232, 234.050159841, 235.051434, 236.051413, 237.052901,
238.053028697, 239.054957, 240.055529539, 241.057653001, 242.058835824,
243.061389114, 244.062752578, 245.065491249, 246.067223662, 247.07035354,
248.072348508, 249.075953413, 250.078356959, 251.082284605, 252.084870,
247.07030708, 235.056580, 236.057330, 237.057003, 238.058281, 239.058279,
240.059759, 241.060230, 242.061981, 243.063007572, 244.065180774,
245.066361616, 246.068672947, 247.07030708, 248.073086, 249.074986657,
250.07831652, 251.080760172, 252.084310, 253.086880, 254.090600,
251.079586788, 237.062070, 238.061410, 239.062422, 240.062302, 241.063726,
242.063701552, 243.065427, 244.066000689, 245.068048612, 246.068805309,
247.071000589, 248.072184861, 249.074853537, 250.076406066, 251.079586788,
252.081625846, 253.085133145, 254.087322909, 255.091046, 256.093440,
252.082978512, 240.068920, 241.068538, 242.069745, 243.069548, 244.070883,
245.071324, 246.072896, 247.073656, 248.075471, 249.076411, 250.078612,
251.079992142, 252.082978512, 253.084824697, 254.088022021, 255.090273122,
256.093598, 257.095979, 258.099520, 257.095104724, 242.073430, 243.074353,
244.074084, 245.075385, 246.075299023, 247.076847, 248.077194714,
249.079034, 250.079521264, 251.081575017, 252.082466855, 253.085185236,
254.08685422, 255.089962202, 256.091773117, 257.095104724, 258.097076,
259.100595, 260.102678, 258.098431319, 245.080829, 246.081886, 247.081635,
248.082823, 249.083013, 250.084420, 251.084839, 252.086560, 253.087280,
254.089656, 255.091082705, 256.094059025, 257.095541368, 258.098431319,
259.100509, 260.103652, 261.105721, 262.108865, 255.093241131, 248.086596,
249.087833, 250.087510, 251.089012, 252.088976521, 253.090678,
254.090955253, 255.093241131, 256.094282666, 257.09687719, 258.098207,
259.101031, 260.102643, 261.105749, 262.107301, 263.110552, 264.112345,
260.105504, 251.094360, 252.095371, 253.095210, 254.096454, 255.096681,
256.098629, 257.099555, 258.101814, 259.102901, 260.105504, 261.106883,
262.109634, 263.111293, 264.114038, 265.115839, 266.119305, 263.112547,
253.100689, 254.100184, 255.101340, 256.101166194, 257.102990,
258.103489, 259.105637, 260.106440, 261.108766556, 262.109925, 263.112547,
264.113985, 265.116704, 266.117956, 267.121529, 268.123644, 255.107398,
255.107398, 256.108127, 257.107722, 258.109231, 259.109610, 260.111300,
261.112056, 262.114084, 263.114988, 264.117404, 265.118601, 266.121029,
267.122377, 268.125445, 269.127460, 270.130712, 259.114500, 258.113168,
259.114500, 260.114422071, 261.116117, 262.116398, 263.118322, 264.118931,
265.121114693, 266.122065, 267.124425, 268.125606, 269.128755, 270.130329,
271.133472, 272.135158, 273.138220, 262.122892, 260.121970, 261.121664,
262.122892, 263.123035, 264.124604, 265.125147, 266.126942, 267.127650,
268.129755, 269.130694, 270.133616, 271.135179, 272.138032, 273.139618,
274.142440, 275.144250, 263.128558, 263.128558, 264.128394885, 265.130085,
266.130097, 267.131789, 268.132162, 269.134056, 270.134650, 271.137657,
272.139052, 273.141986, 274.143131, 275.145952, 276.147208, 277.149841,
265.136151, 265.136151, 266.137299, 267.137307, 268.138728, 269.139055,
270.140657, 271.141139, 272.143738, 273.144913, 274.147492, 275.148647,
276.151156, 277.152420, 278.154812, 279.156193, 281.162061, 267.144341,
268.143795, 269.145124, 270.144720, 271.146062, 272.146317, 273.148863,
274.149492, 275.152176, 276.153034, 277.155647, 278.156469, 279.158861,
280.159795, 281.162061, 272.153615, 272.153615, 273.153682, 274.155713,
275.156142, 276.158493, 277.159519, 278.161604, 279.162468, 280.164473,
281.165372, 282.167486, 283.168415, 283.171792, 277.163943, 278.164312,
279.166546, 280.167039, 281.169286, 282.169765, 283.171792, 284.172384,
285.174105, 283.176451, 283.176451, 284.178080, 285.178732, 286.180481,
287.181045, 285.183698, 285.183698, 286.183855, 287.185599, 288.185689,
289.187279, 287.191186, 287.191186, 288.192492, 289.192715, 290.194141,
291.194384, 292.199786, 289.198862, 290.198590, 291.200011, 292.199786,
291.206564, 291.206564, 292.207549, 293.214670, 293.214670]
el2mass = dict(zip(_temp_symbol, _temp_mass))
el2mass["GH"] = 0. # note that ghost atoms in Cfour have mass 100.
eliso2mass = dict(zip(_temp_iso_symbol, _temp_iso_mass)) # encompasses el2mass
eliso2mass["GH"] = 0. # note that ghost atoms in Cfour have mass 100. # encompasses el2mass
#eliso2mass["X0"] = 0. # probably needed, just checking
el2z = dict(zip(_temp_symbol, _temp_z))
el2z["GH"] = 0
z2mass = dict(zip(_temp_z, _temp_mass))
z2el = dict(zip(_temp_z, _temp_symbol))
z2element = dict(zip(_temp_z, _temp_element))
el2element = dict(zip(_temp_symbol, _temp_element))
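# A minimal usage sketch (hypothetical, not part of the module): the tables
# above translate freely between element symbol, atomic number Z, and mass,
# and eliso2mass additionally resolves isotope labels like "C13".
#
#   el2z["C"]           # -> 6
#   z2el[6]             # -> "C"
#   eliso2mass["C13"]   # -> 13.00335483778 (isotope-specific mass)
#   el2mass["GH"]       # -> 0.0 (ghost atoms carry no mass here)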
| lgpl-3.0 |
draugiskisprendimai/odoo | addons/base_geolocalize/__openerp__.py | 211 | 1458 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013-Today OpenERP SA (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Partners Geo-Localization',
'version': '1.0',
'category': 'Customer Relationship Management',
'description': """
Partners geolocalization
========================
""",
'author': 'OpenERP SA',
'depends': ['crm'],
'demo': [
],
'data': [
'views/res_partner_view.xml',
],
'test': [],
'installable': True,
'auto_install': False,
}
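# A minimal sketch (hypothetical, not part of the module) of inspecting this
# manifest programmatically; the 'depends' list drives install ordering and
# files in 'data' are loaded when the module is installed. The import path is
# an assumption based on the OpenERP 8 module loader:
#
#   from openerp.modules.module import load_information_from_description_file
#   info = load_information_from_description_file('base_geolocalize')
#   assert 'crm' in info['depends']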
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
2uller/LotF | App/Lib/test/test_threading_local.py | 11 | 6905 | import unittest
from doctest import DocTestSuite
from test import test_support
import weakref
import gc
import sys  # used by _test_one_class below for error reporting on failure
# Modules under test
_thread = test_support.import_module('thread')
threading = test_support.import_module('threading')
import _threading_local
class Weak(object):
pass
def target(local, weaklist):
weak = Weak()
local.weak = weak
weaklist.append(weakref.ref(weak))
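# A minimal sketch (not part of the test) of the semantics exercised below:
# attributes set on a threading.local instance are visible only in the thread
# that set them.
#
#   import threading
#   loc = threading.local()
#   loc.x = 1                        # set in the main thread
#   def peek():
#       print hasattr(loc, 'x')      # prints False in the child thread
#   t = threading.Thread(target=peek)
#   t.start(); t.join()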
class BaseLocalTest:
def test_local_refs(self):
self._local_refs(20)
self._local_refs(50)
self._local_refs(100)
def _local_refs(self, n):
local = self._local()
weaklist = []
for i in range(n):
t = threading.Thread(target=target, args=(local, weaklist))
t.start()
t.join()
del t
gc.collect()
self.assertEqual(len(weaklist), n)
# XXX _threading_local keeps the local of the last stopped thread alive.
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n))
# Assignment to the same thread local frees it sometimes (!)
local.someothervar = None
gc.collect()
deadlist = [weak for weak in weaklist if weak() is None]
self.assertIn(len(deadlist), (n-1, n), (n, len(deadlist)))
def test_derived(self):
# Issue 3088: if there is a threads switch inside the __init__
# of a threading.local derived class, the per-thread dictionary
# is created but not correctly set on the object.
# The first member set may be bogus.
import time
class Local(self._local):
def __init__(self):
time.sleep(0.01)
local = Local()
def f(i):
local.x = i
# Simply check that the variable is correctly set
self.assertEqual(local.x, i)
threads= []
for i in range(10):
t = threading.Thread(target=f, args=(i,))
t.start()
threads.append(t)
for t in threads:
t.join()
def test_derived_cycle_dealloc(self):
# http://bugs.python.org/issue6990
class Local(self._local):
pass
locals = None
passed = [False]
e1 = threading.Event()
e2 = threading.Event()
def f():
# 1) Involve Local in a cycle
cycle = [Local()]
cycle.append(cycle)
cycle[0].foo = 'bar'
# 2) GC the cycle (triggers threadmodule.c::local_clear
# before local_dealloc)
del cycle
gc.collect()
e1.set()
e2.wait()
# 4) New Locals should be empty
passed[0] = all(not hasattr(local, 'foo') for local in locals)
t = threading.Thread(target=f)
t.start()
e1.wait()
# 3) New Locals should recycle the original's address. Creating
# them in the thread overwrites the thread state and avoids the
# bug
locals = [Local() for i in range(10)]
e2.set()
t.join()
self.assertTrue(passed[0])
def test_arguments(self):
# Issue 1522237
from thread import _local as local
from _threading_local import local as py_local
for cls in (local, py_local):
class MyLocal(cls):
def __init__(self, *args, **kwargs):
pass
MyLocal(a=1)
MyLocal(1)
self.assertRaises(TypeError, cls, a=1)
self.assertRaises(TypeError, cls, 1)
def _test_one_class(self, c):
self._failed = "No error message set or cleared."
obj = c()
e1 = threading.Event()
e2 = threading.Event()
def f1():
obj.x = 'foo'
obj.y = 'bar'
del obj.y
e1.set()
e2.wait()
def f2():
try:
foo = obj.x
except AttributeError:
# This is expected -- we haven't set obj.x in this thread yet!
self._failed = "" # passed
else:
self._failed = ('Incorrectly got value %r from class %r\n' %
(foo, c))
sys.stderr.write(self._failed)
t1 = threading.Thread(target=f1)
t1.start()
e1.wait()
t2 = threading.Thread(target=f2)
t2.start()
t2.join()
# The test is done; just let t1 know it can exit, and wait for it.
e2.set()
t1.join()
self.assertFalse(self._failed, self._failed)
def test_threading_local(self):
self._test_one_class(self._local)
def test_threading_local_subclass(self):
class LocalSubclass(self._local):
"""To test that subclasses behave properly."""
self._test_one_class(LocalSubclass)
def _test_dict_attribute(self, cls):
obj = cls()
obj.x = 5
self.assertEqual(obj.__dict__, {'x': 5})
with self.assertRaises(AttributeError):
obj.__dict__ = {}
with self.assertRaises(AttributeError):
del obj.__dict__
def test_dict_attribute(self):
self._test_dict_attribute(self._local)
def test_dict_attribute_subclass(self):
class LocalSubclass(self._local):
"""To test that subclasses behave properly."""
self._test_dict_attribute(LocalSubclass)
class ThreadLocalTest(unittest.TestCase, BaseLocalTest):
_local = _thread._local
# Fails for the pure Python implementation
def test_cycle_collection(self):
class X:
pass
x = X()
x.local = self._local()
x.local.x = x
wr = weakref.ref(x)
del x
gc.collect()
self.assertIs(wr(), None)
class PyThreadingLocalTest(unittest.TestCase, BaseLocalTest):
_local = _threading_local.local
def test_main():
suite = unittest.TestSuite()
suite.addTest(DocTestSuite('_threading_local'))
suite.addTest(unittest.makeSuite(ThreadLocalTest))
suite.addTest(unittest.makeSuite(PyThreadingLocalTest))
try:
from thread import _local
except ImportError:
pass
else:
import _threading_local
local_orig = _threading_local.local
def setUp(test):
_threading_local.local = _local
def tearDown(test):
_threading_local.local = local_orig
suite.addTest(DocTestSuite('_threading_local',
setUp=setUp, tearDown=tearDown)
)
test_support.run_unittest(suite)
if __name__ == '__main__':
test_main()
| gpl-2.0 |
prakritish/ansible | lib/ansible/modules/network/f5/bigip_gtm_datacenter.py | 15 | 11076 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2016 F5 Networks Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: bigip_gtm_datacenter
short_description: Manage Datacenter configuration in BIG-IP
description:
- Manage BIG-IP data center configuration. A data center defines the location
where the physical network components reside, such as the server and link
objects that share the same subnet on the network. This module is able to
    manipulate the data center definitions in a BIG-IP.
version_added: "2.2"
options:
contact:
description:
- The name of the contact for the data center.
description:
description:
- The description of the data center.
enabled:
description:
- Whether the data center should be enabled. At least one of C(state) and
        C(enabled) is required.
choices:
- yes
- no
location:
description:
- The location of the data center.
name:
description:
- The name of the data center.
required: true
state:
description:
      - The state of the datacenter on the BIG-IP. When C(present), guarantees
        that the data center exists. When C(absent), removes the data center
        from the BIG-IP. C(enabled) will enable the data center and C(disabled)
        will ensure the data center is disabled. At least one of C(state) and
        C(enabled) is required.
choices:
- present
- absent
notes:
- Requires the f5-sdk Python package on the host. This is as easy as
pip install f5-sdk.
extends_documentation_fragment: f5
requirements:
- f5-sdk
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = '''
- name: Create data center "New York"
bigip_gtm_datacenter:
server: "big-ip"
name: "New York"
location: "222 West 23rd"
delegate_to: localhost
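# A second, hypothetical task (sketch only): disable an existing data center.
- name: Disable data center "New York"
  bigip_gtm_datacenter:
    server: "big-ip"
    name: "New York"
    enabled: no
  delegate_to: localhost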
'''
RETURN = '''
contact:
description: The contact that was set on the datacenter
returned: changed
type: string
sample: "admin@root.local"
description:
description: The description that was set for the datacenter
returned: changed
type: string
sample: "Datacenter in NYC"
enabled:
description: Whether the datacenter is enabled or not
returned: changed
type: bool
sample: true
location:
description: The location that is set for the datacenter
returned: changed
type: string
sample: "222 West 23rd"
name:
description: Name of the datacenter being manipulated
returned: changed
type: string
sample: "foo"
'''
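# A hypothetical sample of the module's result payload, assembled from the
# RETURN fields documented above (values are illustrative only):
#   {"changed": true, "name": "New York", "contact": "admin@root.local",
#    "location": "222 West 23rd", "enabled": true}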
try:
from f5.bigip import ManagementRoot
from icontrol.session import iControlUnexpectedHTTPError
HAS_F5SDK = True
except ImportError:
HAS_F5SDK = False
class BigIpGtmDatacenter(object):
def __init__(self, *args, **kwargs):
if not HAS_F5SDK:
raise F5ModuleError("The python f5-sdk module is required")
# The params that change in the module
self.cparams = dict()
# Stores the params that are sent to the module
self.params = kwargs
self.api = ManagementRoot(kwargs['server'],
kwargs['user'],
kwargs['password'],
port=kwargs['server_port'])
def create(self):
params = dict()
check_mode = self.params['check_mode']
contact = self.params['contact']
description = self.params['description']
location = self.params['location']
name = self.params['name']
partition = self.params['partition']
enabled = self.params['enabled']
# Specifically check for None because a person could supply empty
# values which would technically still be valid
if contact is not None:
params['contact'] = contact
if description is not None:
params['description'] = description
if location is not None:
params['location'] = location
        if enabled is not None:
            # honor the supplied value instead of forcing enabled=True
            if enabled:
                params['enabled'] = True
            else:
                params['disabled'] = True
params['name'] = name
params['partition'] = partition
self.cparams = camel_dict_to_snake_dict(params)
if check_mode:
return True
d = self.api.tm.gtm.datacenters.datacenter
d.create(**params)
if not self.exists():
raise F5ModuleError("Failed to create the datacenter")
return True
def read(self):
"""Read information and transform it
The values that are returned by BIG-IP in the f5-sdk can have encoding
attached to them as well as be completely missing in some cases.
Therefore, this method will transform the data from the BIG-IP into a
format that is more easily consumable by the rest of the class and the
parameters that are supported by the module.
"""
p = dict()
name = self.params['name']
partition = self.params['partition']
r = self.api.tm.gtm.datacenters.datacenter.load(
name=name,
partition=partition
)
if hasattr(r, 'servers'):
# Deliberately using sets to suppress duplicates
p['servers'] = set([str(x) for x in r.servers])
if hasattr(r, 'contact'):
p['contact'] = str(r.contact)
if hasattr(r, 'location'):
p['location'] = str(r.location)
if hasattr(r, 'description'):
p['description'] = str(r.description)
if r.enabled:
p['enabled'] = True
else:
p['enabled'] = False
p['name'] = name
return p
def update(self):
changed = False
params = dict()
current = self.read()
check_mode = self.params['check_mode']
contact = self.params['contact']
description = self.params['description']
location = self.params['location']
name = self.params['name']
partition = self.params['partition']
enabled = self.params['enabled']
if contact is not None:
if 'contact' in current:
if contact != current['contact']:
params['contact'] = contact
else:
params['contact'] = contact
if description is not None:
if 'description' in current:
if description != current['description']:
params['description'] = description
else:
params['description'] = description
if location is not None:
if 'location' in current:
if location != current['location']:
params['location'] = location
else:
params['location'] = location
if enabled is not None:
if current['enabled'] != enabled:
if enabled is True:
params['enabled'] = True
params['disabled'] = False
else:
params['disabled'] = True
params['enabled'] = False
if params:
changed = True
if check_mode:
return changed
self.cparams = camel_dict_to_snake_dict(params)
else:
return changed
r = self.api.tm.gtm.datacenters.datacenter.load(
name=name,
partition=partition
)
r.update(**params)
r.refresh()
return True
def delete(self):
params = dict()
check_mode = self.params['check_mode']
params['name'] = self.params['name']
params['partition'] = self.params['partition']
self.cparams = camel_dict_to_snake_dict(params)
if check_mode:
return True
dc = self.api.tm.gtm.datacenters.datacenter.load(**params)
dc.delete()
if self.exists():
raise F5ModuleError("Failed to delete the datacenter")
return True
def present(self):
changed = False
if self.exists():
changed = self.update()
else:
changed = self.create()
return changed
def absent(self):
changed = False
if self.exists():
changed = self.delete()
return changed
def exists(self):
name = self.params['name']
partition = self.params['partition']
return self.api.tm.gtm.datacenters.datacenter.exists(
name=name,
partition=partition
)
def flush(self):
result = dict()
changed = False
state = self.params['state']
enabled = self.params['enabled']
if state is None and enabled is None:
# Fixed: 'module' is not in scope here; raise instead so main() reports it.
raise F5ModuleError("Neither 'state' nor 'enabled' set")
try:
if state == "present":
changed = self.present()
# Ensure that this field is not returned to the user since it
# is not a valid parameter to the module.
if 'disabled' in self.cparams:
del self.cparams['disabled']
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
result.update(**self.cparams)
result.update(dict(changed=changed))
return result
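# Illustrative sketch (added comment, not part of the original module): how
# flush() drives the state machine above. The task snippet is hypothetical;
# the module name and connection values are assumptions.
#
# - name: Ensure the datacenter exists and is enabled
# bigip_gtm_datacenter:
# server: "lb.example.com"
# user: "admin"
# password: "secret"
# name: "foo"
# state: "present"
# enabled: yes
#
# state=present -> exists()? update() : create()
# state=absent -> exists()? delete() : no-op (changed stays False)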
def main():
argument_spec = f5_argument_spec()
meta_args = dict(
contact=dict(required=False, default=None),
description=dict(required=False, default=None),
enabled=dict(required=False, type='bool', default=None, choices=BOOLEANS),
location=dict(required=False, default=None),
name=dict(required=True)
)
argument_spec.update(meta_args)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True
)
try:
obj = BigIpGtmDatacenter(check_mode=module.check_mode, **module.params)
result = obj.flush()
module.exit_json(**result)
except F5ModuleError as e:
module.fail_json(msg=str(e))
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import camel_dict_to_snake_dict
from ansible.module_utils.f5_utils import *
if __name__ == '__main__':
main()
| gpl-3.0 |
tschmorleiz/amcat | amcat/scripts/article_upload/controller.py | 1 | 3148 | from __future__ import absolute_import
###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
"""
Module for running scrapers
"""
import logging
log = logging.getLogger(__name__)
from collections import namedtuple
from amcat.models import Article, Project
ScrapeError = namedtuple("ScrapeError", ["i", "unit", "error"])
class Controller(object):
def __init__(self):
self.errors = []
self.articles = []
def run(self, scraper):
try:
units = list(scraper._get_units())
except Exception as e:
self.errors.append(ScrapeError(None,None,e))
log.exception("scraper._get_units failed")
return self.articles
for i, unit in enumerate(units):
try:
articles = list(scraper._scrape_unit(unit))
except Exception as e:
log.exception("scraper._scrape_unit failed")
self.errors.append(ScrapeError(i,unit,e))
continue
self.articles += articles
for article in self.articles:
_set_default(article, 'project', scraper.project)
try:
articles, errors = Article.create_articles(self.articles, scraper.articleset)
self.saved_article_ids = {getattr(a, "duplicate_of", a.id) for a in self.articles}
for e in errors:
self.errors.append(ScrapeError(None,None,e))
except Exception as e:
self.errors.append(ScrapeError(None, None, e))
# Fixed log message (was a copy-paste of the _get_units handler) and fall
# back to an empty id set so the return below cannot raise AttributeError.
log.exception("Article.create_articles failed")
self.saved_article_ids = set()
return self.saved_article_ids
def _set_default(obj, attr, val):
try:
if getattr(obj, attr, None) is not None: return
except Project.DoesNotExist:
pass # django throws DNE on x.y if y is not set and not nullable
setattr(obj, attr, val)
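# Hedged usage sketch (added comment; ``DummyScraper`` is a hypothetical
# stand-in, not part of amcat). It shows the contract of Controller.run:
# per-unit failures are swallowed and recorded as ScrapeError tuples.
#
# class DummyScraper(object):
# project, articleset = my_project, my_articleset # assumed to exist
# def _get_units(self): return ["unit-1", "unit-2"]
# def _scrape_unit(self, unit): return [Article(headline=str(unit))]
#
# controller = Controller()
# saved_ids = controller.run(DummyScraper()) # -> set of saved article ids
# assert controller.errors == [] # populated on failure instead of raising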
| agpl-3.0 |
nvbn/guessit | guessit/transfo/guess_episodes_rexps.py | 1 | 2555 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import Guess
from guessit.transfo import SingleNodeGuesser
from guessit.patterns.episode import episode_rexps
import re
import logging
log = logging.getLogger(__name__)
def number_list(s):
l = [int(n) for n in re.sub('[^0-9]+', ' ', s).split()]
if len(l) == 2:
# it is an episode interval, return all numbers in between
return list(range(l[0], l[1] + 1))
return l
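# Worked examples (added comment; behaviour follows directly from the regex
# and the interval expansion above):
# number_list('12') -> [12]
# number_list('01-03') -> [1, 2, 3] # exactly two numbers = interval
# number_list('1,2,3') -> [1, 2, 3] # three or more are returned as-is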
def guess_episodes_rexps(string):
for rexp, confidence, span_adjust in episode_rexps:
match = re.search(rexp, string, re.IGNORECASE)
if match:
span = (match.start() + span_adjust[0],
match.end() + span_adjust[1])
guess = Guess(match.groupdict(), confidence=confidence, input=string, span=span)
# decide whether we have only a single episode number or an
# episode list
if guess.get('episodeNumber'):
eplist = number_list(guess['episodeNumber'])
guess.set('episodeNumber', eplist[0], confidence=confidence, input=string, span=span)
if len(eplist) > 1:
guess.set('episodeList', eplist, confidence=confidence, input=string, span=span)
if guess.get('bonusNumber'):
eplist = number_list(guess['bonusNumber'])
guess.set('bonusNumber', eplist[0], confidence=confidence, input=string, span=span)
return guess, span
return None, None
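# Illustrative contract of the function above (added comment; the exact
# groups depend on the episode_rexps patterns, which live elsewhere):
# an input such as 's01e04-06' would yield a Guess containing
# episodeNumber=4 and episodeList=[4, 5, 6] plus the (start, end) span of
# the match, while a non-matching string yields (None, None).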
priority = 20
def should_process(matcher):
return matcher.match_tree.guess['type'] in ('episode', 'episodesubtitle', 'episodeinfo')
def process(mtree):
SingleNodeGuesser(guess_episodes_rexps, None, log).process(mtree)
| lgpl-3.0 |
bolkedebruin/airflow | airflow/providers/apache/druid/operators/hive_to_druid.py | 1 | 10963 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""
This module contains operator to move data from Hive to Druid.
"""
from typing import Dict, List, Optional
from airflow.models import BaseOperator
from airflow.providers.apache.druid.hooks.druid import DruidHook
from airflow.providers.apache.hive.hooks.hive import HiveCliHook, HiveMetastoreHook
from airflow.utils.decorators import apply_defaults
LOAD_CHECK_INTERVAL = 5
DEFAULT_TARGET_PARTITION_SIZE = 5000000
class HiveToDruidTransfer(BaseOperator):
"""
Moves data from Hive to Druid. Note that for now the data is loaded
into memory before being pushed to Druid, so this operator should
be used for smallish amounts of data.
:param sql: SQL query to execute against the Hive database; its result set
is what gets loaded into Druid. (templated)
:type sql: str
:param druid_datasource: the datasource you want to ingest into in druid
:type druid_datasource: str
:param ts_dim: the timestamp dimension
:type ts_dim: str
:param metric_spec: the metrics you want to define for your data
:type metric_spec: list
:param hive_cli_conn_id: the hive connection id
:type hive_cli_conn_id: str
:param druid_ingest_conn_id: the druid ingest connection id
:type druid_ingest_conn_id: str
:param metastore_conn_id: the metastore connection id
:type metastore_conn_id: str
:param hadoop_dependency_coordinates: list of coordinates to squeeze
into the ingest json
:type hadoop_dependency_coordinates: list[str]
:param intervals: list of time intervals that defines segments,
this is passed as is to the json object. (templated)
:type intervals: list
:param num_shards: Directly specify the number of shards to create.
:type num_shards: float
:param target_partition_size: Target number of rows to include in a partition.
:type target_partition_size: int
:param query_granularity: The minimum granularity to be able to query results at and the granularity of
the data inside the segment. E.g. a value of "minute" will mean that data is aggregated at minutely
granularity. That is, if there are collisions in the tuple (minute(timestamp), dimensions), then it
will aggregate values together using the aggregators instead of storing individual rows.
A granularity of 'NONE' means millisecond granularity.
:type query_granularity: str
:param segment_granularity: The granularity to create time chunks at. Multiple segments can be created per
time chunk. For example, with 'DAY' segmentGranularity, the events of the same day fall into the
same time chunk which can be optionally further partitioned into multiple segments based on other
configurations and input size.
:type segment_granularity: str
:param hive_tblproperties: additional properties for tblproperties in
hive for the staging table
:type hive_tblproperties: dict
:param job_properties: additional properties for job
:type job_properties: dict
"""
template_fields = ('sql', 'intervals')
template_ext = ('.sql',)
@apply_defaults
def __init__( # pylint: disable=too-many-arguments
self,
sql: str,
druid_datasource: str,
ts_dim: str,
metric_spec: Optional[List] = None,
hive_cli_conn_id: str = 'hive_cli_default',
druid_ingest_conn_id: str = 'druid_ingest_default',
metastore_conn_id: str = 'metastore_default',
hadoop_dependency_coordinates: Optional[List[str]] = None,
intervals: Optional[List] = None,
num_shards: float = -1,
target_partition_size: int = -1,
query_granularity: str = "NONE",
segment_granularity: str = "DAY",
hive_tblproperties: Optional[Dict] = None,
job_properties: Optional[Dict] = None,
*args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.sql = sql
self.druid_datasource = druid_datasource
self.ts_dim = ts_dim
self.intervals = intervals or ['{{ ds }}/{{ tomorrow_ds }}']
self.num_shards = num_shards
self.target_partition_size = target_partition_size
self.query_granularity = query_granularity
self.segment_granularity = segment_granularity
self.metric_spec = metric_spec or [{
"name": "count",
"type": "count"}]
self.hive_cli_conn_id = hive_cli_conn_id
self.hadoop_dependency_coordinates = hadoop_dependency_coordinates
self.druid_ingest_conn_id = druid_ingest_conn_id
self.metastore_conn_id = metastore_conn_id
self.hive_tblproperties = hive_tblproperties or {}
self.job_properties = job_properties
def execute(self, context):
hive = HiveCliHook(hive_cli_conn_id=self.hive_cli_conn_id)
self.log.info("Extracting data from Hive")
hive_table = 'druid.' + context['task_instance_key_str'].replace('.', '_')
sql = self.sql.strip().strip(';')
tblproperties = ''.join([", '{}' = '{}'"
.format(k, v)
for k, v in self.hive_tblproperties.items()])
hql = f"""\
SET mapred.output.compress=false;
SET hive.exec.compress.output=false;
DROP TABLE IF EXISTS {hive_table};
CREATE TABLE {hive_table}
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE
TBLPROPERTIES ('serialization.null.format' = ''{tblproperties})
AS
{sql}
"""
self.log.info("Running command:\n %s", hql)
hive.run_cli(hql)
meta_hook = HiveMetastoreHook(self.metastore_conn_id)
# Get the Hive table and extract the columns
table = meta_hook.get_table(hive_table)
columns = [col.name for col in table.sd.cols]
# Get the path on hdfs
static_path = meta_hook.get_table(hive_table).sd.location
druid = DruidHook(druid_ingest_conn_id=self.druid_ingest_conn_id)
try:
index_spec = self.construct_ingest_query(
static_path=static_path,
columns=columns,
)
self.log.info("Inserting rows into Druid, hdfs path: %s", static_path)
druid.submit_indexing_job(index_spec)
self.log.info("Load seems to have succeeded!")
finally:
self.log.info(
"Cleaning up by dropping the temp Hive table %s",
hive_table
)
hql = "DROP TABLE IF EXISTS {}".format(hive_table)
hive.run_cli(hql)
def construct_ingest_query(self, static_path, columns):
"""
Builds an ingest query for an HDFS TSV load.
:param static_path: The path on hdfs where the data is
:type static_path: str
:param columns: List of all the columns that are available
:type columns: list
"""
# backward compatibility for num_shards,
# but target_partition_size is the default setting
# and overwrites the num_shards
num_shards = self.num_shards
target_partition_size = self.target_partition_size
if self.target_partition_size == -1:
if self.num_shards == -1:
target_partition_size = DEFAULT_TARGET_PARTITION_SIZE
else:
num_shards = -1
metric_names = [m['fieldName'] for m in self.metric_spec if m['type'] != 'count']
# Take all the columns, which are not the time dimension
# or a metric, as the dimension columns
dimensions = [c for c in columns if c not in metric_names and c != self.ts_dim]
ingest_query_dict = {
"type": "index_hadoop",
"spec": {
"dataSchema": {
"metricsSpec": self.metric_spec,
"granularitySpec": {
"queryGranularity": self.query_granularity,
"intervals": self.intervals,
"type": "uniform",
"segmentGranularity": self.segment_granularity,
},
"parser": {
"type": "string",
"parseSpec": {
"columns": columns,
"dimensionsSpec": {
"dimensionExclusions": [],
"dimensions": dimensions, # list of names
"spatialDimensions": []
},
"timestampSpec": {
"column": self.ts_dim,
"format": "auto"
},
"format": "tsv"
}
},
"dataSource": self.druid_datasource
},
"tuningConfig": {
"type": "hadoop",
"jobProperties": {
"mapreduce.job.user.classpath.first": "false",
"mapreduce.map.output.compress": "false",
"mapreduce.output.fileoutputformat.compress": "false",
},
"partitionsSpec": {
"type": "hashed",
"targetPartitionSize": target_partition_size,
"numShards": num_shards,
},
},
"ioConfig": {
"inputSpec": {
"paths": static_path,
"type": "static"
},
"type": "hadoop"
}
}
}
if self.job_properties:
ingest_query_dict['spec']['tuningConfig']['jobProperties'] \
.update(self.job_properties)
if self.hadoop_dependency_coordinates:
ingest_query_dict['hadoopDependencyCoordinates'] \
= self.hadoop_dependency_coordinates
return ingest_query_dict
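# Rough shape of the returned spec (added comment; concrete values depend on
# the constructor arguments):
# {"type": "index_hadoop",
# "spec": {"dataSchema": {...}, "tuningConfig": {...}, "ioConfig": {...}}}
# with "hadoopDependencyCoordinates" added at the top level when coordinates
# were supplied.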
| apache-2.0 |
ppwwyyxx/tensorflow | tensorflow/python/data/ops/multi_device_iterator_ops.py | 4 | 24876 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Python wrapper for prefetching_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.data.ops import iterator_ops
from tensorflow.python.data.util import structure
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.framework import composite_tensor
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import type_spec
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import functional_ops
from tensorflow.python.ops import gen_dataset_ops
from tensorflow.python.ops import resource_variable_ops
class _PerDeviceGenerator(dataset_ops.DatasetV2):
"""A `dummy` generator dataset."""
def __init__(self, shard_num, multi_device_iterator_resource, incarnation_id,
source_device, element_spec):
self._element_spec = element_spec
multi_device_iterator_string_handle = (
gen_dataset_ops.multi_device_iterator_to_string_handle(
multi_device_iterator_resource))
# TODO(b/124254153): Enable autograph once the overhead is low enough.
@function.defun(autograph=False) # Pure graph code.
def _init_func():
return multi_device_iterator_string_handle
init_func_concrete = _init_func.get_concrete_function()
# TODO(b/124254153): Enable autograph once the overhead is low enough.
@function.defun(autograph=False) # Pure graph code.
def _remote_init_func():
return functional_ops.remote_call(
target=source_device,
args=init_func_concrete.captured_inputs,
Tout=[dtypes.string],
f=init_func_concrete)
self._init_func = _remote_init_func.get_concrete_function()
self._init_captured_args = self._init_func.captured_inputs
# TODO(b/124254153): Enable autograph once the overhead is low enough.
@function.defun(
input_signature=[tensor_spec.TensorSpec([], dtypes.string)],
autograph=False) # Pure graph code.
def _next_func(string_handle):
# pylint: disable=protected-access
multi_device_iterator = (
gen_dataset_ops.multi_device_iterator_from_string_handle(
string_handle=string_handle,
output_types=structure.get_flat_tensor_types(self._element_spec),
output_shapes=structure.get_flat_tensor_shapes(
self._element_spec)))
return gen_dataset_ops.multi_device_iterator_get_next_from_shard(
multi_device_iterator=multi_device_iterator,
shard_num=shard_num,
incarnation_id=incarnation_id,
output_types=structure.get_flat_tensor_types(self._element_spec),
output_shapes=structure.get_flat_tensor_shapes(self._element_spec))
next_func_concrete = _next_func.get_concrete_function()
# TODO(b/124254153): Enable autograph once the overhead is low enough.
@function.defun_with_attributes(
input_signature=[tensor_spec.TensorSpec([], dtypes.string)],
attributes={"experimental_ints_on_device": True},
autograph=False) # Pure graph code.
def _remote_next_func(string_handle):
return functional_ops.remote_call(
target=source_device,
args=[string_handle] + next_func_concrete.captured_inputs,
Tout=structure.get_flat_tensor_types(self._element_spec),
f=next_func_concrete)
self._next_func = _remote_next_func.get_concrete_function()
self._next_captured_args = self._next_func.captured_inputs
self._incarnation_id_index = -1
for i, arg in enumerate(self._next_captured_args):
if arg is incarnation_id:
self._incarnation_id_index = i
# TODO(b/124254153): Enable autograph once the overhead is low enough.
@function.defun(
input_signature=[tensor_spec.TensorSpec([], dtypes.string)],
autograph=False) # Pure graph code.
def _finalize_func(unused_string_handle):
return array_ops.constant(0, dtypes.int64)
finalize_func_concrete = _finalize_func.get_concrete_function()
# TODO(b/124254153): Enable autograph once the overhead is low enough.
@function.defun(
input_signature=[tensor_spec.TensorSpec([], dtypes.string)],
autograph=False) # Pure graph code.
def _remote_finalize_func(string_handle):
return functional_ops.remote_call(
target=source_device,
args=[string_handle] + finalize_func_concrete.captured_inputs,
Tout=[dtypes.int64],
f=finalize_func_concrete)
self._finalize_func = _remote_finalize_func.get_concrete_function()
self._finalize_captured_args = self._finalize_func.captured_inputs
variant_tensor = gen_dataset_ops.generator_dataset(
self._init_captured_args,
self._next_captured_args,
self._finalize_captured_args,
init_func=self._init_func,
next_func=self._next_func,
finalize_func=self._finalize_func,
**self._flat_structure)
super(_PerDeviceGenerator, self).__init__(variant_tensor)
def _inputs(self):
# TODO(b/116506223): Determine which datasets should be used as inputs here.
return []
@property
def element_spec(self):
return self._element_spec
class _ReincarnatedPerDeviceGenerator(dataset_ops.DatasetV2):
"""Creates a _PerDeviceGenerator-like dataset with a new incarnation_id.
Re-uses the functions from the provided per_device_dataset and just switches
out the function argument corresponding to the incarnation_id.
"""
def __init__(self, per_device_dataset, incarnation_id):
# pylint: disable=protected-access
self._element_spec = per_device_dataset.element_spec
self._init_func = per_device_dataset._init_func
self._init_captured_args = self._init_func.captured_inputs
self._next_func = per_device_dataset._next_func
self._next_captured_args = per_device_dataset._next_captured_args
# The captured arguments to the next_func are string_handle, incarnation_id.
# We update the incarnation id to the new one.
self._next_captured_args[
per_device_dataset._incarnation_id_index] = incarnation_id
self._finalize_func = per_device_dataset._finalize_func
self._finalize_captured_args = per_device_dataset._finalize_captured_args
variant_tensor = gen_dataset_ops.generator_dataset(
self._init_captured_args,
self._next_captured_args,
self._finalize_captured_args,
init_func=self._init_func,
next_func=self._next_func,
finalize_func=self._finalize_func,
**self._flat_structure)
super(_ReincarnatedPerDeviceGenerator, self).__init__(variant_tensor)
def _inputs(self):
# TODO(b/116506223): Determine which datasets should be used as inputs here.
return []
@property
def element_spec(self):
return self._element_spec
def _create_device_dataset(prototype_ds, incarnation_id, prefetch_buffer_size,
experimental_slack):
"""Uses _prototype_device_datasets[i] to build a dataset for the device."""
ds = _ReincarnatedPerDeviceGenerator(prototype_ds, incarnation_id)
if prefetch_buffer_size > 0:
if experimental_slack:
ds = dataset_ops.PrefetchDataset(ds, prefetch_buffer_size, slack_period=1)
else:
ds = ds.prefetch(prefetch_buffer_size)
# TODO(jsimsa): Enable auto-tuning and optimizations when supported for
# non-CPU devices.
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
options.experimental_optimization.autotune = False
ds = ds.with_options(options)
return ds
class MultiDeviceIterator(object):
"""An iterator over multiple devices."""
def __init__(self,
dataset,
devices,
max_buffer_size=1,
prefetch_buffer_size=1,
source_device="/cpu:0"):
"""Constructs a MultiDeviceIterator.
Args:
dataset: The input dataset to be iterated over.
devices: The list of devices to fetch data to.
max_buffer_size: Maximum size of the host side per device buffer to keep.
prefetch_buffer_size: if > 0, then we set up a buffer on each device to
prefetch into. In order to prevent deadlocks, if prefetch_buffer_size is
greater than max_buffer_size, max_buffer_size is raised to
prefetch_buffer_size.
source_device: The host device to place the `dataset` on.
"""
options = dataset_ops.Options()
options.experimental_distribute.num_devices = len(devices)
dataset = dataset.with_options(options)
self._dataset = dataset._apply_options() # pylint: disable=protected-access
self._experimental_slack = dataset.options().experimental_slack
self._devices = devices
self._source_device = source_device
self._source_device_tensor = ops.convert_to_tensor(source_device)
self._max_buffer_size = max_buffer_size
self._prefetch_buffer_size = prefetch_buffer_size
if self._prefetch_buffer_size > self._max_buffer_size:
self._max_buffer_size = self._prefetch_buffer_size
# Create the MultiDeviceIterator.
with ops.device(self._source_device):
# TODO(b/121378567): Get rid of this shared_name hack.
shared_name = ""
if context.executing_eagerly():
shared_name = context.shared_name()
self._multi_device_iterator_resource = (
gen_dataset_ops.multi_device_iterator(
devices=self._devices,
shared_name=shared_name,
container="",
**self._dataset._flat_structure)) # pylint: disable=protected-access
if context.executing_eagerly():
# Delete the resource when this object is deleted
self._resource_deleter = resource_variable_ops.EagerResourceDeleter(
handle=self._multi_device_iterator_resource,
handle_device=self._source_device)
# The incarnation ID is used to ensure consistency between the per-device
# iterators and the multi-device iterator.
self._incarnation_id = gen_dataset_ops.multi_device_iterator_init(
self._dataset._variant_tensor, # pylint: disable=protected-access
self._multi_device_iterator_resource,
max_buffer_size=self._max_buffer_size)
self._prototype_device_datasets = []
for i, device in enumerate(self._devices):
with ops.device(device):
ds = _PerDeviceGenerator(i, self._multi_device_iterator_resource,
self._incarnation_id,
self._source_device_tensor,
self._dataset.element_spec)
self._prototype_device_datasets.append(ds)
# TODO(rohanj): Explore the possibility of the MultiDeviceIterator to
# initialize the device side of the pipeline. This would allow the
# MultiDeviceIterator to choose, for example, to move some transformations
# into the device side from its input. It might be useful in rewriting.
# Create the per device iterators.
self._device_iterators = []
for i, device in enumerate(self._devices):
with ops.device(device):
ds = _create_device_dataset(self._prototype_device_datasets[i],
self._incarnation_id,
self._prefetch_buffer_size,
self._experimental_slack)
if context.executing_eagerly():
self._device_iterators.append(dataset_ops.make_one_shot_iterator(ds))
else:
self._device_iterators.append(
dataset_ops.make_initializable_iterator(ds))
if not context.executing_eagerly():
device_iterator_initializers = [
iterator.initializer for iterator in self._device_iterators
]
self._initializer = control_flow_ops.group(*device_iterator_initializers)
def _create_device_dataset(self, i):
"""Uses _prototype_device_datasets[i] to build a dataset for the device."""
ds = self._prototype_device_datasets[i]
ds = _ReincarnatedPerDeviceGenerator(ds, self._incarnation_id)
if self._prefetch_buffer_size > 0:
if self._experimental_slack:
ds = dataset_ops.PrefetchDataset(
ds, self._prefetch_buffer_size, slack_period=1)
else:
ds = ds.prefetch(self._prefetch_buffer_size)
# TODO(jsimsa): Enable auto-tuning and optimizations when supported for
# non-CPU devices.
options = dataset_ops.Options()
options.experimental_optimization.apply_default_optimizations = False
options.experimental_optimization.autotune = False
ds = ds.with_options(options)
return ds
def get_next(self, device=None):
"""Returns the next element given a `device`, else returns all in a list."""
if device is not None:
index = self._devices.index(device)
return self._device_iterators[index].get_next()
result = []
for i, device in enumerate(self._devices):
with ops.device(device):
result.append(self._device_iterators[i].get_next())
return result
def get_next_as_optional(self):
result = []
for i, device in enumerate(self._devices):
with ops.device(device):
result.append(
iterator_ops.get_next_as_optional(self._device_iterators[i]))
return result
@property
def initializer(self):
if context.executing_eagerly():
return control_flow_ops.no_op()
return self._initializer
def _eager_reset(self):
"""Resets the MultiDeviceIterator in eager mode."""
if not ops.executing_eagerly_outside_functions():
raise ValueError("Eager reset is only supported in eager mode.")
# pylint: disable=protected-access
self._incarnation_id = gen_dataset_ops.multi_device_iterator_init(
self._dataset._variant_tensor,
self._multi_device_iterator_resource,
max_buffer_size=self._max_buffer_size)
for i, device in enumerate(self._devices):
with ops.device(device):
ds = _create_device_dataset(self._prototype_device_datasets[i],
self._incarnation_id,
self._prefetch_buffer_size,
self._experimental_slack)
# Reset the device iterator resources with the new dataset.
ds_variant = ds._variant_tensor
gen_dataset_ops.make_iterator(
ds_variant, self._device_iterators[i]._iterator_resource)
@property
def element_spec(self):
return self._dataset.element_spec
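# Hedged usage sketch (added comment; the device strings are assumptions and
# require matching hardware to be present):
#
# dataset = tf.data.Dataset.range(8).batch(2)
# mdi = MultiDeviceIterator(dataset, ["/gpu:0", "/gpu:1"],
# prefetch_buffer_size=2)
# per_device = mdi.get_next() # one element per device, in device order
# gpu1_only = mdi.get_next("/gpu:1")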
class MultiDeviceIteratorResourceDeleter(object):
"""An object which cleans up a Multi Device Iterator resource.
An alternative to defining a __del__ method on an object. Even if the parent
object is part of a reference cycle, the cycle will be collectible.
"""
def __init__(self, multi_device_iterator, iterators, device, deleter):
self._deleter = deleter
self._multi_device_iterator = multi_device_iterator
self._iterators = iterators
self._device = device
self._eager_mode = context.executing_eagerly()
def __del__(self):
with ops.device(self._device):
# Make sure the resource is deleted in the same mode as it was created in.
# We pass in the iterator handles as inputs to the op to make sure that
# this op runs after all the iterators are deleted.
if self._eager_mode:
with context.eager_mode():
gen_dataset_ops.delete_multi_device_iterator(
multi_device_iterator=self._multi_device_iterator,
iterators=self._iterators,
deleter=self._deleter)
else:
with context.graph_mode():
gen_dataset_ops.delete_multi_device_iterator(
multi_device_iterator=self._multi_device_iterator,
iterators=self._iterators,
deleter=self._deleter)
class MultiDeviceIteratorSpec(type_spec.TypeSpec):
"""Type specification for `OwnedMultiDeviceIterator`."""
__slots__ = ["_devices", "_source_device", "_element_spec"]
def __init__(self, devices, source_device, element_spec):
self._devices = devices
self._source_device = source_device
self._element_spec = element_spec
@property
def value_type(self):
return OwnedMultiDeviceIterator
def _serialize(self):
return (tuple(self._devices), self._source_device, self._element_spec)
@property
def _component_specs(self):
specs = [
tensor_spec.TensorSpec([], dtypes.resource),
tensor_spec.TensorSpec([], dtypes.variant)
]
for _ in range(len(self._devices)):
specs.append(iterator_ops.IteratorSpec(self._element_spec))
return specs
def _to_components(self, value):
# pylint: disable=protected-access
c = [value._multi_device_iterator_resource, value._deleter]
c.extend(value._device_iterators)
return c
def _from_components(self, components):
return OwnedMultiDeviceIterator(
dataset=None,
devices=self._devices,
source_device=self._source_device,
components=components,
element_spec=self._element_spec)
@staticmethod
def from_value(value):
# pylint: disable=protected-access
return MultiDeviceIteratorSpec(
value._devices,
value._source_device,
value.element_spec)
class OwnedMultiDeviceIterator(composite_tensor.CompositeTensor):
"""An iterator over multiple devices.
The multi-device iterator resource created through `OwnedMultiDeviceIterator`
is owned by the Python object and the life time of the underlying resource is
tied to the life time of the `OwnedMultiDeviceIterator` object. This makes
`OwnedMultiDeviceIterator` appropriate for use in eager mode and inside of
tf.functions.
"""
def __init__(self,
dataset=None,
devices=None,
max_buffer_size=1,
prefetch_buffer_size=1,
source_device="/cpu:0",
components=None,
element_spec=None):
"""Constructs an owned MultiDeviceIterator object.
Args:
dataset: The input dataset to be iterated over.
devices: The list of devices to fetch data to.
max_buffer_size: Maximum size of the host side per device buffer to keep.
prefetch_buffer_size: if > 0, then we set up a buffer on each device to
prefetch into. In order to prevent deadlocks, if prefetch_buffer_size is
greater than max_buffer_size, max_buffer_size is raised to
prefetch_buffer_size.
source_device: The host device to place the `dataset` on.
components: Tensor components to construct the MultiDeviceIterator from.
element_spec: A nested structure of `TypeSpec` objects that
represents the type specification of elements of the iterator.
Raises:
RuntimeError: If executed in graph mode or outside of function building
mode.
"""
if (not context.executing_eagerly() and
not ops.get_default_graph()._building_function): # pylint: disable=protected-access
raise RuntimeError("OwnedMultiDeviceIterator is only supported inside of "
"tf.function or when eager execution is enabled.")
if devices is None:
raise ValueError("`devices` must be provided")
error_message = "Either `dataset` or both `components` and "
"`element_spec` need to be provided."
if dataset is None:
if (components is None or element_spec is None):
raise ValueError(error_message)
self._element_spec = element_spec
self._devices = devices
self._source_device = source_device
self._multi_device_iterator_resource = components[0]
self._deleter = components[1]
self._device_iterators = components[2:]
iterator_handles = []
for it in self._device_iterators:
iterator_handles.append(it._iterator_resource) # pylint: disable=protected-access
else:
if (components is not None or element_spec is not None):
raise ValueError(error_message)
options = dataset_ops.Options()
options.experimental_distribute.num_devices = len(devices)
dataset = dataset.with_options(options)
dataset = dataset._apply_options() # pylint: disable=protected-access
self._element_spec = dataset.element_spec
experimental_slack = dataset.options().experimental_slack
self._devices = devices
self._source_device = source_device
source_device_tensor = ops.convert_to_tensor(self._source_device)
if prefetch_buffer_size > max_buffer_size:
max_buffer_size = prefetch_buffer_size
# Create the MultiDeviceIterator.
with ops.device(self._source_device):
self._multi_device_iterator_resource, self._deleter = (
gen_dataset_ops.anonymous_multi_device_iterator(
devices=self._devices, **dataset._flat_structure)) # pylint: disable=protected-access
# The incarnation ID is used to ensure consistency between the
# per-device iterators and the multi-device iterator.
incarnation_id = gen_dataset_ops.multi_device_iterator_init(
dataset._variant_tensor, # pylint: disable=protected-access
self._multi_device_iterator_resource,
max_buffer_size=max_buffer_size)
prototype_device_datasets = []
for i, device in enumerate(self._devices):
with ops.device(device):
ds = _PerDeviceGenerator(i, self._multi_device_iterator_resource,
incarnation_id, source_device_tensor,
dataset.element_spec)
prototype_device_datasets.append(ds)
# TODO(rohanj): Explore the possibility of the MultiDeviceIterator to
# initialize the device side of the pipeline. This would allow the
# MultiDeviceIterator to choose, for example, to move some transformations
# into the device side from its input. It might be useful in rewriting.
# Create the per device iterators.
self._device_iterators = []
iterator_handles = []
for i, device in enumerate(self._devices):
with ops.device(device):
ds = _create_device_dataset(prototype_device_datasets[i],
incarnation_id, prefetch_buffer_size,
experimental_slack)
iterator = iter(ds)
self._device_iterators.append(iterator)
iterator_handles.append(iterator._iterator_resource) # pylint: disable=protected-access
self._resource_deleter = MultiDeviceIteratorResourceDeleter(
multi_device_iterator=self._multi_device_iterator_resource,
iterators=iterator_handles,
device=self._source_device,
deleter=self._deleter)
def get_next(self, device=None):
"""Returns the next element given a `device`, else returns all in a list."""
if device is not None:
index = self._devices.index(device)
return self._device_iterators[index].get_next()
result = []
for i, device in enumerate(self._devices):
with ops.device(device):
result.append(self._device_iterators[i].get_next())
return result
def __iter__(self):
return self
def __next__(self):
return self.next()
def next(self):
try:
return self.get_next()
except errors.OutOfRangeError:
raise StopIteration
def get_next_as_optional(self):
result = []
for i, device in enumerate(self._devices):
with ops.device(device):
result.append(
iterator_ops.get_next_as_optional(self._device_iterators[i]))
return result
@property
def element_spec(self):
return self._element_spec
@property
def _type_spec(self):
return MultiDeviceIteratorSpec(self._devices, self._source_device,
self._element_spec)
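# Minimal sketch of the owned variant (added comment; assumes eager mode and
# that the listed device exists). Unlike MultiDeviceIterator, the underlying
# resource is tied to the Python object's lifetime, and __iter__/__next__
# above make it usable in a plain for-loop:
#
# it = OwnedMultiDeviceIterator(tf.data.Dataset.range(4), ["/cpu:0"])
# for element in it:
# print(element)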
| apache-2.0 |
Weihonghao/ECM | Vpy34/lib/python3.5/site-packages/scipy/linalg/tests/test_blas.py | 14 | 25459 | #
# Created by: Pearu Peterson, April 2002
#
from __future__ import division, print_function, absolute_import
__usage__ = """
Build linalg:
python setup.py build
Run tests if scipy is installed:
python -c 'import scipy;scipy.linalg.test()'
"""
import math
import numpy as np
from numpy.testing import (TestCase, run_module_suite, assert_equal,
assert_almost_equal, assert_array_almost_equal, assert_raises, assert_,
assert_allclose)
from scipy.linalg import _fblas as fblas, get_blas_funcs
try:
from scipy.linalg import _cblas as cblas
except ImportError:
cblas = None
def test_get_blas_funcs():
# check that it returns Fortran code for arrays that are
# fortran-ordered
f1, f2, f3 = get_blas_funcs(
('axpy', 'axpy', 'axpy'),
(np.empty((2,2), dtype=np.complex64, order='F'),
np.empty((2,2), dtype=np.complex128, order='C'))
)
# get_blas_funcs will choose libraries depending on most generic
# array
assert_equal(f1.typecode, 'z')
assert_equal(f2.typecode, 'z')
if cblas is not None:
assert_equal(f1.module_name, 'cblas')
assert_equal(f2.module_name, 'cblas')
# check defaults.
f1 = get_blas_funcs('rotg')
assert_equal(f1.typecode, 'd')
# check also dtype interface
f1 = get_blas_funcs('gemm', dtype=np.complex64)
assert_equal(f1.typecode, 'c')
f1 = get_blas_funcs('gemm', dtype='F')
assert_equal(f1.typecode, 'c')
# extended precision complex
f1 = get_blas_funcs('gemm', dtype=np.longcomplex)
assert_equal(f1.typecode, 'z')
# check safe complex upcasting
f1 = get_blas_funcs('axpy',
(np.empty((2,2), dtype=np.float64),
np.empty((2,2), dtype=np.complex64))
)
assert_equal(f1.typecode, 'z')
def test_get_blas_funcs_alias():
# check alias for get_blas_funcs
f, g = get_blas_funcs(('nrm2', 'dot'), dtype=np.complex64)
assert f.typecode == 'c'
assert g.typecode == 'c'
f, g, h = get_blas_funcs(('dot', 'dotc', 'dotu'), dtype=np.float64)
assert f is g
assert f is h
class TestCBLAS1Simple(TestCase):
def test_axpy(self):
for p in 'sd':
f = getattr(cblas,p+'axpy',None)
if f is None:
continue
assert_array_almost_equal(f([1,2,3],[2,-1,3],a=5),[7,9,18])
for p in 'cz':
f = getattr(cblas,p+'axpy',None)
if f is None:
continue
assert_array_almost_equal(f([1,2j,3],[2,-1,3],a=5),[7,10j-1,18])
class TestFBLAS1Simple(TestCase):
def test_axpy(self):
for p in 'sd':
f = getattr(fblas,p+'axpy',None)
if f is None:
continue
assert_array_almost_equal(f([1,2,3],[2,-1,3],a=5),[7,9,18])
for p in 'cz':
f = getattr(fblas,p+'axpy',None)
if f is None:
continue
assert_array_almost_equal(f([1,2j,3],[2,-1,3],a=5),[7,10j-1,18])
def test_copy(self):
for p in 'sd':
f = getattr(fblas,p+'copy',None)
if f is None:
continue
assert_array_almost_equal(f([3,4,5],[8]*3),[3,4,5])
for p in 'cz':
f = getattr(fblas,p+'copy',None)
if f is None:
continue
assert_array_almost_equal(f([3,4j,5+3j],[8]*3),[3,4j,5+3j])
def test_asum(self):
for p in 'sd':
f = getattr(fblas,p+'asum',None)
if f is None:
continue
assert_almost_equal(f([3,-4,5]),12)
for p in ['sc','dz']:
f = getattr(fblas,p+'asum',None)
if f is None:
continue
assert_almost_equal(f([3j,-4,3-4j]),14)
def test_dot(self):
for p in 'sd':
f = getattr(fblas,p+'dot',None)
if f is None:
continue
assert_almost_equal(f([3,-4,5],[2,5,1]),-9)
def test_complex_dotu(self):
for p in 'cz':
f = getattr(fblas,p+'dotu',None)
if f is None:
continue
assert_almost_equal(f([3j,-4,3-4j],[2,3,1]),-9+2j)
def test_complex_dotc(self):
for p in 'cz':
f = getattr(fblas,p+'dotc',None)
if f is None:
continue
assert_almost_equal(f([3j,-4,3-4j],[2,3j,1]),3-14j)
def test_nrm2(self):
for p in 'sd':
f = getattr(fblas,p+'nrm2',None)
if f is None:
continue
assert_almost_equal(f([3,-4,5]),math.sqrt(50))
for p in ['c', 'z', 'sc','dz']:
f = getattr(fblas,p+'nrm2',None)
if f is None:
continue
assert_almost_equal(f([3j,-4,3-4j]),math.sqrt(50))
def test_scal(self):
for p in 'sd':
f = getattr(fblas,p+'scal',None)
if f is None:
continue
assert_array_almost_equal(f(2,[3,-4,5]),[6,-8,10])
for p in 'cz':
f = getattr(fblas,p+'scal',None)
if f is None:
continue
assert_array_almost_equal(f(3j,[3j,-4,3-4j]),[-9,-12j,12+9j])
for p in ['cs','zd']:
f = getattr(fblas,p+'scal',None)
if f is None:
continue
assert_array_almost_equal(f(3,[3j,-4,3-4j]),[9j,-12,9-12j])
def test_swap(self):
for p in 'sd':
f = getattr(fblas,p+'swap',None)
if f is None:
continue
x,y = [2,3,1],[-2,3,7]
x1,y1 = f(x,y)
assert_array_almost_equal(x1,y)
assert_array_almost_equal(y1,x)
for p in 'cz':
f = getattr(fblas,p+'swap',None)
if f is None:
continue
x,y = [2,3j,1],[-2,3,7-3j]
x1,y1 = f(x,y)
assert_array_almost_equal(x1,y)
assert_array_almost_equal(y1,x)
def test_amax(self):
for p in 'sd':
f = getattr(fblas,'i'+p+'amax')
assert_equal(f([-2,4,3]),1)
for p in 'cz':
f = getattr(fblas,'i'+p+'amax')
assert_equal(f([-5,4+3j,6]),1)
#XXX: need tests for rot,rotm,rotg,rotmg
class TestFBLAS2Simple(TestCase):
def test_gemv(self):
for p in 'sd':
f = getattr(fblas,p+'gemv',None)
if f is None:
continue
assert_array_almost_equal(f(3,[[3]],[-4]),[-36])
assert_array_almost_equal(f(3,[[3]],[-4],3,[5]),[-21])
for p in 'cz':
f = getattr(fblas,p+'gemv',None)
if f is None:
continue
assert_array_almost_equal(f(3j,[[3-4j]],[-4]),[-48-36j])
assert_array_almost_equal(f(3j,[[3-4j]],[-4],3,[5j]),[-48-21j])
def test_ger(self):
for p in 'sd':
f = getattr(fblas,p+'ger',None)
if f is None:
continue
assert_array_almost_equal(f(1,[1,
2],[3,4]),[[3,4],[6,8]])
assert_array_almost_equal(f(2,[1,
2,
3],[3,4]),[[6,8],[12,16],[18,24]])
assert_array_almost_equal(f(1,[1,
2],[3,4],
a=[[1,2],[3,4]]
),[[4,6],[9,12]])
for p in 'cz':
f = getattr(fblas,p+'geru',None)
if f is None:
continue
assert_array_almost_equal(f(1,[1j,
2],[3,4]),[[3j,4j],[6,8]])
assert_array_almost_equal(f(-2,[1j,
2j,
3j],[3j,4j]),[[6,8],[12,16],[18,24]])
for p in 'cz':
for name in ('ger', 'gerc'):
f = getattr(fblas,p+name,None)
if f is None:
continue
assert_array_almost_equal(f(1,[1j,
2],[3,4]),[[3j,4j],[6,8]])
assert_array_almost_equal(f(2,[1j,
2j,
3j],[3j,4j]),[[6,8],[12,16],[18,24]])
def test_syr_her(self):
x = np.arange(1, 5, dtype='d')
resx = np.triu(x[:, np.newaxis] * x)
resx_reverse = np.triu(x[::-1, np.newaxis] * x[::-1])
y = np.linspace(0,8.5,17,endpoint=False)
z = np.arange(1, 9, dtype='d').view('D')
resz = np.triu(z[:, np.newaxis] * z)
resz_reverse = np.triu(z[::-1, np.newaxis] * z[::-1])
rehz = np.triu(z[:, np.newaxis] * z.conj())
rehz_reverse = np.triu(z[::-1, np.newaxis] * z[::-1].conj())
w = np.c_[np.zeros(4), z, np.zeros(4)].ravel()
for p, rtol in zip('sd',[1e-7,1e-14]):
f = getattr(fblas, p+'syr', None)
if f is None:
continue
assert_allclose(f(1.0, x), resx, rtol=rtol)
assert_allclose(f(1.0, x, lower=True), resx.T, rtol=rtol)
assert_allclose(f(1.0, y, incx=2, offx=2, n=4), resx, rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, y, incx=-2, offx=2, n=4),
resx_reverse, rtol=rtol)
a = np.zeros((4,4), 'f' if p == 's' else 'd', 'F')
b = f(1.0, x, a=a, overwrite_a=True)
assert_allclose(a, resx, rtol=rtol)
b = f(2.0, x, a=a)
assert_(a is not b)
assert_allclose(b, 3*resx, rtol=rtol)
assert_raises(Exception, f, 1.0, x, incx=0)
assert_raises(Exception, f, 1.0, x, offx=5)
assert_raises(Exception, f, 1.0, x, offx=-2)
assert_raises(Exception, f, 1.0, x, n=-2)
assert_raises(Exception, f, 1.0, x, n=5)
assert_raises(Exception, f, 1.0, x, lower=2)
assert_raises(Exception, f, 1.0, x, a=np.zeros((2,2), 'd', 'F'))
for p, rtol in zip('cz',[1e-7,1e-14]):
f = getattr(fblas, p+'syr', None)
if f is None:
continue
assert_allclose(f(1.0, z), resz, rtol=rtol)
assert_allclose(f(1.0, z, lower=True), resz.T, rtol=rtol)
assert_allclose(f(1.0, w, incx=3, offx=1, n=4), resz, rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, w, incx=-3, offx=1, n=4),
resz_reverse, rtol=rtol)
a = np.zeros((4,4), 'F' if p == 'c' else 'D', 'F')
b = f(1.0, z, a=a, overwrite_a=True)
assert_allclose(a, resz, rtol=rtol)
b = f(2.0, z, a=a)
assert_(a is not b)
assert_allclose(b, 3*resz, rtol=rtol)
assert_raises(Exception, f, 1.0, x, incx=0)
assert_raises(Exception, f, 1.0, x, offx=5)
assert_raises(Exception, f, 1.0, x, offx=-2)
assert_raises(Exception, f, 1.0, x, n=-2)
assert_raises(Exception, f, 1.0, x, n=5)
assert_raises(Exception, f, 1.0, x, lower=2)
assert_raises(Exception, f, 1.0, x, a=np.zeros((2,2), 'd', 'F'))
for p, rtol in zip('cz',[1e-7,1e-14]):
f = getattr(fblas, p+'her', None)
if f is None:
continue
assert_allclose(f(1.0, z), rehz, rtol=rtol)
assert_allclose(f(1.0, z, lower=True), rehz.T.conj(), rtol=rtol)
assert_allclose(f(1.0, w, incx=3, offx=1, n=4), rehz, rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, w, incx=-3, offx=1, n=4),
rehz_reverse, rtol=rtol)
a = np.zeros((4,4), 'F' if p == 'c' else 'D', 'F')
b = f(1.0, z, a=a, overwrite_a=True)
assert_allclose(a, rehz, rtol=rtol)
b = f(2.0, z, a=a)
assert_(a is not b)
assert_allclose(b, 3*rehz, rtol=rtol)
assert_raises(Exception, f, 1.0, x, incx=0)
assert_raises(Exception, f, 1.0, x, offx=5)
assert_raises(Exception, f, 1.0, x, offx=-2)
assert_raises(Exception, f, 1.0, x, n=-2)
assert_raises(Exception, f, 1.0, x, n=5)
assert_raises(Exception, f, 1.0, x, lower=2)
assert_raises(Exception, f, 1.0, x, a=np.zeros((2,2), 'd', 'F'))
def test_syr2(self):
x = np.arange(1, 5, dtype='d')
y = np.arange(5, 9, dtype='d')
resxy = np.triu(x[:, np.newaxis] * y + y[:, np.newaxis] * x)
resxy_reverse = np.triu(x[::-1, np.newaxis] * y[::-1]
+ y[::-1, np.newaxis] * x[::-1])
q = np.linspace(0,8.5,17,endpoint=False)
for p, rtol in zip('sd',[1e-7,1e-14]):
f = getattr(fblas, p+'syr2', None)
if f is None:
continue
assert_allclose(f(1.0, x, y), resxy, rtol=rtol)
assert_allclose(f(1.0, x, y, n=3), resxy[:3,:3], rtol=rtol)
assert_allclose(f(1.0, x, y, lower=True), resxy.T, rtol=rtol)
assert_allclose(f(1.0, q, q, incx=2, offx=2, incy=2, offy=10),
resxy, rtol=rtol)
assert_allclose(f(1.0, q, q, incx=2, offx=2, incy=2, offy=10, n=3),
resxy[:3,:3], rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, q, q, incx=-2, offx=2, incy=-2, offy=10),
resxy_reverse, rtol=rtol)
a = np.zeros((4,4), 'f' if p == 's' else 'd', 'F')
b = f(1.0, x, y, a=a, overwrite_a=True)
assert_allclose(a, resxy, rtol=rtol)
b = f(2.0, x, y, a=a)
assert_(a is not b)
assert_allclose(b, 3*resxy, rtol=rtol)
assert_raises(Exception, f, 1.0, x, y, incx=0)
assert_raises(Exception, f, 1.0, x, y, offx=5)
assert_raises(Exception, f, 1.0, x, y, offx=-2)
assert_raises(Exception, f, 1.0, x, y, incy=0)
assert_raises(Exception, f, 1.0, x, y, offy=5)
assert_raises(Exception, f, 1.0, x, y, offy=-2)
assert_raises(Exception, f, 1.0, x, y, n=-2)
assert_raises(Exception, f, 1.0, x, y, n=5)
assert_raises(Exception, f, 1.0, x, y, lower=2)
assert_raises(Exception, f, 1.0, x, y, a=np.zeros((2,2), 'd', 'F'))
def test_her2(self):
x = np.arange(1, 9, dtype='d').view('D')
y = np.arange(9, 17, dtype='d').view('D')
resxy = x[:, np.newaxis] * y.conj() + y[:, np.newaxis] * x.conj()
resxy = np.triu(resxy)
resxy_reverse = x[::-1, np.newaxis] * y[::-1].conj()
resxy_reverse += y[::-1, np.newaxis] * x[::-1].conj()
resxy_reverse = np.triu(resxy_reverse)
u = np.c_[np.zeros(4), x, np.zeros(4)].ravel()
v = np.c_[np.zeros(4), y, np.zeros(4)].ravel()
for p, rtol in zip('cz',[1e-7,1e-14]):
f = getattr(fblas, p+'her2', None)
if f is None:
continue
assert_allclose(f(1.0, x, y), resxy, rtol=rtol)
assert_allclose(f(1.0, x, y, n=3), resxy[:3,:3], rtol=rtol)
assert_allclose(f(1.0, x, y, lower=True), resxy.T.conj(), rtol=rtol)
assert_allclose(f(1.0, u, v, incx=3, offx=1, incy=3, offy=1),
resxy, rtol=rtol)
assert_allclose(f(1.0, u, v, incx=3, offx=1, incy=3, offy=1, n=3),
resxy[:3,:3], rtol=rtol)
# negative increments imply reversed vectors in blas
assert_allclose(f(1.0, u, v, incx=-3, offx=1, incy=-3, offy=1),
resxy_reverse, rtol=rtol)
a = np.zeros((4,4), 'F' if p == 'c' else 'D', 'F')
b = f(1.0, x, y, a=a, overwrite_a=True)
assert_allclose(a, resxy, rtol=rtol)
b = f(2.0, x, y, a=a)
assert_(a is not b)
assert_allclose(b, 3*resxy, rtol=rtol)
assert_raises(Exception, f, 1.0, x, y, incx=0)
assert_raises(Exception, f, 1.0, x, y, offx=5)
assert_raises(Exception, f, 1.0, x, y, offx=-2)
assert_raises(Exception, f, 1.0, x, y, incy=0)
assert_raises(Exception, f, 1.0, x, y, offy=5)
assert_raises(Exception, f, 1.0, x, y, offy=-2)
assert_raises(Exception, f, 1.0, x, y, n=-2)
assert_raises(Exception, f, 1.0, x, y, n=5)
assert_raises(Exception, f, 1.0, x, y, lower=2)
assert_raises(Exception, f, 1.0, x, y, a=np.zeros((2,2), 'd', 'F'))
class TestFBLAS3Simple(TestCase):
def test_gemm(self):
for p in 'sd':
f = getattr(fblas,p+'gemm',None)
if f is None:
continue
assert_array_almost_equal(f(3,[3],[-4]),[[-36]])
assert_array_almost_equal(f(3,[3],[-4],3,[5]),[-21])
for p in 'cz':
f = getattr(fblas,p+'gemm',None)
if f is None:
continue
assert_array_almost_equal(f(3j,[3-4j],[-4]),[[-48-36j]])
assert_array_almost_equal(f(3j,[3-4j],[-4],3,[5j]),[-48-21j])
def _get_func(func, ps='sdzc'):
"""Just a helper: return a specified BLAS function w/typecode."""
for p in ps:
f = getattr(fblas, p+func, None)
if f is None:
continue
yield f
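# For example (added comment), list(_get_func('gemm', 'sd')) yields whichever
# of fblas.sgemm and fblas.dgemm were built, silently skipping missing
# prefixes.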
class TestBLAS3Symm(TestCase):
def setUp(self):
self.a = np.array([[1., 2.],
[0., 1.]])
self.b = np.array([[1., 0., 3.],
[0., -1., 2.]])
self.c = np.ones((2,3))
self.t = np.array([[2., -1., 8.],
[3., 0., 9.]])
def test_symm(self):
for f in _get_func('symm'):
res = f(a=self.a, b=self.b, c=self.c, alpha=1., beta=1.)
assert_array_almost_equal(res, self.t)
res = f(a=self.a.T, b=self.b, lower=1, c=self.c, alpha=1., beta=1.)
assert_array_almost_equal(res, self.t)
res = f(a=self.a, b=self.b.T, side=1, c=self.c.T, alpha=1., beta=1.)
assert_array_almost_equal(res, self.t.T)
def test_symm_wrong_side(self):
f = getattr(fblas, 'dsymm', None)
if f is not None:
assert_raises(Exception, f, **{'a': self.a, 'b': self.b, 'alpha': 1,
'side': 1})
# `side=1` means C <- B*A, hence shapes of A and B are to be
# compatible. Otherwise, f2py exception is raised
def test_symm_wrong_uplo(self):
"""SYMM only considers the upper/lower part of A. Hence setting
wrong value for `lower` (default is lower=0, meaning upper triangle)
gives a wrong result.
"""
f = getattr(fblas,'dsymm',None)
if f is not None:
res = f(a=self.a, b=self.b, c=self.c, alpha=1., beta=1.)
assert np.allclose(res, self.t)
res = f(a=self.a, b=self.b, lower=1, c=self.c, alpha=1., beta=1.)
assert not np.allclose(res, self.t)
class TestBLAS3Syrk(TestCase):
def setUp(self):
self.a = np.array([[1., 0.],
[0., -2.],
[2., 3.]])
self.t = np.array([[1., 0., 2.],
[0., 4., -6.],
[2., -6., 13.]])
self.tt = np.array([[5., 6.],
[6., 13.]])
def test_syrk(self):
for f in _get_func('syrk'):
c = f(a=self.a, alpha=1.)
assert_array_almost_equal(np.triu(c), np.triu(self.t))
c = f(a=self.a, alpha=1., lower=1)
assert_array_almost_equal(np.tril(c), np.tril(self.t))
c0 = np.ones(self.t.shape)
c = f(a=self.a, alpha=1., beta=1., c=c0)
assert_array_almost_equal(np.triu(c), np.triu(self.t+c0))
c = f(a=self.a, alpha=1., trans=1)
assert_array_almost_equal(np.triu(c), np.triu(self.tt))
#prints '0-th dimension must be fixed to 3 but got 5', FIXME: suppress?
# FIXME: how to catch the _fblas.error?
def test_syrk_wrong_c(self):
f = getattr(fblas, 'dsyrk', None)
if f is not None:
assert_raises(Exception, f, **{'a': self.a, 'alpha': 1.,
'c': np.ones((5, 8))})
# if C is supplied, it must have compatible dimensions
class TestBLAS3Syr2k(TestCase):
def setUp(self):
self.a = np.array([[1., 0.],
[0., -2.],
[2., 3.]])
self.b = np.array([[0., 1.],
[1., 0.],
[0, 1.]])
self.t = np.array([[0., -1., 3.],
[-1., 0., 0.],
[3., 0., 6.]])
self.tt = np.array([[0., 1.],
[1., 6]])
def test_syr2k(self):
for f in _get_func('syr2k'):
c = f(a=self.a, b=self.b, alpha=1.)
assert_array_almost_equal(np.triu(c), np.triu(self.t))
c = f(a=self.a, b=self.b, alpha=1., lower=1)
assert_array_almost_equal(np.tril(c), np.tril(self.t))
c0 = np.ones(self.t.shape)
c = f(a=self.a, b=self.b, alpha=1., beta=1., c=c0)
assert_array_almost_equal(np.triu(c), np.triu(self.t+c0))
c = f(a=self.a, b=self.b, alpha=1., trans=1)
assert_array_almost_equal(np.triu(c), np.triu(self.tt))
#prints '0-th dimension must be fixed to 3 but got 5', FIXME: suppress?
def test_syr2k_wrong_c(self):
f = getattr(fblas, 'dsyr2k', None)
if f is not None:
assert_raises(Exception, f, **{'a': self.a, 'b': self.b, 'alpha': 1.,
'c': np.zeros((15, 8))})
# if C is supplied, it must have compatible dimensions
class TestSyHe(TestCase):
"""Quick and simple tests for (zc)-symm, syrk, syr2k."""
def setUp(self):
self.sigma_y = np.array([[0., -1.j],
[1.j, 0.]])
def test_symm_zc(self):
for f in _get_func('symm', 'zc'):
# NB: symm only reads the upper triangle of a (lower=0 is the default)
res = f(a=self.sigma_y, b=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), np.diag([1, -1]))
def test_hemm_zc(self):
for f in _get_func('hemm', 'zc'):
# NB: hemm only reads the upper triangle of a (lower=0 is the default)
res = f(a=self.sigma_y, b=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), np.diag([1, 1]))
def test_syrk_zr(self):
for f in _get_func('syrk', 'zc'):
res = f(a=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), np.diag([-1, -1]))
def test_herk_zr(self):
for f in _get_func('herk', 'zc'):
res = f(a=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), np.diag([1, 1]))
def test_syr2k_zr(self):
for f in _get_func('syr2k', 'zc'):
res = f(a=self.sigma_y, b=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), 2.*np.diag([-1, -1]))
def test_her2k_zr(self):
for f in _get_func('her2k', 'zc'):
res = f(a=self.sigma_y, b=self.sigma_y, alpha=1.)
assert_array_almost_equal(np.triu(res), 2.*np.diag([1, 1]))
class TestTRMM(TestCase):
"""Quick and simple tests for dtrmm."""
def setUp(self):
self.a = np.array([[1., 2., ],
[-2., 1.]])
self.b = np.array([[3., 4., -1.],
[5., 6., -2.]])
def test_ab(self):
f = getattr(fblas, 'dtrmm', None)
if f is not None:
result = f(1., self.a, self.b)
expected = np.array([[13., 16., -5.],
[5., 6., -2.]]) # default a is upper triangular
assert_array_almost_equal(result, expected)
def test_ab_lower(self):
f = getattr(fblas, 'dtrmm', None)
if f is not None:
result = f(1., self.a, self.b, lower=True)
expected = np.array([[3., 4., -1.],
[-1., -2., 0.]]) # now a is lower triangular
assert_array_almost_equal(result, expected)
def test_b_overwrites(self):
# BLAS dtrmm modifies B argument in-place.
# Here the default is to copy, but this can be overridden
f = getattr(fblas, 'dtrmm', None)
if f is not None:
for overwr in [True, False]:
bcopy = self.b.copy()
result = f(1., self.a, bcopy, overwrite_b=overwr)
# C-contiguous arrays are copied
assert_(bcopy.flags.f_contiguous is False and
np.may_share_memory(bcopy, result) is False)
assert_equal(bcopy, self.b)
bcopy = np.asfortranarray(self.b.copy()) # or just transpose it
result = f(1., self.a, bcopy, overwrite_b=True)
assert_(bcopy.flags.f_contiguous is True and
np.may_share_memory(bcopy, result) is True)
assert_array_almost_equal(bcopy, result)
if __name__ == "__main__":
run_module_suite()
| agpl-3.0 |
xnox/systemd | .ycm_extra_conf.py | 129 | 1551 | import itertools
import os
import subprocess
def GetFlagsFromMakefile(varname):
return subprocess.check_output([
"make", "-s", "print-%s" % varname]).decode().split()
def Flatten(lists):
return list(itertools.chain.from_iterable(lists))
def DirectoryOfThisScript():
return os.path.dirname(os.path.abspath(__file__))
def MakeRelativePathsInFlagsAbsolute(flags, working_directory):
if not working_directory:
return flags
new_flags = []
make_next_absolute = False
path_flags = [ '-isystem', '-I', '-iquote', '--sysroot=' ]
for flag in flags:
new_flag = flag
if make_next_absolute:
make_next_absolute = False
if not flag.startswith('/'):
new_flag = os.path.join(working_directory, flag)
for path_flag in path_flags:
if flag == path_flag:
make_next_absolute = True
break
if flag.startswith(path_flag):
path = flag[ len(path_flag): ]
new_flag = path_flag + os.path.join(working_directory, path)
break
if new_flag:
new_flags.append(new_flag)
return new_flags
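# Illustrative example (paths are hypothetical): with working directory
# '/work/systemd', ['-I', 'src', '-DFOO'] becomes
# ['-I', '/work/systemd/src', '-DFOO'] while non-path flags pass through.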
def FlagsForFile(filename):
relative_to = DirectoryOfThisScript()
return {
'flags': MakeRelativePathsInFlagsAbsolute(flags, relative_to),
'do_cache': True
}
flags = Flatten(map(GetFlagsFromMakefile, [
'AM_CPPFLAGS',
'CPPFLAGS',
'AM_CFLAGS',
'CFLAGS',
]))
# these flags cause crashes in libclang, so remove them (if present)
for bad_flag in ('-Wlogical-op', '-Wsuggest-attribute=noreturn', '-Wdate-time'):
  if bad_flag in flags:
    flags.remove(bad_flag)
# vim: set et ts=2 sw=2:
| gpl-2.0 |
plasma-disassembler/plasma | plasma/lib/arch/x86/output.py | 3 | 16665 | #!/usr/bin/env python3
#
# PLASMA : Generate an indented asm code (pseudo-C) with colored syntax.
# Copyright (C) 2015 Joel
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from capstone.x86 import (X86_INS_ADD, X86_INS_AND, X86_INS_CMP, X86_INS_DEC,
X86_INS_IDIV, X86_INS_IMUL, X86_INS_INC, X86_INS_MOV, X86_INS_SHL,
X86_INS_SHR, X86_INS_SUB, X86_INS_XOR, X86_OP_IMM,
X86_OP_INVALID, X86_OP_MEM, X86_OP_REG, X86_REG_EBP, X86_REG_EIP,
X86_REG_RBP, X86_REG_RIP, X86_INS_CDQE, X86_INS_LEA, X86_INS_MOVSX,
X86_INS_OR, X86_INS_NOT, X86_PREFIX_REP, X86_PREFIX_REPNE,
X86_INS_TEST, X86_INS_JNS, X86_INS_JS, X86_INS_MUL, X86_INS_JP,
X86_INS_JNP, X86_INS_JCXZ, X86_INS_JECXZ, X86_INS_JRCXZ,
X86_INS_SAR, X86_INS_SAL, X86_INS_MOVZX, X86_INS_STOSB,
X86_INS_STOSW, X86_INS_STOSD, X86_INS_STOSQ, X86_INS_MOVSB,
X86_INS_MOVSW, X86_INS_MOVSD, X86_INS_MOVSQ, X86_INS_LODSB,
X86_INS_LODSW, X86_INS_LODSD, X86_INS_LODSQ, X86_INS_CMPSB,
X86_INS_CMPSW, X86_INS_CMPSD, X86_INS_CMPSQ, X86_INS_SCASB,
X86_INS_SCASW, X86_INS_SCASD, X86_INS_SCASQ, X86_INS_XADD, X86_PREFIX_LOCK,
X86_INS_MOVSS, X86_INS_MOVAPD, X86_INS_MOVAPS, X86_INS_MOVUPS, X86_INS_MOVUPD)
from plasma.lib.output import OutputAbs
from plasma.lib.arch.x86.utils import (inst_symbol, is_call, is_jump, is_ret,
is_uncond_jump, cond_symbol, is_pushpop)
ASSIGNMENT_OPS = {X86_INS_XOR, X86_INS_AND, X86_INS_OR,
X86_INS_SAR, X86_INS_SAL, X86_INS_SHR, X86_INS_SHL}
# After these instructions we need to add a zero
# example : jns ADDR -> if > 0
COND_ADD_ZERO = {
X86_INS_JNS,
X86_INS_JS,
X86_INS_JP,
X86_INS_JNP,
X86_INS_JCXZ,
X86_INS_JECXZ,
X86_INS_JRCXZ
}
INST_CHECK = {X86_INS_SUB, X86_INS_ADD, X86_INS_MOV, X86_INS_CMP,
X86_INS_XOR, X86_INS_AND, X86_INS_SHR, X86_INS_SHL, X86_INS_IMUL,
X86_INS_SAR, X86_INS_SAL, X86_INS_MOVZX, X86_INS_XADD,
X86_INS_DEC, X86_INS_INC, X86_INS_LEA, X86_INS_MOVSX, X86_INS_OR}
INST_STOS = {X86_INS_STOSB, X86_INS_STOSW, X86_INS_STOSD, X86_INS_STOSQ}
INST_LODS = {X86_INS_LODSB, X86_INS_LODSW, X86_INS_LODSD, X86_INS_LODSQ}
INST_MOVS = {X86_INS_MOVSB, X86_INS_MOVSW, X86_INS_MOVSD, X86_INS_MOVSQ}
INST_CMPS = {X86_INS_CMPSB, X86_INS_CMPSW, X86_INS_CMPSD, X86_INS_CMPSQ}
INST_SCAS = {X86_INS_SCASB, X86_INS_SCASW, X86_INS_SCASD, X86_INS_SCASQ}
INST_MOV_SSE = {X86_INS_MOVSD, X86_INS_MOVSS, X86_INS_MOVAPD, X86_INS_MOVAPS, X86_INS_MOVUPS, X86_INS_MOVUPD}
REP_PREFIX = {X86_PREFIX_REPNE, X86_PREFIX_REP}
def is_sse_movd(i):
return i.id == X86_INS_MOVSD and i.bytes[0] == 0xF2
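# e.g. the SSE2 scalar move encodes as F2 0F 10/11 (first byte 0xF2),
# while the "move string dword" MOVSD encodes as a bare A5 opcode.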
class Output(OutputAbs):
def _operand(self, i, num_op, hexa=False, show_deref=True,
force_dont_print_data=False, is_from_jump=False):
def inv(n):
return n == X86_OP_INVALID
op = i.operands[num_op]
if op.type == X86_OP_IMM:
self._imm(op.value.imm, op.size, hexa,
force_dont_print_data=force_dont_print_data,
is_from_jump=is_from_jump)
elif op.type == X86_OP_REG:
self._add(i.reg_name(op.value.reg))
# elif op.type == X86_OP_FP:
# self._add("%f" % op.value.fp)
elif op.type == X86_OP_MEM:
mm = op.mem
ret = self.get_var_offset(i, num_op)
if ret is not None:
func_addr, off = ret
if i.id == X86_INS_LEA:
self._add("&(")
self._variable(self.get_var_name(func_addr, off))
if i.id == X86_INS_LEA:
self._add(")")
return
if inv(mm.segment) and inv(mm.index) and mm.disp != 0:
if mm.base == X86_REG_RIP or mm.base == X86_REG_EIP:
ad = i.address + i.size + mm.disp
if i.id != X86_INS_LEA and self.deref_if_offset(ad):
return
if show_deref:
self._add("*(")
dont_print_data = ad in self.gctx.db.imports or \
self.gctx.api.mem.is_code(ad)
self._imm(ad, 4, True,
force_dont_print_data=dont_print_data)
if show_deref:
self._add(")")
return
elif inv(mm.base):
if i.id != X86_INS_LEA and self.deref_if_offset(mm.disp):
return
printed = False
if show_deref:
self._add("*(")
if not inv(mm.base):
self._add(i.reg_name(mm.base))
printed = True
elif not inv(mm.segment):
self._add(i.reg_name(mm.segment))
printed = True
if not inv(mm.index):
if printed:
self._add(" + ")
if mm.scale == 1:
self._add(i.reg_name(mm.index))
else:
self._add("(%s*%d)" % (i.reg_name(mm.index), mm.scale))
printed = True
if mm.disp != 0:
section = self._binary.get_section(mm.disp)
is_label = self.is_label(mm.disp)
if is_label or section is not None:
if printed:
self._add(" + ")
self._imm(mm.disp, 0, True, section=section,
force_dont_print_data=True)
else:
if printed:
if mm.disp < 0:
self._add(" - %d" % (-mm.disp))
else:
self._add(" + %d" % mm.disp)
else:
self._add("%d" % mm.disp)
if show_deref:
self._add(")")
def _if_cond(self, jump_cond, fused_inst):
if fused_inst is None:
self._add(cond_symbol(jump_cond))
if jump_cond in COND_ADD_ZERO:
self._add(" 0")
return
assignment = fused_inst.id in ASSIGNMENT_OPS
if assignment:
self._add("(")
self._add("(")
self._operand(fused_inst, 0)
self._add(" ")
if fused_inst.id == X86_INS_TEST:
self._add(cond_symbol(jump_cond))
elif assignment:
self._add(inst_symbol(fused_inst))
self._add(" ")
self._operand(fused_inst, 1)
self._add(") ")
self._add(cond_symbol(jump_cond))
else:
self._add(cond_symbol(jump_cond))
self._add(" ")
self._operand(fused_inst, 1)
if fused_inst.id == X86_INS_TEST or \
(fused_inst.id != X86_INS_CMP and \
(jump_cond in COND_ADD_ZERO or assignment)):
self._add(" 0")
self._add(")")
def _rep_begin(self, i, tab):
if i.prefix[0] in REP_PREFIX:
self._new_line()
self._tabs(tab)
self._keyword("while")
# TODO: for 16 and 32 bits
self._add(" (!rcx) {")
self._new_line()
self._tabs(tab + 1)
self._address(i.address)
tab += 1
return tab
def _rep_end(self, i, tab):
if i.prefix[0] in REP_PREFIX:
self._new_line()
self._tabs(tab)
self._address(i.address)
self._add("rcx--")
self._new_line()
if i.prefix[0] == X86_PREFIX_REPNE:
self._tabs(tab)
self._keyword("if")
self._add(" (Z) ")
self._keyword("break")
self._new_line()
tab -= 1
self._tabs(tab)
self._add("}")
return tab
def _pre_asm_inst(self, i, tab):
if i.prefix[0] == X86_PREFIX_LOCK:
self._add("lock ")
return tab
return self._rep_begin(i, tab)
def _post_asm_inst(self, i, tab):
self._rep_end(i, tab)
def _sub_asm_inst(self, i, tab=0):
modified = False
# TODO: bad hack
ops = i.operands
is_imm = i.address in self.gctx.db.immediates and len(ops) == 2 and \
(ops[1].type == X86_OP_MEM or \
ops[0].type == X86_OP_REG and ops[1].type == X86_OP_IMM) and \
i.id not in [X86_INS_CMP, X86_INS_TEST]
if self.gctx.capstone_string == 0:
if is_imm:
self._section("!")
self._operand(i, 0)
self._add(" = ")
self._imm(self.gctx.db.immediates[i.address],
self._dis.wordsize, False)
return
if i.id in INST_CHECK:
if (i.id == X86_INS_OR and i.operands[1].type == X86_OP_IMM and
i.operands[1].value.imm == -1):
self._operand(i, 0)
self._add(" = -1")
elif (i.id == X86_INS_AND and i.operands[1].type == X86_OP_IMM and
i.operands[1].value.imm == 0):
self._operand(i, 0)
self._add(" = 0")
elif (all(op.type == X86_OP_REG for op in i.operands) and
len(set(op.value.reg for op in i.operands)) == 1 and
i.id == X86_INS_XOR):
self._operand(i, 0)
self._add(" = 0")
elif i.id == X86_INS_INC or i.id == X86_INS_DEC:
self._operand(i, 0)
self._add(inst_symbol(i))
elif i.id == X86_INS_LEA:
self._operand(i, 0)
self._add(" = ")
self._operand(i, 1, show_deref=False)
elif i.id == X86_INS_MOVZX:
self._operand(i, 0)
self._add(" = (zero ext) ")
self._operand(i, 1)
elif i.id == X86_INS_IMUL:
if len(i.operands) == 3:
self._operand(i, 0)
self._add(" = ")
self._operand(i, 1)
self._add(" " + inst_symbol(i).rstrip('=') + " ")
self._operand(i, 2)
elif len(i.operands) == 2:
self._operand(i, 0)
self._add(" " + inst_symbol(i) + " ")
self._operand(i, 1)
elif len(i.operands) == 1:
sz = i.operands[0].size
if sz == 1:
self._add("ax = al * ")
elif sz == 2:
self._add("dx:ax = ax * ")
elif sz == 4:
self._add("edx:eax = eax * ")
elif sz == 8:
self._add("rdx:rax = rax * ")
self._operand(i, 0)
elif i.id == X86_INS_XADD:
self._add("tmp = ")
self._operand(i, 0)
self._add("; ")
self._operand(i, 0)
self._add(" += ")
self._operand(i, 1)
self._add("; ")
self._operand(i, 1)
self._add(" = tmp")
else:
self._operand(i, 0)
self._add(" " + inst_symbol(i) + " ")
self._operand(i, 1)
return
if i.id == X86_INS_CDQE:
self._add("rax = eax")
return
if i.id == X86_INS_IDIV:
self._add('eax = edx:eax / ')
self._operand(i, 0)
self._add('; edx = edx:eax % ')
self._operand(i, 0)
return
if i.id == X86_INS_MUL:
lut = {1: ("al", "ax"), 2: ("ax", "dx:ax"), 4: ("eax", "edx:eax"),
8: ("rax", "rdx:rax")}
src, dst = lut[i.operands[0].size]
self._add('{0} = {1} * '.format(dst, src))
self._operand(i, 0)
return
if i.id == X86_INS_NOT:
self._operand(i, 0)
self._add(' ~= ')
self._operand(i, 0)
return
if i.id in INST_SCAS:
self._operand(i, 0)
self._add(" cmp ")
self._operand(i, 1)
self._new_line()
self._tabs(tab)
self._address(i.address)
self._operand(i, 1, show_deref=False)
self._add(" += D ? -{0} : {0}".format(i.operands[0].size))
return
if i.id in INST_STOS:
self._operand(i, 0)
self._add(" = ")
self._operand(i, 1)
self._new_line()
self._tabs(tab)
self._address(i.address)
self._operand(i, 0, show_deref=False)
self._add(" += D ? -{0} : {0}".format(i.operands[0].size))
return
if i.id in INST_LODS:
self._operand(i, 0)
self._add(" = ")
self._operand(i, 1)
self._new_line()
self._tabs(tab)
self._address(i.address)
self._operand(i, 1, show_deref=False)
self._add(" += D ? -{0} : {0}".format(i.operands[0].size))
return
if i.id in INST_CMPS:
self._operand(i, 0)
self._add(" cmp ")
self._operand(i, 1)
self._new_line()
self._tabs(tab)
self._address(i.address)
self._operand(i, 0, show_deref=False)
self._add(" += D ? -{0} : {0}".format(i.operands[0].size))
self._new_line()
self._tabs(tab)
self._address(i.address)
self._operand(i, 1, show_deref=False)
self._add(" += D ? -{0} : {0}".format(i.operands[0].size))
return
        # MOVSD is both the SSE2 scalar move and the "move string dword"
        # instruction; is_sse_movd() tells them apart by the 0xF2 prefix byte.
if i.id in INST_MOVS and not is_sse_movd(i):
self._operand(i, 0)
self._add(" = ")
self._operand(i, 1)
self._new_line()
self._tabs(tab)
self._address(i.address)
self._operand(i, 0, show_deref=False)
self._add(" += D ? -{0} : {0}".format(i.operands[0].size))
self._new_line()
self._tabs(tab)
self._address(i.address)
self._operand(i, 1, show_deref=False)
self._add(" += D ? -{0} : {0}".format(i.operands[0].size))
return
if i.id in INST_MOV_SSE:
self._operand(i, 0)
self._add(" = ")
self._operand(i, 1)
return
if is_imm:
self._section("!")
self._add("mov ")
self._operand(i, 0)
self._add(", ")
self._imm(self.gctx.db.immediates[i.address],
self._dis.wordsize, True)
return
if len(i.operands) > 0:
if is_pushpop(i):
self._pushpop(i.mnemonic)
self._add(" ")
else:
self._add("%s " % i.mnemonic)
self._operand(i, 0)
k = 1
while k < len(i.operands):
self._add(", ")
self._operand(i, k)
k += 1
else:
if is_pushpop(i):
self._pushpop(i.mnemonic)
else:
self._add(i.mnemonic)
| gpl-3.0 |
Weicong-Lin/pymo-global | android/pgs4a-0.9.6/python-install/lib/python2.7/test/test_dis.py | 19 | 4548 | # Minimal tests for dis module
from test.test_support import run_unittest
import unittest
import sys
import dis
import StringIO
def _f(a):
print a
return 1
dis_f = """\
%-4d 0 LOAD_FAST 0 (a)
3 PRINT_ITEM
4 PRINT_NEWLINE
%-4d 5 LOAD_CONST 1 (1)
8 RETURN_VALUE
"""%(_f.func_code.co_firstlineno + 1,
_f.func_code.co_firstlineno + 2)
def bug708901():
for res in range(1,
10):
pass
dis_bug708901 = """\
%-4d 0 SETUP_LOOP 23 (to 26)
3 LOAD_GLOBAL 0 (range)
6 LOAD_CONST 1 (1)
%-4d 9 LOAD_CONST 2 (10)
12 CALL_FUNCTION 2
15 GET_ITER
>> 16 FOR_ITER 6 (to 25)
19 STORE_FAST 0 (res)
%-4d 22 JUMP_ABSOLUTE 16
>> 25 POP_BLOCK
>> 26 LOAD_CONST 0 (None)
29 RETURN_VALUE
"""%(bug708901.func_code.co_firstlineno + 1,
bug708901.func_code.co_firstlineno + 2,
bug708901.func_code.co_firstlineno + 3)
def bug1333982(x=[]):
assert 0, ([s for s in x] +
1)
pass
dis_bug1333982 = """\
%-4d 0 LOAD_CONST 1 (0)
3 POP_JUMP_IF_TRUE 38
6 LOAD_GLOBAL 0 (AssertionError)
9 BUILD_LIST 0
12 LOAD_FAST 0 (x)
15 GET_ITER
>> 16 FOR_ITER 12 (to 31)
19 STORE_FAST 1 (s)
22 LOAD_FAST 1 (s)
25 LIST_APPEND 2
28 JUMP_ABSOLUTE 16
%-4d >> 31 LOAD_CONST 2 (1)
34 BINARY_ADD
35 RAISE_VARARGS 2
%-4d >> 38 LOAD_CONST 0 (None)
41 RETURN_VALUE
"""%(bug1333982.func_code.co_firstlineno + 1,
bug1333982.func_code.co_firstlineno + 2,
bug1333982.func_code.co_firstlineno + 3)
_BIG_LINENO_FORMAT = """\
%3d 0 LOAD_GLOBAL 0 (spam)
3 POP_TOP
4 LOAD_CONST 0 (None)
7 RETURN_VALUE
"""
class DisTests(unittest.TestCase):
def do_disassembly_test(self, func, expected):
s = StringIO.StringIO()
save_stdout = sys.stdout
sys.stdout = s
dis.dis(func)
sys.stdout = save_stdout
got = s.getvalue()
# Trim trailing blanks (if any).
lines = got.split('\n')
lines = [line.rstrip() for line in lines]
expected = expected.split("\n")
import difflib
if expected != lines:
self.fail(
"events did not match expectation:\n" +
"\n".join(difflib.ndiff(expected,
lines)))
def test_opmap(self):
self.assertEqual(dis.opmap["STOP_CODE"], 0)
self.assertIn(dis.opmap["LOAD_CONST"], dis.hasconst)
self.assertIn(dis.opmap["STORE_NAME"], dis.hasname)
def test_opname(self):
self.assertEqual(dis.opname[dis.opmap["LOAD_FAST"]], "LOAD_FAST")
def test_boundaries(self):
self.assertEqual(dis.opmap["EXTENDED_ARG"], dis.EXTENDED_ARG)
self.assertEqual(dis.opmap["STORE_NAME"], dis.HAVE_ARGUMENT)
def test_dis(self):
self.do_disassembly_test(_f, dis_f)
def test_bug_708901(self):
self.do_disassembly_test(bug708901, dis_bug708901)
def test_bug_1333982(self):
# This one is checking bytecodes generated for an `assert` statement,
# so fails if the tests are run with -O. Skip this test then.
if __debug__:
self.do_disassembly_test(bug1333982, dis_bug1333982)
def test_big_linenos(self):
def func(count):
namespace = {}
func = "def foo():\n " + "".join(["\n "] * count + ["spam\n"])
exec func in namespace
return namespace['foo']
# Test all small ranges
for i in xrange(1, 300):
expected = _BIG_LINENO_FORMAT % (i + 2)
self.do_disassembly_test(func(i), expected)
# Test some larger ranges too
for i in xrange(300, 5000, 10):
expected = _BIG_LINENO_FORMAT % (i + 2)
self.do_disassembly_test(func(i), expected)
def test_main():
run_unittest(DisTests)
if __name__ == "__main__":
test_main()
| mit |
andy8788/hadoop-hdfs | src/contrib/hod/hodlib/Common/xmlrpc.py | 182 | 2374 | #Licensed to the Apache Software Foundation (ASF) under one
#or more contributor license agreements. See the NOTICE file
#distributed with this work for additional information
#regarding copyright ownership. The ASF licenses this file
#to you under the Apache License, Version 2.0 (the
#"License"); you may not use this file except in compliance
#with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import xmlrpclib, time, random, signal
from hodlib.Common.util import hodInterrupt, HodInterruptException
class hodXRClient(xmlrpclib.ServerProxy):
def __init__(self, uri, transport=None, encoding=None, verbose=0,
allow_none=0, installSignalHandlers=1, retryRequests=True, timeOut=15):
xmlrpclib.ServerProxy.__init__(self, uri, transport, encoding, verbose,
allow_none)
self.__retryRequests = retryRequests
self.__timeOut = timeOut
if (installSignalHandlers!=0):
self.__set_alarm()
def __set_alarm(self):
def alarm_handler(sigNum, sigHandler):
raise Exception("XML-RPC socket timeout.")
signal.signal(signal.SIGALRM, alarm_handler)
def __request(self, methodname, params):
response = None
retryWaitTime = 5 + random.randint(0, 5)
for i in range(0, 30):
signal.alarm(self.__timeOut)
try:
response = self._ServerProxy__request(methodname, params)
signal.alarm(0)
break
      except Exception:
        # cancel any pending alarm before retrying or re-raising
        signal.alarm(0)
        if self.__retryRequests:
          if hodInterrupt.isSet():
            raise HodInterruptException()
          time.sleep(retryWaitTime)
        else:
          raise Exception("hodXRClientTimeout")
return response
def __getattr__(self, name):
# magic method dispatcher
return xmlrpclib._Method(self.__request, name)
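# Usage sketch -- the URI and method name below are hypothetical; any
# XML-RPC call resolves through __getattr__ and the retrying __request:
#   client = hodXRClient('http://localhost:8000')
#   status = client.getServiceStatus()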
| apache-2.0 |
laszlocsomor/tensorflow | tensorflow/python/keras/preprocessing/text/__init__.py | 69 | 1146 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras data preprocessing utils for text data."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras._impl.keras.preprocessing.text import one_hot
from tensorflow.python.keras._impl.keras.preprocessing.text import text_to_word_sequence
from tensorflow.python.keras._impl.keras.preprocessing.text import Tokenizer
del absolute_import
del division
del print_function
| apache-2.0 |
pellaeon/bsd-cloudinit | cloudbaseinit/plugins/common/factory.py | 1 | 1673 | # Copyright 2012 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from cloudbaseinit.utils import classloader
opts = [
cfg.ListOpt(
'plugins',
default=[
'cloudbaseinit.plugins.freebsd.sethostname.SetHostNamePlugin',
'cloudbaseinit.plugins.freebsd.scramblerootpassword.ScrambleRootPassword',
'cloudbaseinit.plugins.freebsd.createuser.CreateUserPlugin',
'cloudbaseinit.plugins.freebsd.setuserpassword.SetUserPasswordPlugin',
'cloudbaseinit.plugins.freebsd.enlargeroot.EnlargeRoot',
'cloudbaseinit.plugins.freebsd.sshpublickeys.SetUserSSHPublicKeysPlugin',
#'cloudbaseinit.plugins.freebsd.extendvolumes.ExtendVolumesPlugin',
'cloudbaseinit.plugins.freebsd.userdata.UserDataPlugin',
],
help='List of enabled plugin classes, '
'to executed in the provided order'),
]
CONF = cfg.CONF
CONF.register_opts(opts)
def load_plugins():
plugins = []
cl = classloader.ClassLoader()
for class_path in CONF.plugins:
plugins.append(cl.load_class(class_path)())
return plugins
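# Usage sketch: plugins are instantiated in the configured order. The
# execute() call below is an assumption -- the real entry point depends
# on the plugin base class:
#   for plugin in load_plugins():
#       plugin.execute(service, shared_data)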
| apache-2.0 |
isandlaTech/cohorte-devtools | org.cohorte.eclipse.runner.basic/files/jython/Lib/encodings/mac_greek.py | 593 | 13977 | """ Python Character Mapping Codec mac_greek generated from 'MAPPINGS/VENDORS/APPLE/GREEK.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='mac-greek',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> CONTROL CHARACTER
u'\x01' # 0x01 -> CONTROL CHARACTER
u'\x02' # 0x02 -> CONTROL CHARACTER
u'\x03' # 0x03 -> CONTROL CHARACTER
u'\x04' # 0x04 -> CONTROL CHARACTER
u'\x05' # 0x05 -> CONTROL CHARACTER
u'\x06' # 0x06 -> CONTROL CHARACTER
u'\x07' # 0x07 -> CONTROL CHARACTER
u'\x08' # 0x08 -> CONTROL CHARACTER
u'\t' # 0x09 -> CONTROL CHARACTER
u'\n' # 0x0A -> CONTROL CHARACTER
u'\x0b' # 0x0B -> CONTROL CHARACTER
u'\x0c' # 0x0C -> CONTROL CHARACTER
u'\r' # 0x0D -> CONTROL CHARACTER
u'\x0e' # 0x0E -> CONTROL CHARACTER
u'\x0f' # 0x0F -> CONTROL CHARACTER
u'\x10' # 0x10 -> CONTROL CHARACTER
u'\x11' # 0x11 -> CONTROL CHARACTER
u'\x12' # 0x12 -> CONTROL CHARACTER
u'\x13' # 0x13 -> CONTROL CHARACTER
u'\x14' # 0x14 -> CONTROL CHARACTER
u'\x15' # 0x15 -> CONTROL CHARACTER
u'\x16' # 0x16 -> CONTROL CHARACTER
u'\x17' # 0x17 -> CONTROL CHARACTER
u'\x18' # 0x18 -> CONTROL CHARACTER
u'\x19' # 0x19 -> CONTROL CHARACTER
u'\x1a' # 0x1A -> CONTROL CHARACTER
u'\x1b' # 0x1B -> CONTROL CHARACTER
u'\x1c' # 0x1C -> CONTROL CHARACTER
u'\x1d' # 0x1D -> CONTROL CHARACTER
u'\x1e' # 0x1E -> CONTROL CHARACTER
u'\x1f' # 0x1F -> CONTROL CHARACTER
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> CONTROL CHARACTER
u'\xc4' # 0x80 -> LATIN CAPITAL LETTER A WITH DIAERESIS
u'\xb9' # 0x81 -> SUPERSCRIPT ONE
u'\xb2' # 0x82 -> SUPERSCRIPT TWO
u'\xc9' # 0x83 -> LATIN CAPITAL LETTER E WITH ACUTE
u'\xb3' # 0x84 -> SUPERSCRIPT THREE
u'\xd6' # 0x85 -> LATIN CAPITAL LETTER O WITH DIAERESIS
u'\xdc' # 0x86 -> LATIN CAPITAL LETTER U WITH DIAERESIS
u'\u0385' # 0x87 -> GREEK DIALYTIKA TONOS
u'\xe0' # 0x88 -> LATIN SMALL LETTER A WITH GRAVE
u'\xe2' # 0x89 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
u'\xe4' # 0x8A -> LATIN SMALL LETTER A WITH DIAERESIS
u'\u0384' # 0x8B -> GREEK TONOS
u'\xa8' # 0x8C -> DIAERESIS
u'\xe7' # 0x8D -> LATIN SMALL LETTER C WITH CEDILLA
u'\xe9' # 0x8E -> LATIN SMALL LETTER E WITH ACUTE
u'\xe8' # 0x8F -> LATIN SMALL LETTER E WITH GRAVE
u'\xea' # 0x90 -> LATIN SMALL LETTER E WITH CIRCUMFLEX
u'\xeb' # 0x91 -> LATIN SMALL LETTER E WITH DIAERESIS
u'\xa3' # 0x92 -> POUND SIGN
u'\u2122' # 0x93 -> TRADE MARK SIGN
u'\xee' # 0x94 -> LATIN SMALL LETTER I WITH CIRCUMFLEX
u'\xef' # 0x95 -> LATIN SMALL LETTER I WITH DIAERESIS
u'\u2022' # 0x96 -> BULLET
u'\xbd' # 0x97 -> VULGAR FRACTION ONE HALF
u'\u2030' # 0x98 -> PER MILLE SIGN
u'\xf4' # 0x99 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
u'\xf6' # 0x9A -> LATIN SMALL LETTER O WITH DIAERESIS
u'\xa6' # 0x9B -> BROKEN BAR
u'\u20ac' # 0x9C -> EURO SIGN # before Mac OS 9.2.2, was SOFT HYPHEN
u'\xf9' # 0x9D -> LATIN SMALL LETTER U WITH GRAVE
u'\xfb' # 0x9E -> LATIN SMALL LETTER U WITH CIRCUMFLEX
u'\xfc' # 0x9F -> LATIN SMALL LETTER U WITH DIAERESIS
u'\u2020' # 0xA0 -> DAGGER
u'\u0393' # 0xA1 -> GREEK CAPITAL LETTER GAMMA
u'\u0394' # 0xA2 -> GREEK CAPITAL LETTER DELTA
u'\u0398' # 0xA3 -> GREEK CAPITAL LETTER THETA
u'\u039b' # 0xA4 -> GREEK CAPITAL LETTER LAMDA
u'\u039e' # 0xA5 -> GREEK CAPITAL LETTER XI
u'\u03a0' # 0xA6 -> GREEK CAPITAL LETTER PI
u'\xdf' # 0xA7 -> LATIN SMALL LETTER SHARP S
u'\xae' # 0xA8 -> REGISTERED SIGN
u'\xa9' # 0xA9 -> COPYRIGHT SIGN
u'\u03a3' # 0xAA -> GREEK CAPITAL LETTER SIGMA
u'\u03aa' # 0xAB -> GREEK CAPITAL LETTER IOTA WITH DIALYTIKA
u'\xa7' # 0xAC -> SECTION SIGN
u'\u2260' # 0xAD -> NOT EQUAL TO
u'\xb0' # 0xAE -> DEGREE SIGN
u'\xb7' # 0xAF -> MIDDLE DOT
u'\u0391' # 0xB0 -> GREEK CAPITAL LETTER ALPHA
u'\xb1' # 0xB1 -> PLUS-MINUS SIGN
u'\u2264' # 0xB2 -> LESS-THAN OR EQUAL TO
u'\u2265' # 0xB3 -> GREATER-THAN OR EQUAL TO
u'\xa5' # 0xB4 -> YEN SIGN
u'\u0392' # 0xB5 -> GREEK CAPITAL LETTER BETA
u'\u0395' # 0xB6 -> GREEK CAPITAL LETTER EPSILON
u'\u0396' # 0xB7 -> GREEK CAPITAL LETTER ZETA
u'\u0397' # 0xB8 -> GREEK CAPITAL LETTER ETA
u'\u0399' # 0xB9 -> GREEK CAPITAL LETTER IOTA
u'\u039a' # 0xBA -> GREEK CAPITAL LETTER KAPPA
u'\u039c' # 0xBB -> GREEK CAPITAL LETTER MU
u'\u03a6' # 0xBC -> GREEK CAPITAL LETTER PHI
u'\u03ab' # 0xBD -> GREEK CAPITAL LETTER UPSILON WITH DIALYTIKA
u'\u03a8' # 0xBE -> GREEK CAPITAL LETTER PSI
u'\u03a9' # 0xBF -> GREEK CAPITAL LETTER OMEGA
u'\u03ac' # 0xC0 -> GREEK SMALL LETTER ALPHA WITH TONOS
u'\u039d' # 0xC1 -> GREEK CAPITAL LETTER NU
u'\xac' # 0xC2 -> NOT SIGN
u'\u039f' # 0xC3 -> GREEK CAPITAL LETTER OMICRON
u'\u03a1' # 0xC4 -> GREEK CAPITAL LETTER RHO
u'\u2248' # 0xC5 -> ALMOST EQUAL TO
u'\u03a4' # 0xC6 -> GREEK CAPITAL LETTER TAU
u'\xab' # 0xC7 -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\xbb' # 0xC8 -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
u'\u2026' # 0xC9 -> HORIZONTAL ELLIPSIS
u'\xa0' # 0xCA -> NO-BREAK SPACE
u'\u03a5' # 0xCB -> GREEK CAPITAL LETTER UPSILON
u'\u03a7' # 0xCC -> GREEK CAPITAL LETTER CHI
u'\u0386' # 0xCD -> GREEK CAPITAL LETTER ALPHA WITH TONOS
u'\u0388' # 0xCE -> GREEK CAPITAL LETTER EPSILON WITH TONOS
u'\u0153' # 0xCF -> LATIN SMALL LIGATURE OE
u'\u2013' # 0xD0 -> EN DASH
u'\u2015' # 0xD1 -> HORIZONTAL BAR
u'\u201c' # 0xD2 -> LEFT DOUBLE QUOTATION MARK
u'\u201d' # 0xD3 -> RIGHT DOUBLE QUOTATION MARK
u'\u2018' # 0xD4 -> LEFT SINGLE QUOTATION MARK
u'\u2019' # 0xD5 -> RIGHT SINGLE QUOTATION MARK
u'\xf7' # 0xD6 -> DIVISION SIGN
u'\u0389' # 0xD7 -> GREEK CAPITAL LETTER ETA WITH TONOS
u'\u038a' # 0xD8 -> GREEK CAPITAL LETTER IOTA WITH TONOS
u'\u038c' # 0xD9 -> GREEK CAPITAL LETTER OMICRON WITH TONOS
u'\u038e' # 0xDA -> GREEK CAPITAL LETTER UPSILON WITH TONOS
u'\u03ad' # 0xDB -> GREEK SMALL LETTER EPSILON WITH TONOS
u'\u03ae' # 0xDC -> GREEK SMALL LETTER ETA WITH TONOS
u'\u03af' # 0xDD -> GREEK SMALL LETTER IOTA WITH TONOS
u'\u03cc' # 0xDE -> GREEK SMALL LETTER OMICRON WITH TONOS
u'\u038f' # 0xDF -> GREEK CAPITAL LETTER OMEGA WITH TONOS
u'\u03cd' # 0xE0 -> GREEK SMALL LETTER UPSILON WITH TONOS
u'\u03b1' # 0xE1 -> GREEK SMALL LETTER ALPHA
u'\u03b2' # 0xE2 -> GREEK SMALL LETTER BETA
u'\u03c8' # 0xE3 -> GREEK SMALL LETTER PSI
u'\u03b4' # 0xE4 -> GREEK SMALL LETTER DELTA
u'\u03b5' # 0xE5 -> GREEK SMALL LETTER EPSILON
u'\u03c6' # 0xE6 -> GREEK SMALL LETTER PHI
u'\u03b3' # 0xE7 -> GREEK SMALL LETTER GAMMA
u'\u03b7' # 0xE8 -> GREEK SMALL LETTER ETA
u'\u03b9' # 0xE9 -> GREEK SMALL LETTER IOTA
u'\u03be' # 0xEA -> GREEK SMALL LETTER XI
u'\u03ba' # 0xEB -> GREEK SMALL LETTER KAPPA
u'\u03bb' # 0xEC -> GREEK SMALL LETTER LAMDA
u'\u03bc' # 0xED -> GREEK SMALL LETTER MU
u'\u03bd' # 0xEE -> GREEK SMALL LETTER NU
u'\u03bf' # 0xEF -> GREEK SMALL LETTER OMICRON
u'\u03c0' # 0xF0 -> GREEK SMALL LETTER PI
u'\u03ce' # 0xF1 -> GREEK SMALL LETTER OMEGA WITH TONOS
u'\u03c1' # 0xF2 -> GREEK SMALL LETTER RHO
u'\u03c3' # 0xF3 -> GREEK SMALL LETTER SIGMA
u'\u03c4' # 0xF4 -> GREEK SMALL LETTER TAU
u'\u03b8' # 0xF5 -> GREEK SMALL LETTER THETA
u'\u03c9' # 0xF6 -> GREEK SMALL LETTER OMEGA
u'\u03c2' # 0xF7 -> GREEK SMALL LETTER FINAL SIGMA
u'\u03c7' # 0xF8 -> GREEK SMALL LETTER CHI
u'\u03c5' # 0xF9 -> GREEK SMALL LETTER UPSILON
u'\u03b6' # 0xFA -> GREEK SMALL LETTER ZETA
u'\u03ca' # 0xFB -> GREEK SMALL LETTER IOTA WITH DIALYTIKA
u'\u03cb' # 0xFC -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA
u'\u0390' # 0xFD -> GREEK SMALL LETTER IOTA WITH DIALYTIKA AND TONOS
u'\u03b0' # 0xFE -> GREEK SMALL LETTER UPSILON WITH DIALYTIKA AND TONOS
u'\xad' # 0xFF -> SOFT HYPHEN # before Mac OS 9.2.2, was undefined
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
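### Usage sketch (the codec is normally reached via the encodings registry):
#   u'\u03b1\u03b2'.encode('mac-greek') -> '\xe1\xe2'
#   '\xe1\xe2'.decode('mac-greek') -> u'\u03b1\u03b2'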
| apache-2.0 |
Blackclaws/client | src/connectivity/helper.py | 1 | 8220 | from __future__ import division
from functools import partial
from PyQt4.QtCore import QObject, pyqtSignal, QTimer, Qt
from PyQt4.QtNetwork import QUdpSocket, QHostAddress, QAbstractSocket
import time
from connectivity import QTurnSocket
from connectivity.relay import Relay
from connectivity.turn import TURNState
from decorators import with_logger
from PyQt4 import QtGui, uic
@with_logger
class RelayTest(QObject):
finished = pyqtSignal()
progress = pyqtSignal(str)
def __init__(self, socket):
QObject.__init__(self)
self._socket = socket
self.start_time, self.end_time = None, None
self.addr = None
self.received = set()
self._sent, self._total = 0, 250
self.host, self.port = None, None
self._sendtimer = QTimer()
self._sendtimer.timeout.connect(self.send)
def start_relay_test(self, address):
self.addr = address
self._logger.info("Starting relay test")
self._socket.data.connect(self.receive)
self._socket.permit(self.addr)
self.start_time, self.end_time = time.time(), None
host, port = self.addr
self.host, self.port = QHostAddress(host), port
self._sent = 0
self.received = set()
self._sendtimer.start(20)
        QTimer.singleShot(10000, self.end)  # singleShot is static; no timer instance needed
@property
def report(self):
return "Relay address: {}\nReceived {} packets in {}s. {}% loss.". \
format("{}:{}".format(*self.addr),
len(self.received),
round((time.time()-self.start_time), 2),
round(100-(len(self.received)/self._sent) * 100), 2)
def send(self):
self._socket.writeDatagram(('{}'.format(self._sent)).encode(), self.host, self.port)
if self._sent >= self._total:
self._sendtimer.stop()
self._sent += 1
def end(self):
if self.end_time:
return
self.end_time = time.time()
self._sendtimer.stop()
self._logger.info('Relay test finished')
self.finished.emit()
        self._socket.data.disconnect(self.receive)
def receive(self, sender, data):
self.received.add(int(data.decode()))
self.progress.emit(self.report)
if len(self.received) == self._total:
self.end()
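# Usage sketch for RelayTest (socket and relay address are hypothetical):
#   test = RelayTest(turn_socket)
#   test.progress.connect(on_progress)   # any slot taking a str
#   test.start_relay_test(('203.0.113.5', 3478))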
@with_logger
class ConnectivityHelper(QObject):
connectivity_status_established = pyqtSignal(str, str)
# Emitted when a peer is bound to a local port
peer_bound = pyqtSignal(str, int, int)
ready = pyqtSignal()
relay_test_finished = pyqtSignal()
relay_test_progress = pyqtSignal(str)
error = pyqtSignal(str)
def __init__(self, client, port):
QObject.__init__(self)
self._client = client
self._port = port
self.game_port = port+1
self._socket = QTurnSocket(port, self._on_data)
self._socket.state_changed.connect(self.turn_state_changed)
self._client.subscribe_to('connectivity', self)
self.relay_address, self.mapped_address = None, None
self._relay_test = None
self._relays = {}
self.state = None
self.addr = None
@property
def is_ready(self):
        return (self.relay_address is not None
                and self.relay_address != [None, None]
                and self.mapped_address is not None
                and self._socket.state() == QAbstractSocket.BoundState)
def start_test(self):
self.send('InitiateTest', [self._port])
def start_relay_test(self):
if not self._relay_test:
self._relay_test = RelayTest(self._socket)
self._relay_test.finished.connect(self.relay_test_finished.emit)
self._relay_test.progress.connect(self.relay_test_progress.emit)
if not self._socket.turn_state == TURNState.BOUND:
self._socket.connect_to_relay()
self._socket.bound.connect(self._relay_test.start_relay_test, Qt.UniqueConnection)
def _cleanup():
try:
self._socket.bound.disconnect(self._relay_test.start_relay_test)
except TypeError:
# For some reason pyqt raises _TypeError_ here
pass
self._relay_test.finished.connect(_cleanup, Qt.UniqueConnection)
else:
self._relay_test.start_relay_test(self.mapped_address)
def turn_state_changed(self, state):
if state == TURNState.BOUND:
self.relay_address = self._socket.relay_address
self.mapped_address = self._socket.relay_address
self.ready.emit()
def handle_SendNatPacket(self, msg):
target, message = msg['args']
host, port = target.split(':')
if self.state is None and self._socket.localPort() == self._port:
self._socket.randomize_port()
self._socket.writeDatagram(b'\x08'+message.encode(), QHostAddress(host), int(port))
def handle_ConnectivityState(self, msg):
from client import ClientState
state, addr = msg['args']
if state == 'BLOCKED':
self._logger.warning("Outbound traffic is blocked")
QtGui.QMessageBox.warning(None, "Traffic Blocked", "Your outbound traffic appears to be blocked. Try restarting FAF. <br/> If the error persists please contact a moderator and send your logs. <br/> We are already working on a solution to this problem.")
self._client.state = ClientState.NONE
else:
host, port = addr.split(':')
self.state, self.mapped_address = state, (host, port)
            self.connectivity_status_established.emit(self.state, addr)
self._logger.info("Connectivity state is {}, mapped address: {}".format(state, addr))
def handle_message(self, msg):
command = msg.get('command')
if command == 'CreatePermission':
self._socket.permit(msg['args'])
def bind(self, (host, port), login, peer_id):
host, port = host, int(port)
relay = Relay(self.game_port, login, peer_id, partial(self.send_udp, (host, port)))
relay.bound.connect(partial(self.peer_bound.emit, login, peer_id))
relay.listen()
self._relays[(host, port)] = relay
def send(self, command, args):
self._client.send({
'command': command,
'target': 'connectivity',
'args': args or []
})
def prepare(self):
if self.state == 'STUN' and not self._socket.turn_state == TURNState.BOUND:
self._socket.connect_to_relay()
elif self.state == 'BLOCKED':
pass
else:
self.ready.emit()
def send_udp(self, (host, port), data):
host, port = host, int(port)
self._socket.sendto(data, (host, port))
def _on_data(self, addr, data):
host, port = addr
if not self._process_natpacket(data, addr):
try:
relay = self._relays[(host, int(port))]
self._logger.debug('{}<<{} len: {}'.format(relay.peer_id, addr, len(data)))
relay.send(data)
except KeyError:
self._logger.debug("No relay for data from {}:{}".format(host, port))
def _process_natpacket(self, data, addr):
"""
Process data from given address as a natpacket
Returns true iff it was processed as such
:param data:
:param addr:
:return:
"""
if data.startswith(b'\x08'):
host, port = addr
msg = data[1:].decode()
self.send('ProcessNatPacket',
["{}:{}".format(host, port), msg])
if msg.startswith('Bind'):
peer_id = int(msg[4:])
if (host, port) not in self._socket.bindings:
self._logger.info("Binding {} to {}".format((host, port), peer_id))
self._socket.bind_address((host, port))
self._logger.info("Processed bind request")
else:
self._logger.info("Unknown natpacket")
return True
| gpl-3.0 |
lmprice/ansible | lib/ansible/utils/module_docs_fragments/aci.py | 37 | 2806 | # -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers (@dagwieers) <dag@wieers.com>
# Copyright: (c) 2017, Swetha Chunduri (@schunduri)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
class ModuleDocFragment(object):
# Standard files documentation fragment
DOCUMENTATION = '''
options:
host:
description:
- IP Address or hostname of APIC resolvable by Ansible control host.
required: yes
aliases: [ hostname ]
port:
description:
- Port number to be used for REST connection.
- The default value depends on parameter `use_ssl`.
username:
description:
- The username to use for authentication.
default: admin
aliases: [ user ]
password:
description:
- The password to use for authentication.
- This option is mutual exclusive with C(private_key). If C(private_key) is provided too, it will be used instead.
required: yes
private_key:
description:
- PEM formatted file that contains your private key to be used for signature-based authentication.
- The name of the key (without extension) is used as the certificate name in ACI, unless C(certificate_name) is specified.
- This option is mutual exclusive with C(password). If C(password) is provided too, it will be ignored.
required: yes
aliases: [ cert_key ]
certificate_name:
description:
- The X.509 certificate name attached to the APIC AAA user used for signature-based authentication.
- It defaults to the C(private_key) basename, without extension.
aliases: [ cert_name ]
output_level:
description:
- Influence the output of this ACI module.
- C(normal) means the standard output, incl. C(current) dict
- C(info) adds informational output, incl. C(previous), C(proposed) and C(sent) dicts
- C(debug) adds debugging output, incl. C(filter_string), C(method), C(response), C(status) and C(url) information
choices: [ debug, info, normal ]
default: normal
timeout:
description:
- The socket level timeout in seconds.
type: int
default: 30
use_proxy:
description:
- If C(no), it will not use a proxy, even if one is defined in an environment variable on the target hosts.
type: bool
default: 'yes'
use_ssl:
description:
- If C(no), an HTTP connection will be used instead of the default HTTPS connection.
type: bool
default: 'yes'
validate_certs:
description:
- If C(no), SSL certificates will not be validated.
- This should only set to C(no) when used on personally controlled sites using self-signed certificates.
type: bool
default: 'yes'
notes:
- Please read the :ref:`aci_guide` for more detailed information on how to manage your ACI infrastructure using Ansible.
'''
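# Usage sketch: ACI modules include these shared options by declaring
#   extends_documentation_fragment: aci
# in their own DOCUMENTATION block; Ansible merges the fragment at doc-build time.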
| gpl-3.0 |
ycl2045/nova-master | nova/scheduler/rpcapi.py | 8 | 5246 | # Copyright 2013, Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Client side of the scheduler manager RPC API.
"""
from oslo.config import cfg
from oslo import messaging
from nova.objects import base as objects_base
from nova.openstack.common import jsonutils
from nova import rpc
rpcapi_opts = [
cfg.StrOpt('scheduler_topic',
default='scheduler',
help='The topic scheduler nodes listen on'),
]
CONF = cfg.CONF
CONF.register_opts(rpcapi_opts)
rpcapi_cap_opt = cfg.StrOpt('scheduler',
help='Set a version cap for messages sent to scheduler services')
CONF.register_opt(rpcapi_cap_opt, 'upgrade_levels')
class SchedulerAPI(object):
'''Client side of the scheduler rpc API.
API version history:
1.0 - Initial version.
1.1 - Changes to prep_resize():
- remove instance_uuid, add instance
- remove instance_type_id, add instance_type
- remove topic, it was unused
1.2 - Remove topic from run_instance, it was unused
1.3 - Remove instance_id, add instance to live_migration
1.4 - Remove update_db from prep_resize
1.5 - Add reservations argument to prep_resize()
1.6 - Remove reservations argument to run_instance()
1.7 - Add create_volume() method, remove topic from live_migration()
2.0 - Remove 1.x backwards compat
2.1 - Add image_id to create_volume()
2.2 - Remove reservations argument to create_volume()
2.3 - Remove create_volume()
2.4 - Change update_service_capabilities()
- accepts a list of capabilities
2.5 - Add get_backdoor_port()
2.6 - Add select_hosts()
... Grizzly supports message version 2.6. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 2.6.
2.7 - Add select_destinations()
2.8 - Deprecate prep_resize() -- JUST KIDDING. It is still used
by the compute manager for retries.
2.9 - Added the legacy_bdm_in_spec parameter to run_instance()
... Havana supports message version 2.9. So, any changes to existing
methods in 2.x after that point should be done such that they can
handle the version_cap being set to 2.9.
... - Deprecated live_migration() call, moved to conductor
... - Deprecated select_hosts()
3.0 - Removed backwards compat
'''
VERSION_ALIASES = {
'grizzly': '2.6',
'havana': '2.9',
'icehouse': '3.0',
}
def __init__(self):
super(SchedulerAPI, self).__init__()
target = messaging.Target(topic=CONF.scheduler_topic, version='3.0')
version_cap = self.VERSION_ALIASES.get(CONF.upgrade_levels.scheduler,
CONF.upgrade_levels.scheduler)
serializer = objects_base.NovaObjectSerializer()
self.client = rpc.get_client(target, version_cap=version_cap,
serializer=serializer)
def select_destinations(self, ctxt, request_spec, filter_properties):
cctxt = self.client.prepare()
return cctxt.call(ctxt, 'select_destinations',
request_spec=request_spec, filter_properties=filter_properties)
def run_instance(self, ctxt, request_spec, admin_password,
injected_files, requested_networks, is_first_time,
filter_properties, legacy_bdm_in_spec=True):
msg_kwargs = {'request_spec': request_spec,
'admin_password': admin_password,
'injected_files': injected_files,
'requested_networks': requested_networks,
'is_first_time': is_first_time,
'filter_properties': filter_properties,
'legacy_bdm_in_spec': legacy_bdm_in_spec}
cctxt = self.client.prepare()
cctxt.cast(ctxt, 'run_instance', **msg_kwargs)
def prep_resize(self, ctxt, instance, instance_type, image,
request_spec, filter_properties, reservations):
instance_p = jsonutils.to_primitive(instance)
instance_type_p = jsonutils.to_primitive(instance_type)
reservations_p = jsonutils.to_primitive(reservations)
image_p = jsonutils.to_primitive(image)
cctxt = self.client.prepare()
cctxt.cast(ctxt, 'prep_resize',
instance=instance_p, instance_type=instance_type_p,
image=image_p, request_spec=request_spec,
filter_properties=filter_properties,
reservations=reservations_p)
| apache-2.0 |
hardaker/git-soc | gitSOC/cmd/info.py | 1 | 1114 | #!/usr/bin/python
import yaml
import gitSOC.cmd
import gitSOC.managedRepo
import git
import os
import sys
class Info(gitSOC.cmd.Cmd):
"""Dumps information about the registration of the current directory,
if it has been registered with git-soc (or else say it hasn't been yet).
"""
def __init__(self, soc, baseargs = {}):
gitSOC.cmd.Cmd.__init__(self, soc, baseargs)
def run(self, args):
        linkname = os.path.join(os.getcwd(), ".git", "git-soc.yml")
# read the yaml
try:
file = open(linkname, "r")
except:
print("Can not find registration information in .git/git-soc.yml")
exit(1)
# XXX: search for it instead in the full set
        # PyYAML >= 5.1 wants an explicit Loader; older versions lack FullLoader
        try:
            out = yaml.load(file, Loader=yaml.FullLoader)
        except AttributeError:
            out = yaml.load(file)
# should be prettier than this:
if not out:
print("could not parse the registration information from .git/git-soc.yml")
exit(1)
file = open(linkname, "r")
for line in file:
sys.stdout.write(line)
| gpl-3.0 |
mdietrichc2c/OCB | addons/crm_partner_assign/__openerp__.py | 114 | 2453 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Partner Assignation & Geolocation',
'version': '1.0',
'category': 'Customer Relationship Management',
'description': """
This is the module used by OpenERP SA to redirect customers to its partners, based on geolocation.
======================================================================================================
This modules lets you geolocate Leads, Opportunities and Partners based on their address.
Once the coordinates of the Lead/Opportunity is known, they can be automatically assigned
to an appropriate local partner, based on the distance and the weight that was assigned to the partner.
""",
'author': 'OpenERP SA',
'depends': ['base_geolocalize', 'crm', 'account', 'portal'],
'data': [
'security/ir.model.access.csv',
'res_partner_view.xml',
'wizard/crm_forward_to_partner_view.xml',
'wizard/crm_channel_interested_view.xml',
'crm_lead_view.xml',
'crm_partner_assign_data.xml',
'crm_portal_view.xml',
'portal_data.xml',
'report/crm_lead_report_view.xml',
'report/crm_partner_report_view.xml',
],
'demo': [
'res_partner_demo.xml',
'crm_lead_demo.xml'
],
'test': ['test/partner_assign.yml'],
'installable': True,
'auto_install': False,
'images': ['images/partner_geo_localization.jpeg','images/partner_grade.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
scipy/scipy | scipy/special/_precompute/lambertw.py | 12 | 2001 | """Compute a Pade approximation for the principle branch of the
Lambert W function around 0 and compare it to various other
approximations.
"""
import numpy as np
try:
import mpmath
import matplotlib.pyplot as plt # type: ignore[import]
except ImportError:
pass
def lambertw_pade():
derivs = [mpmath.diff(mpmath.lambertw, 0, n=n) for n in range(6)]
p, q = mpmath.pade(derivs, 3, 2)
return p, q
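# NB: mpmath.pade returns numerator/denominator coefficients with the
# constant term first; main() reverses them since np.polyval expects the
# highest-degree coefficient first.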
def main():
print(__doc__)
with mpmath.workdps(50):
p, q = lambertw_pade()
p, q = p[::-1], q[::-1]
print("p = {}".format(p))
print("q = {}".format(q))
x, y = np.linspace(-1.5, 1.5, 75), np.linspace(-1.5, 1.5, 75)
x, y = np.meshgrid(x, y)
z = x + 1j*y
lambertw_std = []
for z0 in z.flatten():
lambertw_std.append(complex(mpmath.lambertw(z0)))
lambertw_std = np.array(lambertw_std).reshape(x.shape)
fig, axes = plt.subplots(nrows=3, ncols=1)
# Compare Pade approximation to true result
p = np.array([float(p0) for p0 in p])
q = np.array([float(q0) for q0 in q])
pade_approx = np.polyval(p, z)/np.polyval(q, z)
pade_err = abs(pade_approx - lambertw_std)
axes[0].pcolormesh(x, y, pade_err)
# Compare two terms of asymptotic series to true result
asy_approx = np.log(z) - np.log(np.log(z))
asy_err = abs(asy_approx - lambertw_std)
axes[1].pcolormesh(x, y, asy_err)
# Compare two terms of the series around the branch point to the
# true result
p = np.sqrt(2*(np.exp(1)*z + 1))
series_approx = -1 + p - p**2/3
series_err = abs(series_approx - lambertw_std)
im = axes[2].pcolormesh(x, y, series_err)
fig.colorbar(im, ax=axes.ravel().tolist())
plt.show()
fig, ax = plt.subplots(nrows=1, ncols=1)
pade_better = pade_err < asy_err
im = ax.pcolormesh(x, y, pade_better)
t = np.linspace(-0.3, 0.3)
ax.plot(-2.5*abs(t) - 0.2, t, 'r')
fig.colorbar(im, ax=ax)
plt.show()
if __name__ == '__main__':
main()
| bsd-3-clause |
mr-karan/coala | tests/results/result_actions/ApplyPatchActionTest.py | 7 | 6616 | import unittest
import os
from os.path import isfile
from coalib.misc.ContextManagers import make_temp
from coalib.results.Diff import Diff
from coalib.results.Result import Result
from coalib.results.result_actions.ApplyPatchAction import ApplyPatchAction
from coalib.settings.Section import Section
class ApplyPatchActionTest(unittest.TestCase):
def test_apply(self):
uut = ApplyPatchAction()
with make_temp() as f_a, make_temp() as f_b, make_temp() as f_c:
file_dict = {
f_a: ["1\n", "2\n", "3\n"],
f_b: ["1\n", "2\n", "3\n"],
f_c: ["1\n", "2\n", "3\n"]
}
expected_file_dict = {
f_a: ["1\n", "3_changed\n"],
f_b: ["1\n", "2\n", "3_changed\n"],
f_c: ["1\n", "2\n", "3\n"]
}
file_diff_dict = {}
diff = Diff(file_dict[f_a])
diff.delete_line(2)
uut.apply_from_section(Result("origin", "msg", diffs={f_a: diff}),
file_dict,
file_diff_dict,
Section("t"))
diff = Diff(file_dict[f_a])
diff.change_line(3, "3\n", "3_changed\n")
uut.apply_from_section(Result("origin", "msg", diffs={f_a: diff}),
file_dict,
file_diff_dict,
Section("t"))
diff = Diff(file_dict[f_b])
diff.change_line(3, "3\n", "3_changed\n")
uut.apply(Result("origin", "msg", diffs={f_b: diff}),
file_dict,
file_diff_dict)
for filename in file_diff_dict:
file_dict[filename] = file_diff_dict[filename].modified
self.assertEqual(file_dict, expected_file_dict)
with open(f_a) as fa:
self.assertEqual(file_dict[f_a], fa.readlines())
with open(f_b) as fb:
self.assertEqual(file_dict[f_b], fb.readlines())
with open(f_c) as fc:
# File c is unchanged and should be untouched
self.assertEqual([], fc.readlines())
def test_apply_orig_option(self):
uut = ApplyPatchAction()
with make_temp() as f_a, make_temp() as f_b:
file_dict = {
f_a: ["1\n", "2\n", "3\n"],
f_b: ["1\n", "2\n", "3\n"]
}
expected_file_dict = {
f_a: ["1\n", "2\n", "3_changed\n"],
f_b: ["1\n", "2\n", "3_changed\n"]
}
file_diff_dict = {}
diff = Diff(file_dict[f_a])
diff.change_line(3, "3\n", "3_changed\n")
uut.apply(Result("origin", "msg", diffs={f_a: diff}),
file_dict,
file_diff_dict,
no_orig=True)
diff = Diff(file_dict[f_b])
diff.change_line(3, "3\n", "3_changed\n")
uut.apply(Result("origin", "msg", diffs={f_b: diff}),
file_dict,
file_diff_dict,
no_orig=False)
self.assertFalse(isfile(f_a+".orig"))
self.assertTrue(isfile(f_b+".orig"))
for filename in file_diff_dict:
file_dict[filename] = file_diff_dict[filename].modified
self.assertEqual(file_dict, expected_file_dict)
def test_apply_rename(self):
uut = ApplyPatchAction()
with make_temp() as f_a:
file_dict = {f_a: ["1\n", "2\n", "3\n"]}
expected_file_dict = {f_a+".renamed":
["1\n", "2_changed\n", "3_changed\n"]}
file_diff_dict = {}
diff = Diff(file_dict[f_a], rename=f_a+".renamed")
diff.change_line(3, "3\n", "3_changed\n")
uut.apply(Result("origin", "msg", diffs={f_a: diff}),
file_dict,
file_diff_dict)
self.assertTrue(isfile(f_a+".orig"))
self.assertTrue(isfile(f_a+".renamed"))
self.assertFalse(isfile(f_a))
diff = Diff(file_dict[f_a])
diff.change_line(2, "2\n", "2_changed\n")
uut.apply(Result("origin", "msg", diffs={f_a: diff}),
file_dict,
file_diff_dict)
self.assertFalse(isfile(f_a+".renamed.orig"))
file_dict = {f_a+".renamed": open(f_a+".renamed").readlines()}
self.assertEqual(file_dict, expected_file_dict)
# Recreate file so that context manager make_temp() can delete it
open(f_a, 'w').close()
def test_apply_delete(self):
uut = ApplyPatchAction()
with make_temp() as f_a:
file_dict = {f_a: ["1\n", "2\n", "3\n"]}
file_diff_dict = {}
diff = Diff(file_dict[f_a], delete=True)
uut.apply(Result("origin", "msg", diffs={f_a: diff}),
file_dict,
file_diff_dict)
self.assertFalse(isfile(f_a))
self.assertTrue(isfile(f_a+".orig"))
os.remove(f_a+".orig")
diff = Diff(file_dict[f_a])
diff.change_line(3, "3\n", "3_changed\n")
uut.apply(Result("origin", "msg", diffs={f_a: diff}),
file_dict,
file_diff_dict)
self.assertFalse(isfile(f_a+".orig"))
# Recreate file so that context manager make_temp() can delete it
open(f_a, 'w').close()
def test_is_applicable(self):
diff = Diff(["1\n", "2\n", "3\n"])
diff.delete_line(2)
patch_result = Result("", "", diffs={'f': diff})
self.assertTrue(
ApplyPatchAction.is_applicable(patch_result, {}, {}))
def test_is_applicable_conflict(self):
diff = Diff(["1\n", "2\n", "3\n"])
diff.add_lines(2, ['a line'])
conflict_result = Result("", "", diffs={'f': diff})
# Applying the same diff twice will result in a conflict
self.assertFalse(
ApplyPatchAction.is_applicable(conflict_result, {}, {'f': diff}))
def test_is_applicable_empty_patch(self):
empty_patch_result = Result("", "", diffs={})
self.assertFalse(
ApplyPatchAction.is_applicable(empty_patch_result, {}, {}))
def test_is_applicable_without_patch(self):
result = Result("", "")
self.assertFalse(ApplyPatchAction.is_applicable(result, {}, {}))
| agpl-3.0 |
victoredwardocallaghan/xen | tools/python/xen/xend/server/vfbif.py | 43 | 3171 | from xen.xend.server.DevController import DevController
from xen.xend.XendLogging import log
from xen.xend.XendError import VmError
import xen.xend
import os
CONFIG_ENTRIES = ['type', 'vncdisplay', 'vnclisten', 'vncpasswd', 'vncunused',
'display', 'xauthority', 'keymap', 'vnc', 'sdl', 'uuid',
'location', 'protocol', 'opengl']
class VfbifController(DevController):
"""Virtual frame buffer controller. Handles all vfb devices for a domain.
Note that we only support a single vfb per domain at the moment.
"""
def __init__(self, vm):
DevController.__init__(self, vm)
def getDeviceDetails(self, config):
"""@see DevController.getDeviceDetails"""
back = dict([(k, str(config[k])) for k in CONFIG_ENTRIES
if config.has_key(k)])
devid = 0
return (devid, back, {})
def getDeviceConfiguration(self, devid, transaction = None):
result = DevController.getDeviceConfiguration(self, devid, transaction)
if transaction is None:
devinfo = self.readBackend(devid, *CONFIG_ENTRIES)
else:
devinfo = self.readBackendTxn(transaction, devid, *CONFIG_ENTRIES)
return dict([(CONFIG_ENTRIES[i], devinfo[i])
for i in range(len(CONFIG_ENTRIES))
if devinfo[i] is not None])
def waitForDevice(self, devid):
# is a qemu-dm managed device, don't wait for hotplug for these.
return
    def reconfigureDevice(self, devid, config):
""" Only allow appending location information of vnc port into
xenstore."""
if 'location' in config:
(devid, back, front) = self.getDeviceDetails(config)
self.writeBackend(devid, 'location', config['location'])
return back.get('uuid')
# devid is only bound inside the branch above; vfb devices always use devid 0
raise VmError('Refusing to reconfigure device vfb:%d' % 0)
def destroyDevice(self, devid, force):
# remove the backend xenstore entries no matter what
# because we kill qemu-dm with extreme prejudice
# not giving it a chance to remove them itself
DevController.destroyDevice(self, devid, True)
def migrate(self, deviceConfig, network, dst, step, domName):
# Handled by qemu-dm so no action needed
return 0
class VkbdifController(DevController):
"""Virtual keyboard controller. Handles all vkbd devices for a domain.
"""
def getDeviceDetails(self, config):
"""@see DevController.getDeviceDetails"""
devid = 0
back = {}
front = {}
return (devid, back, front)
def waitForDevice(self, config):
# is a qemu-dm managed device, don't wait for hotplug for these.
return
def destroyDevice(self, devid, force):
# remove the backend xenstore entries no matter what
# because we kill qemu-dm with extreme prejudice
# not giving it a chance to remove them itself
DevController.destroyDevice(self, devid, True)
def migrate(self, deviceConfig, network, dst, step, domName):
# Handled by qemu-dm so no action needed
return 0
| gpl-2.0 |
nx111/openembeded_openpli2.1_nx111 | contrib/python/generate-manifest-2.5.py | 45 | 14621 | #!/usr/bin/env python
# generate Python Manifest for the OpenEmbedded build system
# (C) 2002-2008 Michael 'Mickey' Lauer <mlauer@vanille-media.de>
# (C) 2007 Jeremy Laine
# licensed under MIT, see COPYING.MIT
import os
import sys
import time
VERSION = "2.5.2"
BASEREV = 0
__author__ = "Michael 'Mickey' Lauer <mlauer@vanille-media.de>"
__version__ = "20081209"
class MakefileMaker:
def __init__( self, outfile ):
"""initialize"""
self.packages = {}
self.targetPrefix = "${libdir}/python%s/" % VERSION[:3]
self.output = outfile
self.out( """
# WARNING: This file is AUTO GENERATED: Manual edits will be lost next time I regenerate the file.
# Generator: '%s' Version %s (C) 2002-2008 Michael 'Mickey' Lauer <mlauer@vanille-media.de>
# Visit the Python for Embedded Systems Site => http://www.Vanille.de/projects/python.spy
""" % ( sys.argv[0], __version__ ) )
#
# helper functions
#
def out( self, data ):
"""print a line to the output file"""
self.output.write( "%s\n" % data )
def setPrefix( self, targetPrefix ):
"""set a file prefix for addPackage files"""
self.targetPrefix = targetPrefix
def doProlog( self ):
self.out( """ """ )
self.out( "" )
def addPackage( self, revision, name, description, dependencies, filenames ):
"""add a package to the Makefile"""
if type( filenames ) == type( "" ):
filenames = filenames.split()
fullFilenames = []
for filename in filenames:
if filename[0] != "$":
fullFilenames.append( "%s%s" % ( self.targetPrefix, filename ) )
else:
fullFilenames.append( filename )
self.packages[name] = revision, description, dependencies, fullFilenames
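# Illustrative expansion (hypothetical call): with VERSION = "2.5.2" the
# default targetPrefix is "${libdir}/python2.5/", so
#   addPackage( 0, "demo", "Demo package", "", "os.* ${bindir}/foo" )
# stores fullFilenames == ["${libdir}/python2.5/os.*", "${bindir}/foo"];
# entries beginning with "$" bypass the prefix.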
def doBody( self ):
"""generate body of Makefile"""
global VERSION
#
# generate provides line
#
provideLine = 'PROVIDES+="'
for name in self.packages:
provideLine += "%s " % name
provideLine += '"'
self.out( provideLine )
self.out( "" )
#
# generate package line
#
packageLine = 'PACKAGES="'
for name in self.packages:
packageLine += "%s " % name
packageLine += ' python-modules"'
self.out( packageLine )
self.out( "" )
#
# generate package variables
#
for name, data in self.packages.iteritems():
rev, desc, deps, files = data
#
# write out the description, revision and dependencies
#
self.out( 'DESCRIPTION_%s="%s"' % ( name, desc ) )
self.out( 'PR_%s="ml%d"' % ( name, rev + BASEREV ) )
self.out( 'RDEPENDS_%s="%s"' % ( name, deps ) )
line = 'FILES_%s="' % name
#
# check which directories to make in the temporary directory
#
dirset = {} # if python had a set-datatype this would be sufficient. for now, we're using a dict instead.
for target in files:
dirset[os.path.dirname( target )] = True
#
# generate which files to copy for the target (-dfR because whole directories are also allowed)
#
for target in files:
line += "%s " % target
line += '"'
self.out( line )
self.out( "" )
self.out( 'DESCRIPTION_python-modules="All Python modules"' )
line = 'RDEPENDS_python-modules="'
for name, data in self.packages.iteritems():
if name != 'python-core-dbg':
line += "%s " % name
self.out( "%s \"" % line )
self.out( 'ALLOW_EMPTY_python-modules = "1"' )
def doEpilog( self ):
self.out( """""" )
self.out( "" )
def make( self ):
self.doProlog()
self.doBody()
self.doEpilog()
if __name__ == "__main__":
if len( sys.argv ) > 1:
os.popen( "rm -f ./%s" % sys.argv[1] )
outfile = file( sys.argv[1], "w" )
else:
outfile = sys.stdout
m = MakefileMaker( outfile )
# Add packages here. Only specify dlopen-style library dependencies here, no ldd-style dependencies!
# Parameters: revision, name, description, dependencies, filenames
#
m.addPackage( 0, "python-core", "Python Interpreter and core modules (needed!)", "",
"__future__.* copy.* copy_reg.* ConfigParser.* " +
"getopt.* linecache.* new.* " +
"os.* posixpath.* struct.* " +
"warnings.* site.* stat.* " +
"UserDict.* UserList.* UserString.* " +
"lib-dynload/binascii.so lib-dynload/_struct.so lib-dynload/time.so " +
"lib-dynload/xreadlines.so types.* ${bindir}/python*" )
m.addPackage( 0, "python-core-dbg", "Python core module debug information", "python-core",
"lib-dynload/.debug ${bindir}/.debug ${libdir}/.debug" )
m.addPackage( 0, "python-devel", "Python Development Package", "python-core",
"${includedir} config" ) # package
m.addPackage( 0, "python-idle", "Python Integrated Development Environment", "python-core python-tkinter",
"${bindir}/idle idlelib" ) # package
m.addPackage( 0, "python-pydoc", "Python Interactive Help Support", "python-core python-lang python-stringold python-re",
"${bindir}/pydoc pydoc.*" )
m.addPackage( 0, "python-smtpd", "Python Simple Mail Transport Daemon", "python-core python-netserver python-email python-mime",
"${bindir}/smtpd.*" )
m.addPackage( 0, "python-audio", "Python Audio Handling", "python-core",
"wave.* chunk.* sndhdr.* lib-dynload/ossaudiodev.so lib-dynload/audioop.so" )
m.addPackage( 0, "python-bsddb", "Python Berkeley Database Bindings", "python-core",
"bsddb lib-dynload/_bsddb.so" ) # package
m.addPackage( 0, "python-codecs", "Python Codecs, Encodings & i18n Support", "python-core python-lang",
"codecs.* encodings gettext.* locale.* lib-dynload/_locale.so lib-dynload/unicodedata.so stringprep.* xdrlib.*" )
m.addPackage( 0, "python-compile", "Python Bytecode Compilation Support", "python-core",
"py_compile.* compileall.*" )
m.addPackage( 0, "python-compiler", "Python Compiler Support", "python-core",
"compiler" ) # package
m.addPackage( 0, "python-compression", "Python High Level Compression Support", "python-core python-zlib",
"gzip.* zipfile.* tarfile.*" )
m.addPackage( 0, "python-crypt", "Python Basic Cryptographic and Hashing Support", "python-core",
"hashlib.* md5.* sha.* lib-dynload/crypt.so lib-dynload/_hashlib.so lib-dynload/_sha256.so lib-dynload/_sha512.so" )
m.addPackage( 0, "python-textutils", "Python Option Parsing, Text Wrapping and Comma-Separated-Value Support", "python-core python-io python-re python-stringold",
"lib-dynload/_csv.so csv.* optparse.* textwrap.*" )
m.addPackage( 0, "python-curses", "Python Curses Support", "python-core",
"curses lib-dynload/_curses.so lib-dynload/_curses_panel.so" ) # directory + low level module
m.addPackage( 0, "python-ctypes", "Python C Types Support", "python-core",
"ctypes lib-dynload/_ctypes.so" ) # directory + low level module
m.addPackage( 0, "python-datetime", "Python Calendar and Time support", "python-core python-codecs",
"_strptime.* calendar.* lib-dynload/datetime.so" )
m.addPackage( 0, "python-db", "Python File-Based Database Support", "python-core",
"anydbm.* dumbdbm.* whichdb.* " )
m.addPackage( 0, "python-debugger", "Python Debugger", "python-core python-io python-lang python-re python-stringold python-shell python-pprint",
"bdb.* pdb.*" )
m.addPackage( 0, "python-difflib", "Python helpers for computing deltas between objects.", "python-lang python-re",
"difflib.*" )
m.addPackage( 0, "python-distutils", "Python Distribution Utilities", "python-core",
"config distutils" ) # package
m.addPackage( 0, "python-doctest", "Python framework for running examples in docstrings.", "python-core python-lang python-io python-re python-unittest python-debugger python-difflib",
"doctest.*" )
m.addPackage( 0, "python-email", "Python Email Support", "python-core python-io python-re python-mime python-audio python-image",
"email" ) # package
m.addPackage( 0, "python-fcntl", "Python's fcntl Interface", "python-core",
"lib-dynload/fcntl.so" )
m.addPackage( 0, "python-hotshot", "Python Hotshot Profiler", "python-core",
"hotshot lib-dynload/_hotshot.so" )
m.addPackage( 0, "python-html", "Python HTML Processing", "python-core",
"formatter.* htmlentitydefs.* htmllib.* markupbase.* sgmllib.* " )
m.addPackage( 0, "python-gdbm", "Python GNU Database Support", "python-core",
"lib-dynload/gdbm.so" )
m.addPackage( 0, "python-image", "Python Graphical Image Handling", "python-core",
"colorsys.* imghdr.* lib-dynload/imageop.so lib-dynload/rgbimg.so" )
m.addPackage( 0, "python-io", "Python Low-Level I/O", "python-core python-math",
"lib-dynload/_socket.so lib-dynload/_ssl.so lib-dynload/select.so lib-dynload/termios.so lib-dynload/cStringIO.so "
"pipes.* socket.* tempfile.* StringIO.* " )
m.addPackage( 0, "python-lang", "Python Low-Level Language Support", "python-core",
"lib-dynload/array.so lib-dynload/parser.so lib-dynload/operator.so lib-dynload/_weakref.so " +
"lib-dynload/itertools.so lib-dynload/collections.so lib-dynload/_bisect.so lib-dynload/_heapq.so " +
"atexit.* bisect.* code.* codeop.* dis.* heapq.* inspect.* keyword.* opcode.* symbol.* repr.* token.* " +
" tokenize.* traceback.* linecache.* weakref.*" )
m.addPackage( 0, "python-logging", "Python Logging Support", "python-core python-io python-lang python-pickle python-stringold",
"logging" ) # package
m.addPackage( 0, "python-tkinter", "Python Tcl/Tk Bindings", "python-core",
"lib-dynload/_tkinter.so lib-tk" ) # package
m.addPackage( 0, "python-math", "Python Math Support", "python-core",
"lib-dynload/cmath.so lib-dynload/math.so lib-dynload/_random.so random.* sets.*" )
m.addPackage( 0, "python-mime", "Python MIME Handling APIs", "python-core python-io",
"mimetools.* uu.* quopri.* rfc822.*" )
m.addPackage( 0, "python-mmap", "Python Memory-Mapped-File Support", "python-core python-io",
"lib-dynload/mmap.so " )
m.addPackage( 0, "python-unixadmin", "Python Unix Administration Support", "python-core",
"lib-dynload/nis.so lib-dynload/grp.so lib-dynload/pwd.so getpass.*" )
m.addPackage( 0, "python-netclient", "Python Internet Protocol Clients", "python-core python-crypt python-datetime python-io python-lang python-logging python-mime",
"*Cookie*.* " +
"base64.* cookielib.* ftplib.* gopherlib.* hmac.* httplib.* mimetypes.* nntplib.* poplib.* smtplib.* telnetlib.* urllib.* urllib2.* urlparse.* uuid.*" )
m.addPackage( 0, "python-netserver", "Python Internet Protocol Servers", "python-core python-netclient",
"cgi.* BaseHTTPServer.* SimpleHTTPServer.* SocketServer.*" )
m.addPackage( 0, "python-pickle", "Python Persistence Support", "python-core python-codecs python-io python-re",
"pickle.* shelve.* lib-dynload/cPickle.so" )
m.addPackage( 0, "python-pkgutil", "Python Package Extension Utility Support", "python-core",
"pkgutil.*")
m.addPackage( 0, "python-pprint", "Python Pretty-Print Support", "python-core",
"pprint.*" )
m.addPackage( 0, "python-profile", "Python Basic Profiling Support", "python-core python-textutils",
"profile.* pstats.* cProfile.* lib-dynload/_lsprof.so" )
m.addPackage( 0, "python-re", "Python Regular Expression APIs", "python-core",
"re.* sre.* sre_compile.* sre_constants* sre_parse.*" ) # _sre is builtin
m.addPackage( 0, "python-readline", "Python Readline Support", "python-core",
"lib-dynload/readline.so rlcompleter.*" )
m.addPackage( 0, "python-resource", "Python Resource Control Interface", "python-core",
"lib-dynload/resource.so" )
m.addPackage( 0, "python-shell", "Python Shell-Like Functionality", "python-core python-re",
"cmd.* commands.* dircache.* fnmatch.* glob.* popen2.* shlex.* shutil.*" )
m.addPackage( 0, "python-robotparser", "Python robots.txt parser", "python-core python-netclient",
"robotparser.*")
m.addPackage( 0, "python-subprocess", "Python Subprocess Support", "python-core python-io python-re python-fcntl python-pickle",
"subprocess.*" )
m.addPackage( 0, "python-sqlite3", "Python Sqlite3 Database Support", "python-core python-datetime python-lang python-crypt python-io python-threading python-zlib",
"lib-dynload/_sqlite3.so sqlite3/dbapi2.* sqlite3/__init__.*" )
m.addPackage( 0, "python-sqlite3-tests", "Python Sqlite3 Database Support Tests", "python-core python-sqlite3",
"sqlite3/test" )
m.addPackage( 0, "python-stringold", "Python String APIs [deprecated]", "python-core python-re",
"lib-dynload/strop.so string.*" )
m.addPackage( 0, "python-syslog", "Python's Syslog Interface", "python-core",
"lib-dynload/syslog.so" )
m.addPackage( 0, "python-terminal", "Python Terminal Controlling Support", "python-core python-io",
"pty.* tty.*" )
m.addPackage( 0, "python-tests", "Python Tests", "python-core",
"test" ) # package
m.addPackage( 0, "python-threading", "Python Threading & Synchronization Support", "python-core python-lang",
"_threading_local.* dummy_thread.* dummy_threading.* mutex.* threading.* Queue.*" )
m.addPackage( 0, "python-unittest", "Python Unit Testing Framework", "python-core python-stringold python-lang",
"unittest.*" )
m.addPackage( 0, "python-xml", "Python basic XML support.", "python-core python-re",
"lib-dynload/pyexpat.so xml xmllib.*" ) # package
m.addPackage( 0, "python-xmlrpc", "Python XMLRPC Support", "python-core python-xml python-netserver python-lang",
"xmlrpclib.* SimpleXMLRPCServer.*" )
m.addPackage( 0, "python-zlib", "Python zlib Support.", "python-core",
"lib-dynload/zlib.so" )
m.addPackage( 0, "python-mailbox", "Python Mailbox Format Support", "python-core python-mime",
"mailbox.*" )
# FIXME consider adding to python-compression
m.addPackage( 0, "python-bzip2", "Python bzip2 support", "python-core",
"lib-dynload/bz2.so" )
# FIXME consider adding to some higher level package
m.addPackage( 0, "python-elementtree", "Python elementree", "python-core",
"lib-dynload/_elementtree.so" )
m.make()
| mit |
raysinensis/tcgaAPP | backup/createdb.py | 2 | 1660 | from sqlalchemy import create_engine,MetaData
from sqlalchemy.ext.automap import automap_base
import pandas as pd
##csv to sql
engine = create_engine('sqlite:///static/database/methyl.db')
df = pd.read_csv('./static/methylation db.csv')
df.to_sql(name='methyl', con=engine)
##query from db
metadata = MetaData(engine)
Base = automap_base()
Base.prepare(engine, reflect=True)
#engine.table_names() ##checking table names
#methyldb = Table('methyl',metadata, autoload=True)
#print methyldb.c ##to see column names
from sqlalchemy import text
from sqlalchemy.orm import sessionmaker
engine = create_engine('sqlite:///static/database/methyl.db')
Session = sessionmaker(bind=engine)
methyldb = Session()
gene = 'ZFP36L1'
qcol = ['BRCA','COAD','GBM','KICH','LUAD','PAAD','SARC','STAD']
qcolstr = ','.join(qcol)
sqlstr = 'select '+qcolstr+ ' from methyl where gene=\"'+gene+'\"'
sqlcmd = text(sqlstr)
result = methyldb.execute(sqlcmd).fetchall()
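# Note: the query above interpolates `gene` into the SQL string. A hedged
# alternative (same schema assumed) binds the gene value as a parameter,
# which avoids quoting problems; column names still must be interpolated:
#   sqlcmd = text('select ' + qcolstr + ' from methyl where gene=:g')
#   result = methyldb.execute(sqlcmd, {'g': gene}).fetchall()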
##cox coeff from xlsx
trying=pd.read_excel('/home/rf/Downloads/peerj-03-1499-s001.xlsx',sheetname=None )
tempgenes=[]
for cancer in trying.keys():
tempgenes.extend((trying[cancer])['Gene Name'].tolist())
dfcox=pd.DataFrame()
dfcox['Gene Name']=list(set(tempgenes))
for cancer in trying.keys():
tempdf=(trying[cancer])[['Gene Name','Raw Cox Coefficient']].drop_duplicates('Gene Name')
dfcox=dfcox.merge(tempdf,on='Gene Name',how='left')
dfcox.rename(columns={'Raw Cox Coefficient':cancer},inplace=True)
dfcox.rename(columns={'Gene Name':'Gene'},inplace=True)
engine = create_engine('sqlite:///static/database/methyl.db')
dfcox.to_sql(name='cox', con=engine)
| mit |
alubbe/FrameworkBenchmarks | frameworks/Python/AsyncIO/aiohttp.web/hello/endpoints/world.py | 23 | 2662 | import logging
import asyncio
from aiohttp.web import Response
from api_hour.plugins.aiohttp import JSON
import aiohttp_jinja2
from ..services import queries_number
from ..services.world import get_random_record, get_random_records, update_random_records, get_fortunes
from ..services import redis
LOG = logging.getLogger(__name__)
@asyncio.coroutine
def json(request):
"""Test type 1: JSON serialization"""
return JSON({'message': 'Hello, World!'})
@asyncio.coroutine
def db(request):
"""Test type 2: Single database query"""
container = request.app.ah_container
return JSON((yield from get_random_record(container)))
@asyncio.coroutine
def db_redis(request):
"""Test type 2: Single database query"""
container = request.app.ah_container
return JSON((yield from redis.get_random_record(container)))
@asyncio.coroutine
def queries(request):
"""Test type 3: Multiple database queries"""
container = request.app.ah_container
limit = queries_number(request.GET.get('queries', 1))
return JSON((yield from get_random_records(container, limit)))
@asyncio.coroutine
def queries_redis(request):
"""Test type 3: Multiple database queries"""
container = request.app.ah_container
limit = queries_number(request.GET.get('queries', 1))
return JSON((yield from redis.get_random_records(container, limit)))
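# queries_number (imported from ..services) is assumed to sanitize the raw
# 'queries' query-string value; the TechEmpower benchmark rules call for
# clamping it to an int in [1, 500]. A hypothetical equivalent:
#   def queries_number(value):
#       try:
#           n = int(value)
#       except (TypeError, ValueError):
#           return 1
#       return min(max(n, 1), 500)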
@asyncio.coroutine
def fortunes(request):
"""Test type 4: Fortunes"""
container = request.app.ah_container
return aiohttp_jinja2.render_template('fortunes.html.j2',
request,
{'fortunes': (yield from get_fortunes(container))})
@asyncio.coroutine
def fortunes_redis(request):
"""Test type 4: Fortunes"""
container = request.app.ah_container
return aiohttp_jinja2.render_template('fortunes.html.j2',
request,
{'fortunes': (yield from redis.get_fortunes(container))})
@asyncio.coroutine
def updates(request):
"""Test type 5: Database updates"""
container = request.app.ah_container
limit = queries_number(request.GET.get('queries', 1))
return JSON((yield from update_random_records(container, limit)))
@asyncio.coroutine
def updates_redis(request):
"""Test type 5: Database updates"""
container = request.app.ah_container
limit = queries_number(request.GET.get('queries', 1))
return JSON((yield from redis.update_random_records(container, limit)))
@asyncio.coroutine
def plaintext(request):
"""Test type 6: Plaintext"""
return Response(text='Hello, World!')
| bsd-3-clause |
dsajkl/123 | common/lib/xmodule/xmodule/modulestore/tests/test_mongo.py | 2 | 30031 | # pylint: disable=E1101
# pylint: disable=W0212
# pylint: disable=E0611
from nose.tools import assert_equals, assert_raises, \
assert_not_equals, assert_false, assert_true, assert_greater, assert_is_instance, assert_is_none
# pylint: enable=E0611
from path import path
import pymongo
import logging
import shutil
from tempfile import mkdtemp
from uuid import uuid4
from datetime import datetime
from pytz import UTC
import unittest
from xblock.core import XBlock
from xblock.fields import Scope, Reference, ReferenceList, ReferenceValueDict
from xblock.runtime import KeyValueStore
from xblock.exceptions import InvalidScopeError
from xblock.plugin import Plugin
from xmodule.tests import DATA_DIR
from opaque_keys.edx.locations import Location
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.mongo import MongoKeyValueStore
from xmodule.modulestore.draft import DraftModuleStore
from opaque_keys.edx.locations import SlashSeparatedCourseKey, AssetLocation
from opaque_keys.edx.keys import UsageKey
from xmodule.modulestore.xml_exporter import export_to_xml
from xmodule.modulestore.xml_importer import import_from_xml, perform_xlint
from xmodule.contentstore.mongo import MongoContentStore
from nose.tools import assert_in
from xmodule.exceptions import NotFoundError
from git.test.lib.asserts import assert_not_none
from xmodule.x_module import XModuleMixin
from xmodule.modulestore.mongo.base import as_draft
from xmodule.modulestore.tests.mongo_connection import MONGO_PORT_NUM, MONGO_HOST
from xmodule.modulestore.edit_info import EditInfoMixin
log = logging.getLogger(__name__)
HOST = MONGO_HOST
PORT = MONGO_PORT_NUM
DB = 'test_mongo_%s' % uuid4().hex[:5]
COLLECTION = 'modulestore'
ASSET_COLLECTION = 'assetstore'
FS_ROOT = DATA_DIR # TODO (vshnayder): will need a real fs_root for testing load_item
DEFAULT_CLASS = 'xmodule.raw_module.RawDescriptor'
RENDER_TEMPLATE = lambda t_n, d, ctx = None, nsp = 'main': ''
class ReferenceTestXBlock(XBlock, XModuleMixin):
"""
Test xblock type to test the reference field types
"""
has_children = True
reference_link = Reference(default=None, scope=Scope.content)
reference_list = ReferenceList(scope=Scope.content)
reference_dict = ReferenceValueDict(scope=Scope.settings)
class TestMongoModuleStoreBase(unittest.TestCase):
'''
Basic setup for all tests
'''
# Explicitly list the courses to load (don't want the big one)
courses = ['toy', 'simple', 'simple_with_draft', 'test_unicode']
@classmethod
def setupClass(cls):
cls.connection = pymongo.MongoClient(
host=HOST,
port=PORT,
tz_aware=True,
document_class=dict,
)
# NOTE: Creating a single db for all the tests to save time. This
# is ok only as long as none of the tests modify the db.
# If (when!) that changes, need to either reload the db, or load
# once and copy over to a tmp db for each test.
cls.content_store, cls.draft_store = cls.initdb()
@classmethod
def teardownClass(cls):
# cls.patcher.stop()
if cls.connection:
cls.connection.drop_database(DB)
cls.connection.close()
@classmethod
def add_asset_collection(cls, doc_store_config):
"""
No asset collection.
"""
pass
@classmethod
def initdb(cls):
# connect to the db
doc_store_config = {
'host': HOST,
'port': PORT,
'db': DB,
'collection': COLLECTION,
}
cls.add_asset_collection(doc_store_config)
# since MongoModuleStore and MongoContentStore are basically assumed to be together, create this class
# as well
content_store = MongoContentStore(HOST, DB, port=PORT)
#
# Also test draft store imports
#
draft_store = DraftModuleStore(
content_store,
doc_store_config, FS_ROOT, RENDER_TEMPLATE,
default_class=DEFAULT_CLASS,
branch_setting_func=lambda: ModuleStoreEnum.Branch.draft_preferred,
xblock_mixins=(EditInfoMixin,)
)
import_from_xml(
draft_store,
999,
DATA_DIR,
cls.courses,
static_content_store=content_store
)
# also test a course with no importing of static content
import_from_xml(
draft_store,
999,
DATA_DIR,
['test_import_course'],
static_content_store=content_store,
do_import_static=False,
verbose=True
)
return content_store, draft_store
@staticmethod
def destroy_db(connection):
# Destroy the test db.
connection.drop_database(DB)
@classmethod
def setUp(cls):
cls.dummy_user = ModuleStoreEnum.UserID.test
@classmethod
def tearDown(cls):
pass
class TestMongoModuleStore(TestMongoModuleStoreBase):
'''Module store tests'''
@classmethod
def add_asset_collection(cls, doc_store_config):
"""
No asset collection - it's not used in the tests below.
"""
pass
@classmethod
def setupClass(cls):
super(TestMongoModuleStore, cls).setupClass()
@classmethod
def teardownClass(cls):
super(TestMongoModuleStore, cls).teardownClass()
def test_init(self):
'''Make sure the db loads'''
ids = list(self.connection[DB][COLLECTION].find({}, {'_id': True}))
assert_greater(len(ids), 12)
def test_mongo_modulestore_type(self):
store = DraftModuleStore(
None,
{'host': HOST, 'db': DB, 'port': PORT, 'collection': COLLECTION},
FS_ROOT, RENDER_TEMPLATE, default_class=DEFAULT_CLASS
)
assert_equals(store.get_modulestore_type(''), ModuleStoreEnum.Type.mongo)
def test_get_courses(self):
'''Make sure the course objects loaded properly'''
courses = self.draft_store.get_courses()
assert_equals(len(courses), 6)
course_ids = [course.id for course in courses]
for course_key in [
SlashSeparatedCourseKey(*fields)
for fields in [
['edX', 'simple', '2012_Fall'], ['edX', 'simple_with_draft', '2012_Fall'],
['edX', 'test_import_course', '2012_Fall'], ['edX', 'test_unicode', '2012_Fall'],
['edX', 'toy', '2012_Fall']
]
]:
assert_in(course_key, course_ids)
course = self.draft_store.get_course(course_key)
assert_not_none(course)
assert_true(self.draft_store.has_course(course_key))
mix_cased = SlashSeparatedCourseKey(
course_key.org.upper(), course_key.course.upper(), course_key.run.lower()
)
assert_false(self.draft_store.has_course(mix_cased))
assert_true(self.draft_store.has_course(mix_cased, ignore_case=True))
def test_no_such_course(self):
"""
Test get_course and has_course with ids which don't exist
"""
for course_key in [
SlashSeparatedCourseKey(*fields)
for fields in [
['edX', 'simple', 'no_such_course'], ['edX', 'no_such_course', '2012_Fall'],
['NO_SUCH_COURSE', 'Test_iMport_courSe', '2012_Fall'],
]
]:
course = self.draft_store.get_course(course_key)
assert_is_none(course)
assert_false(self.draft_store.has_course(course_key))
mix_cased = SlashSeparatedCourseKey(
course_key.org.lower(), course_key.course.upper(), course_key.run.upper()
)
assert_false(self.draft_store.has_course(mix_cased))
assert_false(self.draft_store.has_course(mix_cased, ignore_case=True))
def test_loads(self):
assert_not_none(
self.draft_store.get_item(Location('edX', 'toy', '2012_Fall', 'course', '2012_Fall'))
)
assert_not_none(
self.draft_store.get_item(Location('edX', 'simple', '2012_Fall', 'course', '2012_Fall')),
)
assert_not_none(
self.draft_store.get_item(Location('edX', 'toy', '2012_Fall', 'video', 'Welcome')),
)
def test_unicode_loads(self):
"""
Test that getting items from the test_unicode course works
"""
assert_not_none(
self.draft_store.get_item(Location('edX', 'test_unicode', '2012_Fall', 'course', '2012_Fall')),
)
# All items with ascii-only filenames should load properly.
assert_not_none(
self.draft_store.get_item(Location('edX', 'test_unicode', '2012_Fall', 'video', 'Welcome')),
)
assert_not_none(
self.draft_store.get_item(Location('edX', 'test_unicode', '2012_Fall', 'video', 'Welcome')),
)
assert_not_none(
self.draft_store.get_item(Location('edX', 'test_unicode', '2012_Fall', 'chapter', 'Overview')),
)
def test_find_one(self):
assert_not_none(
self.draft_store._find_one(Location('edX', 'toy', '2012_Fall', 'course', '2012_Fall')),
)
assert_not_none(
self.draft_store._find_one(Location('edX', 'simple', '2012_Fall', 'course', '2012_Fall')),
)
assert_not_none(
self.draft_store._find_one(Location('edX', 'toy', '2012_Fall', 'video', 'Welcome')),
)
def test_xlinter(self):
'''
Run through the xlinter, we know the 'toy' course has violations, but the
number will continue to grow over time, so just check > 0
'''
assert_not_equals(perform_xlint(DATA_DIR, ['toy']), 0)
def test_get_courses_has_no_templates(self):
courses = self.draft_store.get_courses()
for course in courses:
assert_false(
course.location.org == 'edx' and course.location.course == 'templates',
'{0} is a template course'.format(course)
)
def test_static_tab_names(self):
def get_tab_name(index):
"""
Helper function for pulling out the name of a given static tab.
Assumes the information is desired for courses[4] ('toy' course).
"""
course = self.draft_store.get_course(SlashSeparatedCourseKey('edX', 'toy', '2012_Fall'))
return course.tabs[index]['name']
# There was a bug where model.save was not getting called after the static tab name
# was set for tabs that have a URL slug. 'Syllabus' and 'Resources' fall into that
# category, but for completeness, I'm also testing 'Course Info' and 'Discussion' (no url slug).
assert_equals('Course Info', get_tab_name(1))
assert_equals('Syllabus', get_tab_name(2))
assert_equals('Resources', get_tab_name(3))
assert_equals('Discussion', get_tab_name(4))
def test_contentstore_attrs(self):
"""
Test getting, setting, and defaulting the locked attr and arbitrary attrs.
"""
location = Location('edX', 'toy', '2012_Fall', 'course', '2012_Fall')
course_content, __ = self.content_store.get_all_content_for_course(location.course_key)
assert_true(len(course_content) > 0)
# a bit overkill, could just do for content[0]
for content in course_content:
assert not content.get('locked', False)
asset_key = AssetLocation._from_deprecated_son(content.get('content_son', content['_id']), location.run)
assert not self.content_store.get_attr(asset_key, 'locked', False)
attrs = self.content_store.get_attrs(asset_key)
assert_in('uploadDate', attrs)
assert not attrs.get('locked', False)
self.content_store.set_attr(asset_key, 'locked', True)
assert self.content_store.get_attr(asset_key, 'locked', False)
attrs = self.content_store.get_attrs(asset_key)
assert_in('locked', attrs)
assert attrs['locked'] is True
self.content_store.set_attrs(asset_key, {'miscel': 99})
assert_equals(self.content_store.get_attr(asset_key, 'miscel'), 99)
asset_key = AssetLocation._from_deprecated_son(
course_content[0].get('content_son', course_content[0]['_id']),
location.run
)
assert_raises(
AttributeError, self.content_store.set_attr, asset_key,
'md5', 'ff1532598830e3feac91c2449eaa60d6'
)
assert_raises(
AttributeError, self.content_store.set_attrs, asset_key,
{'foo': 9, 'md5': 'ff1532598830e3feac91c2449eaa60d6'}
)
assert_raises(
NotFoundError, self.content_store.get_attr,
Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
'displayname'
)
assert_raises(
NotFoundError, self.content_store.set_attr,
Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
'displayname', 'hello'
)
assert_raises(
NotFoundError, self.content_store.get_attrs,
Location('bogus', 'bogus', 'bogus', 'asset', 'bogus')
)
assert_raises(
NotFoundError, self.content_store.set_attrs,
Location('bogus', 'bogus', 'bogus', 'asset', 'bogus'),
{'displayname': 'hello'}
)
assert_raises(
NotFoundError, self.content_store.set_attrs,
Location('bogus', 'bogus', 'bogus', 'asset', None),
{'displayname': 'hello'}
)
def test_get_courses_for_wiki(self):
"""
Test the get_courses_for_wiki method
"""
for course_number in self.courses:
course_locations = self.draft_store.get_courses_for_wiki(course_number)
assert_equals(len(course_locations), 1)
assert_equals(SlashSeparatedCourseKey('edX', course_number, '2012_Fall'), course_locations[0])
course_locations = self.draft_store.get_courses_for_wiki('no_such_wiki')
assert_equals(len(course_locations), 0)
# set toy course to share the wiki with simple course
toy_course = self.draft_store.get_course(SlashSeparatedCourseKey('edX', 'toy', '2012_Fall'))
toy_course.wiki_slug = 'simple'
self.draft_store.update_item(toy_course, ModuleStoreEnum.UserID.test)
# now toy_course should not be retrievable with old wiki_slug
course_locations = self.draft_store.get_courses_for_wiki('toy')
assert_equals(len(course_locations), 0)
# but there should be two courses with wiki_slug 'simple'
course_locations = self.draft_store.get_courses_for_wiki('simple')
assert_equals(len(course_locations), 2)
for course_number in ['toy', 'simple']:
assert_in(SlashSeparatedCourseKey('edX', course_number, '2012_Fall'), course_locations)
# configure simple course to use unique wiki_slug.
simple_course = self.draft_store.get_course(SlashSeparatedCourseKey('edX', 'simple', '2012_Fall'))
simple_course.wiki_slug = 'edX.simple.2012_Fall'
self.draft_store.update_item(simple_course, ModuleStoreEnum.UserID.test)
# it should be retrievable with its new wiki_slug
course_locations = self.draft_store.get_courses_for_wiki('edX.simple.2012_Fall')
assert_equals(len(course_locations), 1)
assert_in(SlashSeparatedCourseKey('edX', 'simple', '2012_Fall'), course_locations)
@Plugin.register_temp_plugin(ReferenceTestXBlock, 'ref_test')
def test_reference_converters(self):
"""
Test that references types get deserialized correctly
"""
course_key = SlashSeparatedCourseKey('edX', 'toy', '2012_Fall')
def setup_test():
course = self.draft_store.get_course(course_key)
# can't use item factory as it depends on django settings
p1ele = self.draft_store.create_item(
99,
course_key,
'problem',
block_id='p1',
runtime=course.runtime
)
p2ele = self.draft_store.create_item(
99,
course_key,
'problem',
block_id='p2',
runtime=course.runtime
)
self.refloc = course.id.make_usage_key('ref_test', 'ref_test')
self.draft_store.create_item(
99,
self.refloc.course_key,
self.refloc.block_type,
block_id=self.refloc.block_id,
runtime=course.runtime,
fields={
'reference_link': p1ele.location,
'reference_list': [p1ele.location, p2ele.location],
'reference_dict': {'p1': p1ele.location, 'p2': p2ele.location},
'children': [p1ele.location, p2ele.location],
}
)
def check_xblock_fields():
def check_children(xblock):
for child in xblock.children:
assert_is_instance(child, UsageKey)
course = self.draft_store.get_course(course_key)
check_children(course)
refele = self.draft_store.get_item(self.refloc)
check_children(refele)
assert_is_instance(refele.reference_link, UsageKey)
assert_greater(len(refele.reference_list), 0)
for ref in refele.reference_list:
assert_is_instance(ref, UsageKey)
assert_greater(len(refele.reference_dict), 0)
for ref in refele.reference_dict.itervalues():
assert_is_instance(ref, UsageKey)
def check_mongo_fields():
def get_item(location):
return self.draft_store._find_one(as_draft(location))
def check_children(payload):
for child in payload['definition']['children']:
assert_is_instance(child, basestring)
refele = get_item(self.refloc)
check_children(refele)
assert_is_instance(refele['definition']['data']['reference_link'], basestring)
assert_greater(len(refele['definition']['data']['reference_list']), 0)
for ref in refele['definition']['data']['reference_list']:
assert_is_instance(ref, basestring)
assert_greater(len(refele['metadata']['reference_dict']), 0)
for ref in refele['metadata']['reference_dict'].itervalues():
assert_is_instance(ref, basestring)
setup_test()
check_xblock_fields()
check_mongo_fields()
def test_export_course_image(self):
"""
Test to make sure that we have a course image in the contentstore,
then export it to ensure it gets copied to both file locations.
"""
course_key = SlashSeparatedCourseKey('edX', 'simple', '2012_Fall')
location = course_key.make_asset_key('asset', 'images_course_image.jpg')
# This will raise if the course image is missing
self.content_store.find(location)
root_dir = path(mkdtemp())
try:
export_to_xml(self.draft_store, self.content_store, course_key, root_dir, 'test_export')
assert_true(path(root_dir / 'test_export/static/images/course_image.jpg').isfile())
assert_true(path(root_dir / 'test_export/static/images_course_image.jpg').isfile())
finally:
shutil.rmtree(root_dir)
def test_export_course_image_nondefault(self):
"""
Make sure that if a non-default image path is specified that we
don't export it to the static default location
"""
course = self.draft_store.get_course(SlashSeparatedCourseKey('edX', 'toy', '2012_Fall'))
assert_equals(course.course_image, 'just_a_test.jpg')
root_dir = path(mkdtemp())
try:
export_to_xml(self.draft_store, self.content_store, course.id, root_dir, 'test_export')
assert_true(path(root_dir / 'test_export/static/just_a_test.jpg').isfile())
assert_false(path(root_dir / 'test_export/static/images/course_image.jpg').isfile())
finally:
shutil.rmtree(root_dir)
def test_course_without_image(self):
"""
Make sure export proceeds gracefully when the course has no static
course image
"""
course = self.draft_store.get_course(SlashSeparatedCourseKey('edX', 'simple_with_draft', '2012_Fall'))
root_dir = path(mkdtemp())
try:
export_to_xml(self.draft_store, self.content_store, course.id, root_dir, 'test_export')
assert_false(path(root_dir / 'test_export/static/images/course_image.jpg').isfile())
assert_false(path(root_dir / 'test_export/static/images_course_image.jpg').isfile())
finally:
shutil.rmtree(root_dir)
def _create_test_tree(self, name, user_id=None):
"""
Creates and returns a tree with the following structure:
Grandparent
Parent Sibling
Parent
Child
Child Sibling
"""
if user_id is None:
user_id = self.dummy_user
org = 'edX'
course = 'tree{}'.format(name)
run = name
if not self.draft_store.has_course(SlashSeparatedCourseKey(org, course, run)):
self.draft_store.create_course(org, course, run, user_id)
locations = {
'grandparent': Location(org, course, run, 'chapter', 'grandparent'),
'parent_sibling': Location(org, course, run, 'sequential', 'parent_sibling'),
'parent': Location(org, course, run, 'sequential', 'parent'),
'child_sibling': Location(org, course, run, 'vertical', 'child_sibling'),
'child': Location(org, course, run, 'vertical', 'child'),
}
for key in locations:
self.draft_store.create_item(
user_id,
locations[key].course_key,
locations[key].block_type,
block_id=locations[key].block_id
)
grandparent = self.draft_store.get_item(locations['grandparent'])
grandparent.children += [locations['parent_sibling'], locations['parent']]
self.draft_store.update_item(grandparent, user_id=user_id)
parent = self.draft_store.get_item(locations['parent'])
parent.children += [locations['child_sibling'], locations['child']]
self.draft_store.update_item(parent, user_id=user_id)
self.draft_store.publish(locations['parent'], user_id)
self.draft_store.publish(locations['parent_sibling'], user_id)
return locations
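# Sketch of the structure returned by _create_test_tree('demo') (ids are
# illustrative): course edX/treedemo/demo gains a 'grandparent' chapter whose
# children are the 'parent_sibling' and 'parent' sequentials; 'parent' in
# turn holds the 'child_sibling' and 'child' verticals. Everything is created
# as draft, and the two sequentials are then published at the end.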
def test_migrate_published_info(self):
"""
Tests that blocks that were storing published_date and published_by through CMSBlockMixin are loaded correctly
"""
# Insert the test block directly into the module store
location = Location('edX', 'migration', '2012_Fall', 'html', 'test_html')
published_date = datetime(1970, 1, 1, tzinfo=UTC)
published_by = 123
self.draft_store._update_single_item(
as_draft(location),
{
'definition.data': {},
'metadata': {
# published_date was previously stored as a list of time components, not a datetime
'published_date': list(published_date.timetuple()),
'published_by': published_by,
},
},
allow_not_found=True,
)
# Retrieve the block and verify its fields
component = self.draft_store.get_item(location)
self.assertEqual(component.published_on, published_date)
self.assertEqual(component.published_by, published_by)
def test_export_course_with_peer_component(self):
"""
Test export course when link_to_location is given in peer grading interface settings.
"""
name = "export_peer_component"
locations = self._create_test_tree(name)
# Insert the test block directly into the module store
problem_location = Location('edX', 'tree{}'.format(name), name, 'combinedopenended', 'test_peer_problem')
self.draft_store.create_child(
self.dummy_user,
locations["child"],
problem_location.block_type,
block_id=problem_location.block_id
)
interface_location = Location('edX', 'tree{}'.format(name), name, 'peergrading', 'test_peer_interface')
self.draft_store.create_child(
self.dummy_user,
locations["child"],
interface_location.block_type,
block_id=interface_location.block_id
)
self.draft_store._update_single_item(
as_draft(interface_location),
{
'definition.data': {},
'metadata': {
'link_to_location': unicode(problem_location),
'use_for_single_location': True,
},
},
)
component = self.draft_store.get_item(interface_location)
self.assertEqual(unicode(component.link_to_location), unicode(problem_location))
root_dir = path(mkdtemp())
# export_to_xml should work.
try:
export_to_xml(self.draft_store, self.content_store, interface_location.course_key, root_dir, 'test_export')
finally:
shutil.rmtree(root_dir)
class TestMongoModuleStoreWithNoAssetCollection(TestMongoModuleStore):
'''
Tests a situation where no asset_collection is specified.
'''
@classmethod
def add_asset_collection(cls, doc_store_config):
"""
No asset collection.
"""
pass
@classmethod
def setupClass(cls):
super(TestMongoModuleStoreWithNoAssetCollection, cls).setupClass()
@classmethod
def teardownClass(cls):
super(TestMongoModuleStoreWithNoAssetCollection, cls).teardownClass()
def test_no_asset_collection(self):
courses = self.draft_store.get_courses()
course = courses[0]
# Confirm that no asset collection means no asset metadata.
self.assertEquals(self.draft_store.get_all_asset_metadata(course.id, 'asset'), None)
class TestMongoKeyValueStore(object):
"""
Tests for MongoKeyValueStore.
"""
def setUp(self):
self.data = {'foo': 'foo_value'}
self.course_id = SlashSeparatedCourseKey('org', 'course', 'run')
self.children = [self.course_id.make_usage_key('child', 'a'), self.course_id.make_usage_key('child', 'b')]
self.metadata = {'meta': 'meta_val'}
self.kvs = MongoKeyValueStore(self.data, self.children, self.metadata)
def test_read(self):
assert_equals(self.data['foo'], self.kvs.get(KeyValueStore.Key(Scope.content, None, None, 'foo')))
assert_equals(self.children, self.kvs.get(KeyValueStore.Key(Scope.children, None, None, 'children')))
assert_equals(self.metadata['meta'], self.kvs.get(KeyValueStore.Key(Scope.settings, None, None, 'meta')))
assert_equals(None, self.kvs.get(KeyValueStore.Key(Scope.parent, None, None, 'parent')))
def test_read_invalid_scope(self):
for scope in (Scope.preferences, Scope.user_info, Scope.user_state):
key = KeyValueStore.Key(scope, None, None, 'foo')
with assert_raises(InvalidScopeError):
self.kvs.get(key)
assert_false(self.kvs.has(key))
def test_read_non_dict_data(self):
self.kvs = MongoKeyValueStore('xml_data', self.children, self.metadata)
assert_equals('xml_data', self.kvs.get(KeyValueStore.Key(Scope.content, None, None, 'data')))
def _check_write(self, key, value):
self.kvs.set(key, value)
assert_equals(value, self.kvs.get(key))
def test_write(self):
yield (self._check_write, KeyValueStore.Key(Scope.content, None, None, 'foo'), 'new_data')
yield (self._check_write, KeyValueStore.Key(Scope.children, None, None, 'children'), [])
yield (self._check_write, KeyValueStore.Key(Scope.settings, None, None, 'meta'), 'new_settings')
def test_write_non_dict_data(self):
self.kvs = MongoKeyValueStore('xml_data', self.children, self.metadata)
self._check_write(KeyValueStore.Key(Scope.content, None, None, 'data'), 'new_data')
def test_write_invalid_scope(self):
for scope in (Scope.preferences, Scope.user_info, Scope.user_state, Scope.parent):
with assert_raises(InvalidScopeError):
self.kvs.set(KeyValueStore.Key(scope, None, None, 'foo'), 'new_value')
def _check_delete_default(self, key, default_value):
self.kvs.delete(key)
assert_equals(default_value, self.kvs.get(key))
assert self.kvs.has(key)
def _check_delete_key_error(self, key):
self.kvs.delete(key)
with assert_raises(KeyError):
self.kvs.get(key)
assert_false(self.kvs.has(key))
def test_delete(self):
yield (self._check_delete_key_error, KeyValueStore.Key(Scope.content, None, None, 'foo'))
yield (self._check_delete_default, KeyValueStore.Key(Scope.children, None, None, 'children'), [])
yield (self._check_delete_key_error, KeyValueStore.Key(Scope.settings, None, None, 'meta'))
def test_delete_invalid_scope(self):
for scope in (Scope.preferences, Scope.user_info, Scope.user_state, Scope.parent):
with assert_raises(InvalidScopeError):
self.kvs.delete(KeyValueStore.Key(scope, None, None, 'foo'))
| agpl-3.0 |
open-mmlab/mmdetection | mmdet/core/mask/utils.py | 1 | 2291 | import mmcv
import numpy as np
import pycocotools.mask as mask_util
def split_combined_polys(polys, poly_lens, polys_per_mask):
"""Split the combined 1-D polys into masks.
A mask is represented as a list of polys, and a poly is represented as
a 1-D array. In dataset, all masks are concatenated into a single 1-D
tensor. Here we need to split the tensor into original representations.
Args:
polys (list): a list (length = image num) of 1-D tensors
poly_lens (list): a list (length = image num) of poly length
polys_per_mask (list): a list (length = image num) of poly number
of each mask
Returns:
list: a list (length = image num) of list (length = mask num) of \
list (length = poly num) of numpy array.
"""
mask_polys_list = []
for img_id in range(len(polys)):
polys_single = polys[img_id]
polys_lens_single = poly_lens[img_id].tolist()
polys_per_mask_single = polys_per_mask[img_id].tolist()
split_polys = mmcv.slice_list(polys_single, polys_lens_single)
mask_polys = mmcv.slice_list(split_polys, polys_per_mask_single)
mask_polys_list.append(mask_polys)
return mask_polys_list
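# A minimal, hypothetical walk-through of the combined layout handled above:
# one image whose first mask has two polys (lengths 4 and 6) and whose second
# mask has a single poly (length 8), flattened into one 18-element array:
#   polys = [np.arange(18, dtype=np.float32)]
#   poly_lens = [np.array([4, 6, 8])]
#   polys_per_mask = [np.array([2, 1])]
#   split_combined_polys(polys, poly_lens, polys_per_mask)
#   # -> [[[array of 4 values, array of 6 values], [array of 8 values]]]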
# TODO: move this function to more proper place
def encode_mask_results(mask_results):
"""Encode bitmap mask to RLE code.
Args:
mask_results (list | tuple[list]): bitmap mask results.
In mask scoring rcnn, mask_results is a tuple of (segm_results,
segm_cls_score).
Returns:
list | tuple: RLE encoded mask.
"""
if isinstance(mask_results, tuple): # mask scoring
cls_segms, cls_mask_scores = mask_results
else:
cls_segms = mask_results
num_classes = len(cls_segms)
encoded_mask_results = [[] for _ in range(num_classes)]
for i in range(len(cls_segms)):
for cls_segm in cls_segms[i]:
encoded_mask_results[i].append(
mask_util.encode(
np.array(
cls_segm[:, :, np.newaxis], order='F',
dtype='uint8'))[0]) # encoded with RLE
if isinstance(mask_results, tuple):
return encoded_mask_results, cls_mask_scores
else:
return encoded_mask_results
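# Hypothetical usage sketch (shapes and values invented for illustration):
#   segms = [[np.zeros((4, 4), dtype=np.uint8)], []]   # class 0: one mask
#   rles = encode_mask_results(segms)
#   # rles[0][0] is a COCO RLE dict, e.g. {'size': [4, 4], 'counts': b'...'}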
| apache-2.0 |
tumf/swift3 | swift3/controllers/logging.py | 2 | 1534 | # Copyright (c) 2010-2014 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from swift3.controllers.base import Controller, bucket_operation
from swift3.etree import Element, tostring
from swift3.response import HTTPOk, S3NotImplemented, NoLoggingStatusForKey
class LoggingStatusController(Controller):
"""
Handles the following APIs:
- GET Bucket logging
- PUT Bucket logging
Those APIs are logged as LOGGING_STATUS operations in the S3 server log.
"""
@bucket_operation(err_resp=NoLoggingStatusForKey)
def GET(self, req):
"""
Handles GET Bucket logging.
"""
req.get_response(self.app, method='HEAD')
# logging disabled
elem = Element('BucketLoggingStatus')
body = tostring(elem)
return HTTPOk(body=body, content_type='application/xml')
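# The body built above is the minimal "logging disabled" document, roughly:
#   <BucketLoggingStatus/>
# (the exact XML declaration and namespace depend on swift3.etree's
# serializer; shown here only as an assumption for illustration)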
@bucket_operation(err_resp=NoLoggingStatusForKey)
def PUT(self, req):
"""
Handles PUT Bucket logging.
"""
raise S3NotImplemented()
| apache-2.0 |
dacjames/scrapy | scrapy/utils/reqser.py | 110 | 2288 | """
Helper functions for serializing (and deserializing) requests.
"""
import six
from scrapy.http import Request
from scrapy.utils.python import to_unicode, to_native_str
def request_to_dict(request, spider=None):
"""Convert Request object to a dict.
If a spider is given, it will try to find out the name of the spider method
used in the callback and store that as the callback.
"""
cb = request.callback
if callable(cb):
cb = _find_method(spider, cb)
eb = request.errback
if callable(eb):
eb = _find_method(spider, eb)
d = {
'url': to_unicode(request.url), # urls should be safe (safe_string_url)
'callback': cb,
'errback': eb,
'method': request.method,
'headers': dict(request.headers),
'body': request.body,
'cookies': request.cookies,
'meta': request.meta,
'_encoding': request._encoding,
'priority': request.priority,
'dont_filter': request.dont_filter,
}
return d
def request_from_dict(d, spider=None):
"""Create Request object from a dict.
If a spider is given, it will try to resolve the callbacks looking at the
spider for methods with the same name.
"""
cb = d['callback']
if cb and spider:
cb = _get_method(spider, cb)
eb = d['errback']
if eb and spider:
eb = _get_method(spider, eb)
return Request(
url=to_native_str(d['url']),
callback=cb,
errback=eb,
method=d['method'],
headers=d['headers'],
body=d['body'],
cookies=d['cookies'],
meta=d['meta'],
encoding=d['_encoding'],
priority=d['priority'],
dont_filter=d['dont_filter'])
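# A minimal round-trip sketch (no spider, so callback/errback stay None):
#   req = Request('http://example.com', method='POST', body=b'x=1')
#   d = request_to_dict(req)
#   req2 = request_from_dict(d)
#   assert req2.url == req.url and req2.method == 'POST'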
def _find_method(obj, func):
if obj:
try:
func_self = six.get_method_self(func)
except AttributeError: # func has no __self__
pass
else:
if func_self is obj:
return six.get_method_function(func).__name__
raise ValueError("Function %s is not a method of: %s" % (func, obj))
def _get_method(obj, name):
name = str(name)
try:
return getattr(obj, name)
except AttributeError:
raise ValueError("Method %r not found in: %s" % (name, obj))
| bsd-3-clause |
x111ong/django | tests/conditional_processing/tests.py | 322 | 9157 | # -*- coding:utf-8 -*-
from __future__ import unicode_literals
from datetime import datetime
from django.test import SimpleTestCase, override_settings
FULL_RESPONSE = 'Test conditional get response'
LAST_MODIFIED = datetime(2007, 10, 21, 23, 21, 47)
LAST_MODIFIED_STR = 'Sun, 21 Oct 2007 23:21:47 GMT'
LAST_MODIFIED_NEWER_STR = 'Mon, 18 Oct 2010 16:56:23 GMT'
LAST_MODIFIED_INVALID_STR = 'Mon, 32 Oct 2010 16:56:23 GMT'
EXPIRED_LAST_MODIFIED_STR = 'Sat, 20 Oct 2007 23:21:47 GMT'
ETAG = 'b4246ffc4f62314ca13147c9d4f76974'
EXPIRED_ETAG = '7fae4cd4b0f81e7d2914700043aa8ed6'
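# Note: HTTP entity tags are compared as quoted strings, which is why the
# tests below wrap ETAG / EXPIRED_ETAG in double quotes when building
# If-None-Match and If-Match headers.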
@override_settings(ROOT_URLCONF='conditional_processing.urls')
class ConditionalGet(SimpleTestCase):
def assertFullResponse(self, response, check_last_modified=True, check_etag=True):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content, FULL_RESPONSE.encode())
if check_last_modified:
self.assertEqual(response['Last-Modified'], LAST_MODIFIED_STR)
if check_etag:
self.assertEqual(response['ETag'], '"%s"' % ETAG)
def assertNotModified(self, response):
self.assertEqual(response.status_code, 304)
self.assertEqual(response.content, b'')
def test_without_conditions(self):
response = self.client.get('/condition/')
self.assertFullResponse(response)
def test_if_modified_since(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.get('/condition/')
self.assertNotModified(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_NEWER_STR
response = self.client.get('/condition/')
self.assertNotModified(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_INVALID_STR
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
response = self.client.get('/condition/')
self.assertFullResponse(response)
def test_if_unmodified_since(self):
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_NEWER_STR
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_INVALID_STR
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
response = self.client.get('/condition/')
self.assertEqual(response.status_code, 412)
def test_if_none_match(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/')
self.assertNotModified(response)
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/')
self.assertFullResponse(response)
# Several etags in If-None-Match is a bit exotic but why not?
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s", "%s"' % (ETAG, EXPIRED_ETAG)
response = self.client.get('/condition/')
self.assertNotModified(response)
def test_if_match(self):
self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % ETAG
response = self.client.put('/condition/etag/')
self.assertEqual(response.status_code, 200)
self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.put('/condition/etag/')
self.assertEqual(response.status_code, 412)
def test_both_headers(self):
# see http://www.w3.org/Protocols/rfc2616/rfc2616-sec13.html#sec13.3.4
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/')
self.assertNotModified(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/')
self.assertFullResponse(response)
def test_both_headers_2(self):
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/')
self.assertFullResponse(response)
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/')
self.assertEqual(response.status_code, 412)
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/')
self.assertEqual(response.status_code, 412)
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
self.client.defaults['HTTP_IF_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/')
self.assertEqual(response.status_code, 412)
def test_single_condition_1(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified/')
self.assertNotModified(response)
response = self.client.get('/condition/etag/')
self.assertFullResponse(response, check_last_modified=False)
def test_single_condition_2(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/etag/')
self.assertNotModified(response)
response = self.client.get('/condition/last_modified/')
self.assertFullResponse(response, check_etag=False)
def test_single_condition_3(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified/')
self.assertFullResponse(response, check_etag=False)
def test_single_condition_4(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % EXPIRED_ETAG
response = self.client.get('/condition/etag/')
self.assertFullResponse(response, check_last_modified=False)
def test_single_condition_5(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified2/')
self.assertNotModified(response)
response = self.client.get('/condition/etag2/')
self.assertFullResponse(response, check_last_modified=False)
def test_single_condition_6(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = '"%s"' % ETAG
response = self.client.get('/condition/etag2/')
self.assertNotModified(response)
response = self.client.get('/condition/last_modified2/')
self.assertFullResponse(response, check_etag=False)
def test_single_condition_7(self):
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified/')
self.assertEqual(response.status_code, 412)
response = self.client.get('/condition/etag/')
self.assertFullResponse(response, check_last_modified=False)
def test_single_condition_8(self):
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified/')
self.assertFullResponse(response, check_etag=False)
def test_single_condition_9(self):
self.client.defaults['HTTP_IF_UNMODIFIED_SINCE'] = EXPIRED_LAST_MODIFIED_STR
response = self.client.get('/condition/last_modified2/')
self.assertEqual(response.status_code, 412)
response = self.client.get('/condition/etag2/')
self.assertFullResponse(response, check_last_modified=False)
def test_single_condition_head(self):
self.client.defaults['HTTP_IF_MODIFIED_SINCE'] = LAST_MODIFIED_STR
response = self.client.head('/condition/')
self.assertNotModified(response)
def test_invalid_etag(self):
self.client.defaults['HTTP_IF_NONE_MATCH'] = r'"\"'
response = self.client.get('/condition/etag/')
self.assertFullResponse(response, check_last_modified=False)
| bsd-3-clause |
lsst-sqre/sphinxkit | documenteer/sphinxext/__init__.py | 2 | 1128 | """Sphinx/docutils extensions for LSST DM documentation.
Enable these extensions by adding ``documenteer.sphinxext`` to your
extensions list in :file:`conf.py`::
extensions = [
# ...
'documenteer.sphinxext'
]
Some extensions require project-specific dependencies and are not
automatically enabled. They should be specified individually. They are:
- ``documenteer.sphinxext.bibtex``
"""
__all__ = ("setup",)
from pkg_resources import DistributionNotFound, get_distribution
from . import (
jira,
lsstdocushare,
mockcoderefs,
packagetoctree,
remotecodeblock,
)
def setup(app):
"""Wrapper for the `setup` functions of each extension module."""
jira.setup(app)
lsstdocushare.setup(app)
mockcoderefs.setup(app)
packagetoctree.setup(app)
remotecodeblock.setup(app)
try:
__version__ = get_distribution("documenteer").version
except DistributionNotFound:
# package is not installed
__version__ = "unknown"
return {
"version": __version__,
"parallel_read_safe": True,
"parallel_write_safe": True,
}
| mit |
RO-ny9/python-for-android | python3-alpha/python3-src/Lib/xmlrpc/server.py | 45 | 35818 | """XML-RPC Servers.
This module can be used to create simple XML-RPC servers
by creating a server and installing functions, installing a
class instance, or extending the SimpleXMLRPCServer
class.
It can also be used to handle XML-RPC requests in a CGI
environment using CGIXMLRPCRequestHandler.
The Doc* classes can be used to create XML-RPC servers that
serve pydoc-style documentation in response to HTTP
GET requests. This documentation is dynamically generated
based on the functions and methods registered with the
server.
A list of possible usage patterns follows:
1. Install functions:
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_function(pow)
server.register_function(lambda x,y: x+y, 'add')
server.serve_forever()
2. Install an instance:
class MyFuncs:
def __init__(self):
# make all of the sys functions available through sys.func_name
import sys
self.sys = sys
def _listMethods(self):
# implement this method so that system.listMethods
# knows to advertise the sys methods
return list_public_methods(self) + \
['sys.' + method for method in list_public_methods(self.sys)]
def pow(self, x, y): return pow(x, y)
def add(self, x, y) : return x + y
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_introspection_functions()
server.register_instance(MyFuncs())
server.serve_forever()
3. Install an instance with custom dispatch method:
class Math:
def _listMethods(self):
# this method must be present for system.listMethods
# to work
return ['add', 'pow']
def _methodHelp(self, method):
# this method must be present for system.methodHelp
# to work
if method == 'add':
return "add(2,3) => 5"
elif method == 'pow':
return "pow(x, y[, z]) => number"
else:
# By convention, return empty
# string if no help is available
return ""
def _dispatch(self, method, params):
if method == 'pow':
return pow(*params)
elif method == 'add':
return params[0] + params[1]
else:
raise ValueError('bad method')
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_introspection_functions()
server.register_instance(Math())
server.serve_forever()
4. Subclass SimpleXMLRPCServer:
class MathServer(SimpleXMLRPCServer):
def _dispatch(self, method, params):
try:
# We are forcing the 'export_' prefix on methods that are
# callable through XML-RPC to prevent potential security
# problems
func = getattr(self, 'export_' + method)
except AttributeError:
raise Exception('method "%s" is not supported' % method)
else:
return func(*params)
def export_add(self, x, y):
return x + y
server = MathServer(("localhost", 8000))
server.serve_forever()
5. CGI script:
server = CGIXMLRPCRequestHandler()
server.register_function(pow)
server.handle_request()
"""
# Written by Brian Quinlan (brian@sweetapp.com).
# Based on code written by Fredrik Lundh.
from xmlrpc.client import Fault, dumps, loads, gzip_encode, gzip_decode
from http.server import BaseHTTPRequestHandler
import http.server
import socketserver
import sys
import os
import re
import pydoc
import inspect
import traceback
try:
import fcntl
except ImportError:
fcntl = None
def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
"""resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d
Resolves a dotted attribute name to an object. Raises
an AttributeError if any attribute in the chain starts with a '_'.
If the optional allow_dotted_names argument is false, dots are not
supported and this function operates similar to getattr(obj, attr).
"""
if allow_dotted_names:
attrs = attr.split('.')
else:
attrs = [attr]
for i in attrs:
if i.startswith('_'):
raise AttributeError(
'attempt to access private attribute "%s"' % i
)
else:
obj = getattr(obj,i)
return obj
def list_public_methods(obj):
"""Returns a list of attribute strings, found in the specified
object, which represent callable attributes"""
return [member for member in dir(obj)
if not member.startswith('_') and
hasattr(getattr(obj, member), '__call__')]
class SimpleXMLRPCDispatcher:
"""Mix-in class that dispatches XML-RPC requests.
This class is used to register XML-RPC method handlers
and then to dispatch them. This class doesn't need to be
    instantiated directly when used by SimpleXMLRPCServer, but it
    can be instantiated when used by the MultiPathXMLRPCServer.
"""
def __init__(self, allow_none=False, encoding=None):
self.funcs = {}
self.instance = None
self.allow_none = allow_none
self.encoding = encoding or 'utf-8'
def register_instance(self, instance, allow_dotted_names=False):
"""Registers an instance to respond to XML-RPC requests.
Only one instance can be installed at a time.
If the registered instance has a _dispatch method then that
method will be called with the name of the XML-RPC method and
its parameters as a tuple
e.g. instance._dispatch('add',(2,3))
If the registered instance does not have a _dispatch method
then the instance will be searched to find a matching method
and, if found, will be called. Methods beginning with an '_'
are considered private and will not be called by
SimpleXMLRPCServer.
If a registered function matches a XML-RPC request, then it
will be called instead of the registered instance.
If the optional allow_dotted_names argument is true and the
instance does not have a _dispatch method, method names
containing dots are supported and resolved, as long as none of
the name segments start with an '_'.
*** SECURITY WARNING: ***
Enabling the allow_dotted_names options allows intruders
to access your module's global variables and may allow
intruders to execute arbitrary code on your machine. Only
use this option on a secure, closed network.
"""
self.instance = instance
self.allow_dotted_names = allow_dotted_names
def register_function(self, function, name=None):
"""Registers a function to respond to XML-RPC requests.
The optional name argument can be used to set a Unicode name
for the function.
"""
if name is None:
name = function.__name__
self.funcs[name] = function
def register_introspection_functions(self):
"""Registers the XML-RPC introspection methods in the system
namespace.
see http://xmlrpc.usefulinc.com/doc/reserved.html
"""
self.funcs.update({'system.listMethods' : self.system_listMethods,
'system.methodSignature' : self.system_methodSignature,
'system.methodHelp' : self.system_methodHelp})
def register_multicall_functions(self):
"""Registers the XML-RPC multicall method in the system
namespace.
see http://www.xmlrpc.com/discuss/msgReader$1208"""
self.funcs.update({'system.multicall' : self.system_multicall})
def _marshaled_dispatch(self, data, dispatch_method = None, path = None):
"""Dispatches an XML-RPC method from marshalled (XML) data.
XML-RPC methods are dispatched from the marshalled (XML) data
using the _dispatch method and the result is returned as
marshalled data. For backwards compatibility, a dispatch
function can be provided as an argument (see comment in
SimpleXMLRPCRequestHandler.do_POST) but overriding the
existing method through subclassing is the preferred means
of changing method dispatch behavior.
"""
try:
params, method = loads(data)
# generate response
if dispatch_method is not None:
response = dispatch_method(method, params)
else:
response = self._dispatch(method, params)
# wrap response in a singleton tuple
response = (response,)
response = dumps(response, methodresponse=1,
allow_none=self.allow_none, encoding=self.encoding)
except Fault as fault:
response = dumps(fault, allow_none=self.allow_none,
encoding=self.encoding)
except:
# report exception back to server
exc_type, exc_value, exc_tb = sys.exc_info()
response = dumps(
Fault(1, "%s:%s" % (exc_type, exc_value)),
encoding=self.encoding, allow_none=self.allow_none,
)
return response.encode(self.encoding)
def system_listMethods(self):
"""system.listMethods() => ['add', 'subtract', 'multiple']
Returns a list of the methods supported by the server."""
methods = set(self.funcs.keys())
if self.instance is not None:
# Instance can implement _listMethod to return a list of
# methods
if hasattr(self.instance, '_listMethods'):
methods |= set(self.instance._listMethods())
# if the instance has a _dispatch method then we
# don't have enough information to provide a list
# of methods
elif not hasattr(self.instance, '_dispatch'):
methods |= set(list_public_methods(self.instance))
return sorted(methods)
def system_methodSignature(self, method_name):
"""system.methodSignature('add') => [double, int, int]
Returns a list describing the signature of the method. In the
above example, the add method takes two integers as arguments
and returns a double result.
This server does NOT support system.methodSignature."""
# See http://xmlrpc.usefulinc.com/doc/sysmethodsig.html
return 'signatures not supported'
def system_methodHelp(self, method_name):
"""system.methodHelp('add') => "Adds two integers together"
Returns a string containing documentation for the specified method."""
method = None
if method_name in self.funcs:
method = self.funcs[method_name]
elif self.instance is not None:
# Instance can implement _methodHelp to return help for a method
if hasattr(self.instance, '_methodHelp'):
return self.instance._methodHelp(method_name)
# if the instance has a _dispatch method then we
# don't have enough information to provide help
elif not hasattr(self.instance, '_dispatch'):
try:
method = resolve_dotted_attribute(
self.instance,
method_name,
self.allow_dotted_names
)
except AttributeError:
pass
        # Note that we aren't checking that the method is actually
        # a callable object of some kind
if method is None:
return ""
else:
import pydoc
return pydoc.getdoc(method)
def system_multicall(self, call_list):
"""system.multicall([{'methodName': 'add', 'params': [2, 2]}, ...]) => \
[[4], ...]
Allows the caller to package multiple XML-RPC calls into a single
request.
See http://www.xmlrpc.com/discuss/msgReader$1208
"""
results = []
for call in call_list:
method_name = call['methodName']
params = call['params']
try:
# XXX A marshalling error in any response will fail the entire
# multicall. If someone cares they should fix this.
results.append([self._dispatch(method_name, params)])
except Fault as fault:
results.append(
{'faultCode' : fault.faultCode,
'faultString' : fault.faultString}
)
except:
exc_type, exc_value, exc_tb = sys.exc_info()
results.append(
{'faultCode' : 1,
'faultString' : "%s:%s" % (exc_type, exc_value)}
)
return results
def _dispatch(self, method, params):
"""Dispatches the XML-RPC method.
XML-RPC calls are forwarded to a registered function that
matches the called XML-RPC method name. If no such function
exists then the call is forwarded to the registered instance,
if available.
If the registered instance has a _dispatch method then that
method will be called with the name of the XML-RPC method and
its parameters as a tuple
e.g. instance._dispatch('add',(2,3))
If the registered instance does not have a _dispatch method
then the instance will be searched to find a matching method
and, if found, will be called.
Methods beginning with an '_' are considered private and will
not be called.
"""
func = None
try:
# check to see if a matching function has been registered
func = self.funcs[method]
except KeyError:
if self.instance is not None:
# check for a _dispatch method
if hasattr(self.instance, '_dispatch'):
return self.instance._dispatch(method, params)
else:
# call instance method directly
try:
func = resolve_dotted_attribute(
self.instance,
method,
self.allow_dotted_names
)
except AttributeError:
pass
if func is not None:
return func(*params)
else:
raise Exception('method "%s" is not supported' % method)
class SimpleXMLRPCRequestHandler(BaseHTTPRequestHandler):
"""Simple XML-RPC request handler class.
Handles all HTTP POST requests and attempts to decode them as
XML-RPC requests.
"""
# Class attribute listing the accessible path components;
# paths not on this list will result in a 404 error.
rpc_paths = ('/', '/RPC2')
#if not None, encode responses larger than this, if possible
encode_threshold = 1400 #a common MTU
    #Override from StreamRequestHandler: full buffering of output
#and no Nagle.
wbufsize = -1
disable_nagle_algorithm = True
# a re to match a gzip Accept-Encoding
aepattern = re.compile(r"""
\s* ([^\s;]+) \s* #content-coding
(;\s* q \s*=\s* ([0-9\.]+))? #q
""", re.VERBOSE | re.IGNORECASE)
def accept_encodings(self):
r = {}
ae = self.headers.get("Accept-Encoding", "")
for e in ae.split(","):
match = self.aepattern.match(e)
if match:
v = match.group(3)
v = float(v) if v else 1.0
r[match.group(1)] = v
return r
def is_rpc_path_valid(self):
if self.rpc_paths:
return self.path in self.rpc_paths
else:
# If .rpc_paths is empty, just assume all paths are legal
return True
def do_POST(self):
"""Handles the HTTP POST request.
Attempts to interpret all HTTP POST requests as XML-RPC calls,
which are forwarded to the server's _dispatch method for handling.
"""
# Check that the path is legal
if not self.is_rpc_path_valid():
self.report_404()
return
try:
# Get arguments by reading body of request.
# We read this in chunks to avoid straining
# socket.read(); around the 10 or 15Mb mark, some platforms
# begin to have problems (bug #792570).
max_chunk_size = 10*1024*1024
size_remaining = int(self.headers["content-length"])
L = []
while size_remaining:
chunk_size = min(size_remaining, max_chunk_size)
L.append(self.rfile.read(chunk_size))
size_remaining -= len(L[-1])
data = b''.join(L)
data = self.decode_request_content(data)
if data is None:
return #response has been sent
# In previous versions of SimpleXMLRPCServer, _dispatch
# could be overridden in this class, instead of in
# SimpleXMLRPCDispatcher. To maintain backwards compatibility,
# check to see if a subclass implements _dispatch and dispatch
# using that method if present.
response = self.server._marshaled_dispatch(
data, getattr(self, '_dispatch', None), self.path
)
except Exception as e: # This should only happen if the module is buggy
# internal error, report as HTTP server error
self.send_response(500)
# Send information about the exception if requested
if hasattr(self.server, '_send_traceback_header') and \
self.server._send_traceback_header:
self.send_header("X-exception", str(e))
trace = traceback.format_exc()
trace = str(trace.encode('ASCII', 'backslashreplace'), 'ASCII')
self.send_header("X-traceback", trace)
self.send_header("Content-length", "0")
self.end_headers()
else:
self.send_response(200)
self.send_header("Content-type", "text/xml")
if self.encode_threshold is not None:
if len(response) > self.encode_threshold:
q = self.accept_encodings().get("gzip", 0)
if q:
try:
response = gzip_encode(response)
self.send_header("Content-Encoding", "gzip")
except NotImplementedError:
pass
self.send_header("Content-length", str(len(response)))
self.end_headers()
self.wfile.write(response)
def decode_request_content(self, data):
#support gzip encoding of request
encoding = self.headers.get("content-encoding", "identity").lower()
if encoding == "identity":
return data
if encoding == "gzip":
try:
return gzip_decode(data)
except NotImplementedError:
self.send_response(501, "encoding %r not supported" % encoding)
except ValueError:
self.send_response(400, "error decoding gzip content")
else:
self.send_response(501, "encoding %r not supported" % encoding)
self.send_header("Content-length", "0")
self.end_headers()
def report_404 (self):
# Report a 404 error
self.send_response(404)
response = b'No such page'
self.send_header("Content-type", "text/plain")
self.send_header("Content-length", str(len(response)))
self.end_headers()
self.wfile.write(response)
def log_request(self, code='-', size='-'):
"""Selectively log an accepted request."""
if self.server.logRequests:
BaseHTTPRequestHandler.log_request(self, code, size)
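# Illustrative sketch (an assumption, not present in the original source):
# tightening the legal request paths by subclassing, per the rpc_paths note
# above; requests to any other path receive the 404 from report_404().
class _RestrictedPathHandler(SimpleXMLRPCRequestHandler):
    rpc_paths = ('/RPC2',)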
class SimpleXMLRPCServer(socketserver.TCPServer,
SimpleXMLRPCDispatcher):
"""Simple XML-RPC server.
Simple XML-RPC server that allows functions and a single instance
to be installed to handle requests. The default implementation
attempts to dispatch XML-RPC calls to the functions or instance
    installed in the server. Override the _dispatch method inherited
from SimpleXMLRPCDispatcher to change this behavior.
"""
allow_reuse_address = True
# Warning: this is for debugging purposes only! Never set this to True in
    # production code, as it will send out sensitive information (exception
# and stack trace details) when exceptions are raised inside
# SimpleXMLRPCRequestHandler.do_POST
_send_traceback_header = False
def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
logRequests=True, allow_none=False, encoding=None, bind_and_activate=True):
self.logRequests = logRequests
SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)
socketserver.TCPServer.__init__(self, addr, requestHandler, bind_and_activate)
# [Bug #1222790] If possible, set close-on-exec flag; if a
# method spawns a subprocess, the subprocess shouldn't have
# the listening socket open.
if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
flags |= fcntl.FD_CLOEXEC
fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
class MultiPathXMLRPCServer(SimpleXMLRPCServer):
"""Multipath XML-RPC Server
This specialization of SimpleXMLRPCServer allows the user to create
multiple Dispatcher instances and assign them to different
HTTP request paths. This makes it possible to run two or more
'virtual XML-RPC servers' at the same port.
Make sure that the requestHandler accepts the paths in question.
"""
def __init__(self, addr, requestHandler=SimpleXMLRPCRequestHandler,
logRequests=True, allow_none=False, encoding=None, bind_and_activate=True):
SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests, allow_none,
encoding, bind_and_activate)
self.dispatchers = {}
self.allow_none = allow_none
self.encoding = encoding
def add_dispatcher(self, path, dispatcher):
self.dispatchers[path] = dispatcher
return dispatcher
def get_dispatcher(self, path):
return self.dispatchers[path]
def _marshaled_dispatch(self, data, dispatch_method = None, path = None):
try:
response = self.dispatchers[path]._marshaled_dispatch(
data, dispatch_method, path)
except:
# report low level exception back to server
# (each dispatcher should have handled their own
# exceptions)
exc_type, exc_value = sys.exc_info()[:2]
            response = dumps(
                Fault(1, "%s:%s" % (exc_type, exc_value)),
                encoding=self.encoding, allow_none=self.allow_none)
            # encode to bytes so both code paths return the same type
            response = response.encode(self.encoding)
return response
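# Hedged sketch (the _TwoPathHandler name and the port-0 bind are assumptions
# chosen for illustration) of wiring two independent dispatchers to two paths
# on one MultiPathXMLRPCServer; the handler must accept both paths, see
# rpc_paths above.
def _demo_multipath_setup():
    class _TwoPathHandler(SimpleXMLRPCRequestHandler):
        rpc_paths = ('/math', '/strings')
    server = MultiPathXMLRPCServer(('localhost', 0),
                                   requestHandler=_TwoPathHandler)
    math_dispatcher = SimpleXMLRPCDispatcher()
    math_dispatcher.register_function(pow)
    server.add_dispatcher('/math', math_dispatcher)
    string_dispatcher = SimpleXMLRPCDispatcher()
    string_dispatcher.register_function(str.upper, 'upper')
    server.add_dispatcher('/strings', string_dispatcher)
    return server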
class CGIXMLRPCRequestHandler(SimpleXMLRPCDispatcher):
"""Simple handler for XML-RPC data passed through CGI."""
def __init__(self, allow_none=False, encoding=None):
SimpleXMLRPCDispatcher.__init__(self, allow_none, encoding)
def handle_xmlrpc(self, request_text):
"""Handle a single XML-RPC request"""
response = self._marshaled_dispatch(request_text)
print('Content-Type: text/xml')
print('Content-Length: %d' % len(response))
print()
sys.stdout.flush()
sys.stdout.buffer.write(response)
sys.stdout.buffer.flush()
def handle_get(self):
"""Handle a single HTTP GET request.
Default implementation indicates an error because
XML-RPC uses the POST method.
"""
code = 400
message, explain = BaseHTTPRequestHandler.responses[code]
response = http.server.DEFAULT_ERROR_MESSAGE % \
{
'code' : code,
'message' : message,
'explain' : explain
}
response = response.encode('utf-8')
print('Status: %d %s' % (code, message))
print('Content-Type: %s' % http.server.DEFAULT_ERROR_CONTENT_TYPE)
print('Content-Length: %d' % len(response))
print()
sys.stdout.flush()
sys.stdout.buffer.write(response)
sys.stdout.buffer.flush()
def handle_request(self, request_text=None):
"""Handle a single XML-RPC request passed through a CGI post method.
If no XML data is given then it is read from stdin. The resulting
XML-RPC response is printed to stdout along with the correct HTTP
headers.
"""
if request_text is None and \
os.environ.get('REQUEST_METHOD', None) == 'GET':
self.handle_get()
else:
# POST data is normally available through stdin
try:
length = int(os.environ.get('CONTENT_LENGTH', None))
except (ValueError, TypeError):
length = -1
if request_text is None:
request_text = sys.stdin.read(length)
self.handle_xmlrpc(request_text)
# -----------------------------------------------------------------------------
# Self documenting XML-RPC Server.
class ServerHTMLDoc(pydoc.HTMLDoc):
"""Class used to generate pydoc HTML document for a server"""
def markup(self, text, escape=None, funcs={}, classes={}, methods={}):
"""Mark up some plain text, given a context of symbols to look for.
Each context dictionary maps object names to anchor names."""
escape = escape or self.escape
results = []
here = 0
# XXX Note that this regular expression does not allow for the
# hyperlinking of arbitrary strings being used as method
# names. Only methods with names consisting of word characters
# and '.'s are hyperlinked.
pattern = re.compile(r'\b((http|ftp)://\S+[\w/]|'
r'RFC[- ]?(\d+)|'
r'PEP[- ]?(\d+)|'
r'(self\.)?((?:\w|\.)+))\b')
while 1:
match = pattern.search(text, here)
if not match: break
start, end = match.span()
results.append(escape(text[here:start]))
all, scheme, rfc, pep, selfdot, name = match.groups()
if scheme:
url = escape(all).replace('"', '"')
results.append('<a href="%s">%s</a>' % (url, url))
elif rfc:
url = 'http://www.rfc-editor.org/rfc/rfc%d.txt' % int(rfc)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif pep:
url = 'http://www.python.org/dev/peps/pep-%04d/' % int(pep)
results.append('<a href="%s">%s</a>' % (url, escape(all)))
elif text[end:end+1] == '(':
results.append(self.namelink(name, methods, funcs, classes))
elif selfdot:
results.append('self.<strong>%s</strong>' % name)
else:
results.append(self.namelink(name, classes))
here = end
results.append(escape(text[here:]))
return ''.join(results)
def docroutine(self, object, name, mod=None,
funcs={}, classes={}, methods={}, cl=None):
"""Produce HTML documentation for a function or method object."""
anchor = (cl and cl.__name__ or '') + '-' + name
note = ''
title = '<a name="%s"><strong>%s</strong></a>' % (
self.escape(anchor), self.escape(name))
if inspect.ismethod(object):
args, varargs, varkw, defaults = inspect.getargspec(object)
# exclude the argument bound to the instance, it will be
# confusing to the non-Python user
argspec = inspect.formatargspec (
args[1:],
varargs,
varkw,
defaults,
formatvalue=self.formatvalue
)
elif inspect.isfunction(object):
args, varargs, varkw, defaults = inspect.getargspec(object)
argspec = inspect.formatargspec(
args, varargs, varkw, defaults, formatvalue=self.formatvalue)
else:
argspec = '(...)'
if isinstance(object, tuple):
argspec = object[0] or argspec
docstring = object[1] or ""
else:
docstring = pydoc.getdoc(object)
decl = title + argspec + (note and self.grey(
'<font face="helvetica, arial">%s</font>' % note))
doc = self.markup(
docstring, self.preformat, funcs, classes, methods)
doc = doc and '<dd><tt>%s</tt></dd>' % doc
return '<dl><dt>%s</dt>%s</dl>\n' % (decl, doc)
def docserver(self, server_name, package_documentation, methods):
"""Produce HTML documentation for an XML-RPC server."""
fdict = {}
for key, value in methods.items():
fdict[key] = '#-' + key
fdict[value] = fdict[key]
server_name = self.escape(server_name)
head = '<big><big><strong>%s</strong></big></big>' % server_name
result = self.heading(head, '#ffffff', '#7799ee')
doc = self.markup(package_documentation, self.preformat, fdict)
doc = doc and '<tt>%s</tt>' % doc
result = result + '<p>%s</p>\n' % doc
contents = []
method_items = sorted(methods.items())
for key, value in method_items:
contents.append(self.docroutine(value, key, funcs=fdict))
result = result + self.bigsection(
'Methods', '#ffffff', '#eeaa77', ''.join(contents))
return result
class XMLRPCDocGenerator:
"""Generates documentation for an XML-RPC server.
    This class is designed as a mix-in and should not
be constructed directly.
"""
def __init__(self):
# setup variables used for HTML documentation
self.server_name = 'XML-RPC Server Documentation'
self.server_documentation = \
"This server exports the following methods through the XML-RPC "\
"protocol."
self.server_title = 'XML-RPC Server Documentation'
def set_server_title(self, server_title):
"""Set the HTML title of the generated server documentation"""
self.server_title = server_title
def set_server_name(self, server_name):
"""Set the name of the generated HTML server documentation"""
self.server_name = server_name
def set_server_documentation(self, server_documentation):
"""Set the documentation string for the entire server."""
self.server_documentation = server_documentation
def generate_html_documentation(self):
"""generate_html_documentation() => html documentation for the server
Generates HTML documentation for the server using introspection for
installed functions and instances that do not implement the
_dispatch method. Alternatively, instances can choose to implement
the _get_method_argstring(method_name) method to provide the
argument string used in the documentation and the
_methodHelp(method_name) method to provide the help text used
in the documentation."""
methods = {}
for method_name in self.system_listMethods():
if method_name in self.funcs:
method = self.funcs[method_name]
elif self.instance is not None:
method_info = [None, None] # argspec, documentation
if hasattr(self.instance, '_get_method_argstring'):
method_info[0] = self.instance._get_method_argstring(method_name)
if hasattr(self.instance, '_methodHelp'):
method_info[1] = self.instance._methodHelp(method_name)
method_info = tuple(method_info)
if method_info != (None, None):
method = method_info
elif not hasattr(self.instance, '_dispatch'):
try:
method = resolve_dotted_attribute(
self.instance,
method_name
)
except AttributeError:
method = method_info
else:
method = method_info
else:
assert 0, "Could not find method in self.functions and no "\
"instance installed"
methods[method_name] = method
documenter = ServerHTMLDoc()
documentation = documenter.docserver(
self.server_name,
self.server_documentation,
methods
)
return documenter.page(self.server_title, documentation)
class DocXMLRPCRequestHandler(SimpleXMLRPCRequestHandler):
"""XML-RPC and documentation request handler class.
Handles all HTTP POST requests and attempts to decode them as
XML-RPC requests.
Handles all HTTP GET requests and interprets them as requests
for documentation.
"""
def do_GET(self):
"""Handles the HTTP GET request.
Interpret all HTTP GET requests as requests for server
documentation.
"""
# Check that the path is legal
if not self.is_rpc_path_valid():
self.report_404()
return
response = self.server.generate_html_documentation().encode('utf-8')
self.send_response(200)
self.send_header("Content-type", "text/html")
self.send_header("Content-length", str(len(response)))
self.end_headers()
self.wfile.write(response)
class DocXMLRPCServer( SimpleXMLRPCServer,
XMLRPCDocGenerator):
"""XML-RPC and HTML documentation server.
Adds the ability to serve server documentation to the capabilities
of SimpleXMLRPCServer.
"""
def __init__(self, addr, requestHandler=DocXMLRPCRequestHandler,
logRequests=True, allow_none=False, encoding=None,
bind_and_activate=True):
SimpleXMLRPCServer.__init__(self, addr, requestHandler, logRequests,
allow_none, encoding, bind_and_activate)
XMLRPCDocGenerator.__init__(self)
class DocCGIXMLRPCRequestHandler( CGIXMLRPCRequestHandler,
XMLRPCDocGenerator):
"""Handler for XML-RPC data and documentation requests passed through
CGI"""
def handle_get(self):
"""Handles the HTTP GET request.
Interpret all HTTP GET requests as requests for server
documentation.
"""
response = self.generate_html_documentation().encode('utf-8')
print('Content-Type: text/html')
print('Content-Length: %d' % len(response))
print()
sys.stdout.flush()
sys.stdout.buffer.write(response)
sys.stdout.buffer.flush()
def __init__(self):
CGIXMLRPCRequestHandler.__init__(self)
XMLRPCDocGenerator.__init__(self)
if __name__ == '__main__':
print('Running XML-RPC server on port 8000')
server = SimpleXMLRPCServer(("localhost", 8000))
server.register_function(pow)
server.register_function(lambda x,y: x+y, 'add')
server.serve_forever()
| apache-2.0 |
allthingstalk/python-sdk | examples/raspi/led_actuator.py | 1 | 2048 | #!/usr/bin/env python3
# _ _ _ _____ _ _ _____ _ _ ___ ___ _ __
# /_\ | | |_ _| |_ (_)_ _ __ _ __|_ _|_ _| | |__ / __| \| |/ /
# / _ \| | | | | | ' \| | ' \/ _` (_-< | |/ _` | | / / \__ \ |) | ' <
# /_/ \_\_|_| |_| |_||_|_|_||_\__, /__/ |_|\__,_|_|_\_\ |___/___/|_|\_\
# |___/
#
# Copyright 2017 AllThingsTalk
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# AllThingsTalk LED Actuator experiment
#
# Before running this experiment, make certain that the RPi.GPIO and
# allthingstalk libraries are installed and globally accessible.
#
import time
import RPi.GPIO as GPIO
from allthingstalk import Device, BooleanAsset, Client, Asset
# Parameters used to authorize and identify your device
# Get them on maker.allthingstalk.com
DEVICE_TOKEN = '<DEVICE_TOKEN>'
DEVICE_ID = '<DEVICE_ID>'
class LedActuator(Device):
led = BooleanAsset(kind=Asset.ACTUATOR)
# Authorize and connect your device with the Cloud
client = Client(DEVICE_TOKEN)
device = LedActuator(client=client, id=DEVICE_ID)
# LED is connected to GPIO4
led_pin = 4
# Led's pin needs to be in OUTPUT mode
GPIO.setmode(GPIO.BCM)
GPIO.setup(led_pin, GPIO.OUT)
@LedActuator.command.led
def on_led(device, value, at):
# Turn led On or Off depending on the received Command
GPIO.output(led_pin, value)
# Send value to the cloud to reflect physical state of the led
device.led = value
print('Led state updated to %s.' % value)
while True:
print('Waiting for actuation...')
time.sleep(5)
| apache-2.0 |
amy12xx/keras | examples/mnist_mlp.py | 69 | 1719 | from __future__ import absolute_import
from __future__ import print_function
import numpy as np
np.random.seed(1337) # for reproducibility
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.optimizers import SGD, Adam, RMSprop
from keras.utils import np_utils
'''
Train a simple deep NN on the MNIST dataset.
Get to 98.30% test accuracy after 20 epochs (there is *a lot* of margin for parameter tuning).
2 seconds per epoch on a GRID K520 GPU.
'''
batch_size = 128
nb_classes = 10
nb_epoch = 20
# the data, shuffled and split between train and test sets
(X_train, y_train), (X_test, y_test) = mnist.load_data()
X_train = X_train.reshape(60000, 784)
X_test = X_test.reshape(10000, 784)
X_train = X_train.astype("float32")
X_test = X_test.astype("float32")
X_train /= 255
X_test /= 255
print(X_train.shape[0], 'train samples')
print(X_test.shape[0], 'test samples')
# convert class vectors to binary class matrices
Y_train = np_utils.to_categorical(y_train, nb_classes)
Y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(784, 128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128, 128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128, 10))
model.add(Activation('softmax'))
rms = RMSprop()
model.compile(loss='categorical_crossentropy', optimizer=rms)
model.fit(X_train, Y_train, batch_size=batch_size, nb_epoch=nb_epoch, show_accuracy=True, verbose=2, validation_data=(X_test, Y_test))
score = model.evaluate(X_test, Y_test, show_accuracy=True, verbose=0)
print('Test score:', score[0])
print('Test accuracy:', score[1])
| mit |
palmerjh/iEBE | PlayGround/job-2/crank/SequentialEventDriver.py | 6 | 27521 | #! /usr/bin/env python
# This package performs sequential calculations of a given number of events,
# after reading parameters from ParameterDict.py. The most important control
# parameters are set in controlParameterList; for other parameters see
# allParameterLists. This package is intended to run in the background, so
# only basic output is generated. When necessary, other functions given in the
# package for single executables can be invoked individually for more
# flexibility.
# The main entry is the sequentialEventDriverShell function.
from os import path, getcwd, remove, makedirs
from sys import stdout
from shutil import move, copy, rmtree
from glob import glob
from subprocess import call
import numpy as np
class ExecutionError(Exception): pass # used to signal my own exception
# set global default parameters
allParameterLists = [
'controlParameterList',
'centralityParameters',
'superMCControl',
'superMCParameters',
'hydroControl',
'hydroParameters',
'iSSControl',
'iSSParameters',
'iSControl',
'iSParameters',
'osc2uControl',
'osc2uParameters',
'urqmdControl',
'urqmdParameters',
'binUtilitiesControl',
'binUtilitiesParameters',
]
controlParameterList = {
'simulation_type' : 'hybrid', # 'hybrid' or 'hydro'
'niceness' : 10, # range from 0 to 19 for process priority, 0 for the highest priority
'numberOfEvents' : 10, # how many sequential calculations
'rootDir' : path.abspath('../'),
'resultDir' : path.abspath('placeholder/'), # final results will be saved here, absolute
'eventResultDirPattern' : 'event-%d', # %d->event_id, where event results are saved
'eventResultDir' : None, # used to pass event result folder from sequentialEventDriverShell to others
'combinedUrqmdFile' : 'urqmdCombined.txt', # urqmd from all events will be combined into this file
'buildCMD' : 'make build',
'cleanCMD' : 'make clean',
}
centralityParameters = {
'centrality': '0-5%', # centrality bin
'cut_type': 'total_entropy',
# centrality cut variable: total_entropy or Npart
}
superMCControl = {
'mainDir' : 'superMC',
'dataDir' : 'data', # where initial conditions are stored, relative
'dataFiles' : '*event*.dat', # data filenames
'numberOfEventsParameterName' : 'nev',
'executable' : 'superMC.e',
}
superMCParameters = {
'which_mc_model' : 5,
'sub_model' : 1,
'Npmin' : 0,
'Npmax' : 1000,
'bmin' : 0,
'bmax' : 20,
'cutdSdy' : 1,
'cutdSdy_lowerBound' : 551.864,
'cutdSdy_upperBound' : 1000000.0,
'Aproj' : 208,
'Atarg' : 208,
'ecm' : 2760,
'finalFactor' : 56.763,
'use_ed' : 0,
'alpha' : 0.118,
'lambda' : 0.288,
'operation' : 1,
'cc_fluctuation_model' : 6,
}
hydroControl = {
'mainDir' : 'VISHNew',
'initialConditionDir' : 'Initial', # hydro initial condition folder, relative
'initialConditionFile' : 'InitialSd.dat', # IC filename
'resultDir' : 'results', # hydro results folder, relative
'resultFiles' : '*.dat', # results files
'saveICFile' : True, # whether to save initial condition file
'saveResultGlobs' : ['surface.dat', 'dec*.dat', 'ecc*.dat'], # files match these globs will be saved
'executable' : 'VISHNew.e',
}
hydroParameters = {
'IINIT' : 2,
'IEOS' : 7,
'iEin' : 1,
'vis' : 0.08,
'iLS' : 130,
'T0' : 0.6, # tau_0
'Edec' : 0.3, # 0.3->160 MeV, 0.18->120 MeV
'factor' : 1.0
}
iSSControl = {
'mainDir' : 'iSS',
'operationDir' : 'results',
'saveResultGlobs' : ['*vn*.dat'], # files in the operation directory matching these globs will be saved
'OSCARFile' : 'OSCAR.DAT',
'executable' : 'iSS.e',
}
iSSParameters = {
'calculate_vn' : 0,
'MC_sampling' : 2,
'number_of_repeated_sampling' : 10,
'y_LB' : -2.5,
'y_RB' : 2.5,
}
iSControl = {
'mainDir' : 'iS',
'operationDir' : 'results',
'saveResultGlobs' : ['dN_ptdptdphidy.dat', '*_vndata.dat', 'v2data*'], # files in the operation directory matching these globs will be saved
'executables' : ('iS.e', 'resonance.e', 'iInteSp.e'),
'entryShell' : 'iS_withResonance.sh',
}
iSParameters = {}
osc2uControl = {
'mainDir' : 'osc2u',
'outputFilename' : 'fort.14',
'saveOSCAR' : True, # whether to save OSCAR file
'executable' : 'osc2u.e',
}
osc2uParameters = {}
urqmdControl = {
'mainDir' : 'urqmd',
'controlFilename' : 'uqmd.burner',
'ICFilename' : 'OSCAR.input',
'outputFilename' : 'particle_list.dat',
'saveOutputFile' : True, # whether to save the output file
'executable' : 'urqmd.e',
'entryShell' : 'runqmd.sh',
}
urqmdParameters = {}
binUtilitiesControl = {
'mainDir' : 'binUtilities',
'operationDir' : 'results',
'saveResultGlobs' : ['*flow*.dat', 'pT_*.dat'], # files in the operation directory matching these globs will be saved
'executable' : 'urqmdBinShell.py',
}
binUtilitiesParameters = {}
EbeCollectorControl = {
'mainDir' : 'EbeCollector',
'executable_hybrid' : 'EbeCollectorShell_hydroWithUrQMD.py',
'executable_hydro' : 'EbeCollectorShell_pureHydro.py',
}
EbeCollectorParameters = {
'subfolderPattern' : '"event-(\d*)"',
'databaseFilename' : 'collected.db',
}
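# A hedged illustration (not shipped with this script) of the ParameterDict.py
# file that readInParameters() below looks for: any dictionary named after an
# entry in allParameterLists overrides the matching defaults above, e.g.
#
#   # ParameterDict.py
#   controlParameterList = {'numberOfEvents': 2}
#   hydroParameters = {'vis': 0.16}
#
# The values shown are assumptions chosen only for the example.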
def readInParameters():
""" Overwrite default parameter lists with those in ParameterDict. """
try:
import ParameterDict
for aParameterList in allParameterLists:
if aParameterList in dir(ParameterDict):
exec("%s.update(ParameterDict.%s)" % (aParameterList, aParameterList))
except (IOError, SyntaxError):
raise ExecutionError("Errors trying to open/read the ParameterDict.py file!")
def translate_centrality_cut():
"""
translate the centrality boundaries to Npart, dS/dy, b values and update
the parameter lists for simulations
"""
cut_type = centralityParameters['cut_type']
if cut_type not in ['total_entropy', 'Npart']:
print "invalid centrality cut type: ", cut_type
exit(1)
centrality_string = centralityParameters['centrality']
centrality_lower_bound = float(centrality_string.split('-')[0])
centrality_upper_bound = float(
centrality_string.split('-')[1].split('%')[0])
if superMCParameters['which_mc_model'] == 5:
model_name = 'MCGlb'
elif superMCParameters['which_mc_model'] == 1:
model_name = 'MCKLN'
if superMCParameters['cc_fluctuation_model'] != 0:
multiplicity_fluctuation = 'withMultFluct'
else:
multiplicity_fluctuation = 'noMultFluct'
collision_energy = str(superMCParameters['ecm'])
Aproj = superMCParameters['Aproj']
Atrag = superMCParameters['Atarg']
nucleus_name_dict = {
208: 'Pb',
197: 'Au',
238: 'U',
63: 'Cu',
1: 'p',
2: 'd',
3: 'He',
}
if Aproj == Atrag: #symmetric collision
nucleus_name = nucleus_name_dict[Aproj]+nucleus_name_dict[Atrag]
else: # asymmetric collision
nucleus_name = (nucleus_name_dict[min(Aproj, Atrag)]
+ nucleus_name_dict[max(Aproj, Atrag)])
centrality_cut_file_name = (
'iebe_centralityCut_%s_%s_sigmaNN_gauss_d0.9_%s.dat'
% (cut_type, model_name + nucleus_name + collision_energy,
multiplicity_fluctuation)
)
try:
centrality_cut_file = np.loadtxt(
path.join(path.abspath('../centrality_cut_tables'),
centrality_cut_file_name))
except IOError:
print "Can not find the centrality cut table for the collision system"
print centrality_cut_file_name
exit(1)
lower_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_lower_bound+1e-30))
upper_idx = (
centrality_cut_file[:, 0].searchsorted(centrality_upper_bound))
cut_value_upper = (
(centrality_cut_file[lower_idx-1, 1]
- centrality_cut_file[lower_idx, 1])
/(centrality_cut_file[lower_idx-1, 0]
- centrality_cut_file[lower_idx, 0])
*(centrality_lower_bound - centrality_cut_file[lower_idx-1, 0])
+ centrality_cut_file[lower_idx-1, 1]
)
cut_value_low = (
(centrality_cut_file[upper_idx-1, 1]
- centrality_cut_file[upper_idx, 1])
/(centrality_cut_file[upper_idx-1, 0]
- centrality_cut_file[upper_idx, 0])
*(centrality_upper_bound - centrality_cut_file[upper_idx-1, 0])
+ centrality_cut_file[upper_idx-1, 1]
)
if cut_type == 'total_entropy':
superMCParameters['cutdSdy'] = 1
npart_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 2])
npart_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 3])
b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 4])
b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 5])
superMCParameters['cutdSdy_lowerBound'] = cut_value_low
superMCParameters['cutdSdy_upperBound'] = cut_value_upper
    elif cut_type == 'Npart':
        superMCParameters['cutdSdy'] = 0
        # for an Npart cut the interpolated cut values are the Npart bounds
        npart_min = cut_value_low
        npart_max = cut_value_upper
        b_min = min(centrality_cut_file[lower_idx-1:upper_idx+1, 4])
        b_max = max(centrality_cut_file[lower_idx-1:upper_idx+1, 5])
superMCParameters['Npmax'] = npart_max
superMCParameters['Npmin'] = npart_min
superMCParameters['bmax'] = b_max
superMCParameters['bmin'] = b_min
#print out information
print '-'*80
print('%s collisions at sqrt{s} = %s A GeV with %s initial conditions'
% (nucleus_name , collision_energy, model_name))
print("Centrality : %g - %g"
% (centrality_lower_bound, centrality_upper_bound) + r"%")
print 'centrality cut on ', cut_type
if cut_type == 'total_entropy':
print 'dS/dy :', cut_value_low, '-', cut_value_upper
print "Npart: ", npart_min, '-', npart_max
print "b: ", b_min, '-', b_max, ' fm'
print '-'*80
return
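# A short aside (an addition, not in the original script): the cut values
# above come from ordinary linear interpolation between neighbouring rows of
# the centrality table. For a table column y sampled at centrality fractions x,
#
#     y(c) = y[i-1] + (y[i] - y[i-1]) / (x[i] - x[i-1]) * (c - x[i-1])
#
# evaluated at the requested lower and upper centrality bound c.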
def generateSuperMCInitialConditions(numberOfEvents):
"""
    Generate initial conditions using superMC, then yield the absolute
    paths of all the initial conditions.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
superMCDirectory = path.join(controlParameterList['rootDir'], superMCControl['mainDir'])
superMCDataDirectory = path.join(superMCDirectory, superMCControl['dataDir'])
superMCExecutable = superMCControl['executable']
# clean up the data subfolder for output
cleanUpFolder(superMCDataDirectory)
# check executable
checkExistenceOfExecutable(path.join(superMCDirectory, superMCExecutable))
# set "nev=#" in superMCParameters
superMCParameters[superMCControl['numberOfEventsParameterName']] = numberOfEvents
# form assignment string
assignments = formAssignmentStringFromDict(superMCParameters)
# form executable string
executableString = "nice -n %d ./" % (ProcessNiceness) + superMCExecutable + assignments
# execute!
run(executableString, cwd=superMCDirectory)
# yield initial conditions
for aFile in glob(path.join(superMCDataDirectory, superMCControl['dataFiles'])):
# then yield it
yield path.join(superMCDataDirectory, aFile)
def hydroWithInitialCondition(aFile):
"""
Perform a single hydro calculation with the given absolute path to an
initial condition. Yield the result files.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
hydroDirectory = path.join(controlParameterList['rootDir'], hydroControl['mainDir'])
hydroICDirectory = path.join(hydroDirectory, hydroControl['initialConditionDir'])
hydroResultsDirectory = path.join(hydroDirectory, hydroControl['resultDir'])
hydroExecutable = hydroControl['executable']
# check executable
checkExistenceOfExecutable(path.join(hydroDirectory, hydroExecutable))
# clean up initial and results folder
cleanUpFolder(hydroICDirectory)
cleanUpFolder(hydroResultsDirectory)
# check existence of the initial conditions
if not path.exists(aFile):
raise ExecutionError("Hydro initial condition file %s not found!" % aFile)
# storing initial condition file
if hydroControl['saveICFile']:
copy(aFile, controlParameterList['eventResultDir'])
# move initial condition to the designated folder
move(aFile, path.join(hydroICDirectory, hydroControl['initialConditionFile']))
# form assignment string
assignments = formAssignmentStringFromDict(hydroParameters)
# form executable string
executableString = "nice -n %d ./" % (ProcessNiceness) + hydroExecutable + assignments
# execute!
run(executableString, cwd=hydroDirectory)
# yield result files
worthStoring = []
for aGlob in hydroControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(hydroResultsDirectory, aGlob)))
for aFile in glob(path.join(hydroResultsDirectory, hydroControl['resultFiles'])):
# check if this file worth storing, then copy to event result folder
if aFile in worthStoring:
copy(aFile, controlParameterList['eventResultDir'])
# yield it
yield path.join(hydroResultsDirectory, aFile)
def iSSWithHydroResultFiles(fileList):
"""
Perform iSS calculation using the given list of hydro result files.
Return the path to the OSCAR file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSSDirectory = path.join(controlParameterList['rootDir'], iSSControl['mainDir'])
iSSOperationDirectory = path.join(iSSDirectory, iSSControl['operationDir']) # for both input & output
iSSOSCARFilepath = path.join(iSSDirectory, iSSControl['OSCARFile'])
iSSExecutable = iSSControl['executable']
# check executable
checkExistenceOfExecutable(path.join(iSSDirectory, iSSExecutable))
# clean up operation folder
cleanUpFolder(iSSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
move(aFile, iSSOperationDirectory)
# form assignment string
assignments = formAssignmentStringFromDict(iSSParameters)
# form executable string
executableString = "nice -n %d ./" % (ProcessNiceness) + iSSExecutable + assignments
# execute!
run(executableString, cwd=iSSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
# return OSCAR file path
return iSSOSCARFilepath
def iSWithResonancesWithHydroResultFiles(fileList):
"""
Perform iS calculation using the given list of hydro result files,
followed by resonance calculations and iInteSp calculations.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
iSDirectory = path.join(controlParameterList['rootDir'], iSControl['mainDir'])
iSOperationDirectory = path.join(iSDirectory, iSControl['operationDir']) # for both input & output
iSExecutables = iSControl['executables']
iSExecutionEntry = iSControl['entryShell']
# check executable
checkExistenceOfExecutables([path.join(iSDirectory, aExe) for aExe in iSExecutables])
# clean up operation folder
cleanUpFolder(iSOperationDirectory)
# check existence of hydro result files and move them to operation folder
for aFile in fileList:
if not path.exists(aFile):
raise ExecutionError("Hydro result file %s not found!" % aFile)
else:
move(aFile, iSOperationDirectory)
# execute!
run("nice -n %d bash ./" % (ProcessNiceness) + iSExecutionEntry, cwd=iSDirectory)
# save some of the important result files
worthStoring = []
for aGlob in iSControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(iSOperationDirectory, aGlob)))
for aFile in glob(path.join(iSOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def osc2uFromOSCARFile(OSCARFilePath):
"""
Execute osc2u program using the given path to the OSCAR file. Return the
path to the output file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
osc2uDirectory = path.join(controlParameterList['rootDir'], osc2uControl['mainDir'])
osc2uOutputFilePath = path.join(osc2uDirectory, osc2uControl['outputFilename'])
osc2uExecutable = osc2uControl['executable']
# check executable
checkExistenceOfExecutable(path.join(osc2uDirectory, osc2uExecutable))
# remove output file if already exists
if path.exists(osc2uOutputFilePath):
remove(osc2uOutputFilePath)
# check existence of the OSCAR file then execute
if path.exists(OSCARFilePath):
run("nice -n %d ./" % (ProcessNiceness) + osc2uExecutable + " < " + OSCARFilePath, cwd=osc2uDirectory)
# save OSCAR file
if osc2uControl['saveOSCAR']:
move(OSCARFilePath, controlParameterList['eventResultDir'])
# return the output file path
return osc2uOutputFilePath
def urqmdFromOsc2uOutputFile(osc2uFilePath):
"""
Perform urqmd using osc2u output file. Return the path to the output
file.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
urqmdDirectory = path.join(controlParameterList['rootDir'], urqmdControl['mainDir'])
urqmdOutputFilePath = path.join(urqmdDirectory, urqmdControl['outputFilename'])
urqmdExecutable = urqmdControl['executable']
urqmdExecutionEntry = urqmdControl['entryShell']
# check executable
checkExistenceOfExecutable(path.join(urqmdDirectory, urqmdExecutable))
# remove output file if already exists
if path.exists(urqmdOutputFilePath):
remove(urqmdOutputFilePath)
# clean up IC
urqmdIC = path.join(urqmdDirectory, urqmdControl['ICFilename'])
if path.exists(urqmdIC):
remove(urqmdIC)
# check existence of the osc2u output, move it then execute urqmd
if path.exists(osc2uFilePath):
move(osc2uFilePath, urqmdIC)
run("nice -n %d bash ./" % (ProcessNiceness) + urqmdExecutionEntry, cwd=urqmdDirectory)
# save output file
if urqmdControl['saveOutputFile']:
copy(urqmdOutputFilePath, controlParameterList['eventResultDir'])
# return the output file path
return urqmdOutputFilePath
def binUrqmdResultFiles(urqmdOutputFile):
"""
Bin the output from URQMD to generate flows etc.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
binUDirectory = path.join(controlParameterList['rootDir'], binUtilitiesControl['mainDir'])
binUOperationDirectory = path.join(binUDirectory, binUtilitiesControl['operationDir'])
binUExecutable = binUtilitiesControl['executable']
# clean up operation folder
cleanUpFolder(binUOperationDirectory)
# check existence urqmd output file
if not path.exists(urqmdOutputFile):
raise ExecutionError("URQMD output file %s not found!" % urqmdOutputFile)
# form executable string
executableString = "nice -n %d python ./" % (ProcessNiceness) + binUExecutable + " " + urqmdOutputFile
# execute!
run(executableString, cwd=binUDirectory)
# save some of the important result files
worthStoring = []
for aGlob in binUtilitiesControl['saveResultGlobs']:
worthStoring.extend(glob(path.join(binUOperationDirectory, aGlob)))
for aFile in glob(path.join(binUOperationDirectory, "*")):
if aFile in worthStoring:
move(aFile, controlParameterList['eventResultDir'])
def collectEbeResultsToDatabaseFrom(folder):
"""
Collect the mostly used results from subfolders that contain hydro
results into a database, including ecc and flow etc.
"""
ProcessNiceness = controlParameterList['niceness']
# set directory strings
collectorDirectory = path.join(controlParameterList['rootDir'], EbeCollectorControl['mainDir'])
# for executable string
simulationType = controlParameterList['simulation_type']
if simulationType == 'hybrid':
collectorExecutable = EbeCollectorControl['executable_hybrid']
executableString = "nice -n %d python ./" % (ProcessNiceness) + collectorExecutable + " %s %g %s %s" % (folder, 1.0/(iSSParameters['number_of_repeated_sampling']*(iSSParameters["y_RB"]-iSSParameters["y_LB"])), EbeCollectorParameters['subfolderPattern'], EbeCollectorParameters['databaseFilename'])
elif simulationType == 'hydro':
collectorExecutable = EbeCollectorControl['executable_hydro']
executableString = "nice -n %d python ./" % (ProcessNiceness) + collectorExecutable + " %s %s %s" % (folder, EbeCollectorParameters['subfolderPattern'], EbeCollectorParameters['databaseFilename'])
# execute
run(executableString, cwd=collectorDirectory)
def formAssignmentStringFromDict(aDict):
"""
Generate a parameter-equals-value string from the given dictionary. The
generated string has a leading blank.
"""
result = ""
for aParameter in aDict.keys():
result += " {}={}".format(aParameter, aDict[aParameter])
return result
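# Minimal usage sketch (the _demo name is an assumption, not in the original):
# the leading blank lets the result concatenate directly onto an executable.
def _demo_assignment_string():
    # "./superMC.e" + " nev=10" -> "./superMC.e nev=10"
    assert formAssignmentStringFromDict({'nev': 10}) == " nev=10"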
def cleanUpFolder(aDir):
""" Delete all data files in the given directory. """
if path.exists(aDir):
try:
run("rm -rf *", cwd=aDir, echo=False)
except OSError:
            pass # very likely the folder is already empty
else:
makedirs(aDir)
def checkExistenceOfExecutable(executableFilename):
""" Check the existence of the executable file, and compile if not. """
if not path.exists(executableFilename):
# build then clean
exec_path, exec_filename = path.split(executableFilename)
run("make", cwd=exec_path)
# if still cannot find the executable
if not path.exists(executableFilename):
raise ExecutionError("Cannot generate executable %s!" % executableFilename)
def checkExistenceOfExecutables(executableFilenames):
"""
Check the existences of the executable files, and compile them if not.
Will call the checkExistenceOfExecutable function.
"""
for executableFilename in executableFilenames:
checkExistenceOfExecutable(executableFilename)
def run(command, cwd=getcwd(), echo=True):
""" Invoke a command from terminal and wait for it to stop. """
if echo:
print("-"*80)
print("In "+cwd)
print("Executing command: "+command)
print("-"*80)
stdout.flush()
return call(command, shell=True, cwd=cwd)
def sequentialEventDriverShell():
"""
    Perform sequential calculations for a given number of events.
Parameters are read from dictionaries given by allParameterList.
"""
try:
# read parameters
readInParameters()
translate_centrality_cut()
# create result folder if necessary
resultDir = controlParameterList['resultDir']
if not path.exists(resultDir):
makedirs(resultDir)
# get simulation type
simulationType = controlParameterList['simulation_type']
# generate initial conditions then loop over initial conditions
event_id = 0
# print current progress to terminal
stdout.write("PROGRESS: %d events out of %d finished.\n" % (event_id, controlParameterList['numberOfEvents']))
stdout.flush()
for aInitialConditionFile in generateSuperMCInitialConditions(controlParameterList['numberOfEvents']):
# get the result folder name for storing results, then create it if necessary
event_id += 1
eventResultDir = path.join(resultDir, controlParameterList['eventResultDirPattern'] % event_id)
controlParameterList['eventResultDir'] = eventResultDir
if path.exists(eventResultDir):
rmtree(eventResultDir)
makedirs(eventResultDir)
# print current progress to terminal
print("Starting event %d..." % event_id)
# perform hydro calculations and get a list of all the result filenames
hydroResultFiles = [aFile for aFile in hydroWithInitialCondition(aInitialConditionFile)]
# fork simulation type here
if simulationType == 'hybrid':
# perform iSS calculation and return the path to the OSCAR file
OSCARFilePath = iSSWithHydroResultFiles(hydroResultFiles)
# perform osc2u
osc2uOutputFilePath = osc2uFromOSCARFile(OSCARFilePath)
# now urqmd
urqmdOutputFilePath = urqmdFromOsc2uOutputFile(osc2uOutputFilePath)
                # copy and concatenate final results from all hydro events into one file
combinedUrqmdFile = path.join(controlParameterList['resultDir'], controlParameterList['combinedUrqmdFile'])
open(combinedUrqmdFile, 'a').writelines(open(urqmdOutputFilePath).readlines())
                # bin the per-event result file to get flows
binUrqmdResultFiles(urqmdOutputFilePath)
                # delete the huge per-event UrQMD output file
remove(urqmdOutputFilePath)
elif simulationType == 'hydro':
# perform iS calculation and resonance decays
iSWithResonancesWithHydroResultFiles(hydroResultFiles)
# print current progress to terminal
stdout.write("PROGRESS: %d events out of %d finished.\n" % (event_id, controlParameterList['numberOfEvents']))
stdout.flush()
# collect mostly used data into a database
collectEbeResultsToDatabaseFrom(resultDir)
except ExecutionError as e:
print("Errors encountered during execution, aborting.")
raise
finally:
print("Thank you for using. Zhi Qiu, 2013-02")
if __name__ == "__main__":
sequentialEventDriverShell()
| gpl-3.0 |
lepricon49/CouchPotatoServer | libs/guessit/matcher.py | 94 | 7768 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2012 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import unicode_literals
from guessit import PY3, u, base_text_type
from guessit.matchtree import MatchTree
from guessit.textutils import normalize_unicode, clean_string
import logging
log = logging.getLogger(__name__)
class IterativeMatcher(object):
def __init__(self, filename, filetype='autodetect', opts=None, transfo_opts=None):
"""An iterative matcher tries to match different patterns that appear
in the filename.
The 'filetype' argument indicates which type of file you want to match.
If it is 'autodetect', the matcher will try to see whether it can guess
that the file corresponds to an episode, or otherwise will assume it is
a movie.
The recognized 'filetype' values are:
[ autodetect, subtitle, info, video, movie, moviesubtitle, movieinfo,
episode, episodesubtitle, episodeinfo ]
The IterativeMatcher works mainly in 2 steps:
First, it splits the filename into a match_tree, which is a tree of groups
which have a semantic meaning, such as episode number, movie title,
etc...
The match_tree created looks like the following:
0000000000000000000000000000000000000000000000000000000000000000000000000000000000 111
0000011111111111112222222222222233333333444444444444444455555555666777777778888888 000
0000000000000000000000000000000001111112011112222333333401123334000011233340000000 000
__________________(The.Prestige).______.[____.HP.______.{__-___}.St{__-___}.Chaps].___
xxxxxttttttttttttt ffffff vvvv xxxxxx ll lll xx xxx ccc
[XCT].Le.Prestige.(The.Prestige).DVDRip.[x264.HP.He-Aac.{Fr-Eng}.St{Fr-Eng}.Chaps].mkv
The first 3 lines indicates the group index in which a char in the
filename is located. So for instance, x264 is the group (0, 4, 1), and
it corresponds to a video codec, denoted by the letter 'v' in the 4th line.
(for more info, see guessit.matchtree.to_string)
Second, it tries to merge all this information into a single object
containing all the found properties, and does some (basic) conflict
resolution when they arise.
When you create the Matcher, you can pass it:
- a list 'opts' of option names, that act as global flags
- a dict 'transfo_opts' of { transfo_name: (transfo_args, transfo_kwargs) }
with which to call the transfo.process() function.
"""
valid_filetypes = ('autodetect', 'subtitle', 'info', 'video',
'movie', 'moviesubtitle', 'movieinfo',
'episode', 'episodesubtitle', 'episodeinfo')
if filetype not in valid_filetypes:
raise ValueError("filetype needs to be one of %s" % valid_filetypes)
if not PY3 and not isinstance(filename, unicode):
log.warning('Given filename to matcher is not unicode...')
filename = filename.decode('utf-8')
filename = normalize_unicode(filename)
if opts is None:
opts = []
if not isinstance(opts, list):
raise ValueError('opts must be a list of option names! Received: type=%s val=%s' %
                 (type(opts), opts))
if transfo_opts is None:
transfo_opts = {}
if not isinstance(transfo_opts, dict):
raise ValueError('transfo_opts must be a dict of { transfo_name: (args, kwargs) }. '
                 'Received: type=%s val=%s' % (type(transfo_opts), transfo_opts))
self.match_tree = MatchTree(filename)
# sanity check: make sure we don't process a (mostly) empty string
if clean_string(filename) == '':
return
mtree = self.match_tree
mtree.guess.set('type', filetype, confidence=1.0)
def apply_transfo(transfo_name, *args, **kwargs):
transfo = __import__('guessit.transfo.' + transfo_name,
globals=globals(), locals=locals(),
fromlist=['process'], level=0)
default_args, default_kwargs = transfo_opts.get(transfo_name, ((), {}))
all_args = args or default_args
all_kwargs = dict(default_kwargs)
all_kwargs.update(kwargs) # keep all kwargs merged together
transfo.process(mtree, *all_args, **all_kwargs)
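# For example, apply_transfo('split_path_components') imports the module
# guessit.transfo.split_path_components and calls its process(mtree)
# function, merging in any default args registered in transfo_opts.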
# 1- first split our path into dirs + basename + ext
apply_transfo('split_path_components')
# 2- guess the file type now (will be useful later)
apply_transfo('guess_filetype', filetype)
if mtree.guess['type'] == 'unknown':
return
# 3- split each of those into explicit groups (separated by parentheses
# or square brackets)
apply_transfo('split_explicit_groups')
# 4- try to match information for specific patterns
# NOTE: order needs to comply to the following:
# - website before language (eg: tvu.org.ru vs russian)
# - language before episodes_rexps
# - properties before language (eg: he-aac vs hebrew)
# - release_group before properties (eg: XviD-?? vs xvid)
if mtree.guess['type'] in ('episode', 'episodesubtitle', 'episodeinfo'):
strategy = [ 'guess_date', 'guess_website', 'guess_release_group',
'guess_properties', 'guess_language',
'guess_video_rexps',
'guess_episodes_rexps', 'guess_weak_episodes_rexps' ]
else:
strategy = [ 'guess_date', 'guess_website', 'guess_release_group',
'guess_properties', 'guess_language',
'guess_video_rexps' ]
if 'nolanguage' in opts:
strategy.remove('guess_language')
for name in strategy:
apply_transfo(name)
# more guessers for both movies and episodes
apply_transfo('guess_bonus_features')
apply_transfo('guess_year', skip_first_year=('skip_first_year' in opts))
if 'nocountry' not in opts:
apply_transfo('guess_country')
apply_transfo('guess_idnumber')
# split into '-' separated subgroups (with required separator chars
# around the dash)
apply_transfo('split_on_dash')
# 5- try to identify the remaining unknown groups by looking at their
# position relative to other known elements
if mtree.guess['type'] in ('episode', 'episodesubtitle', 'episodeinfo'):
apply_transfo('guess_episode_info_from_position')
else:
apply_transfo('guess_movie_title_from_position')
# 6- perform some post-processing steps
apply_transfo('post_process')
log.debug('Found match tree:\n%s' % u(mtree))
def matched(self):
return self.match_tree.matched()
| gpl-3.0 |
mKeRix/home-assistant | homeassistant/components/blebox/switch.py | 21 | 1048 | """BleBox switch implementation."""
from homeassistant.components.switch import SwitchEntity
from . import BleBoxEntity, create_blebox_entities
from .const import BLEBOX_TO_HASS_DEVICE_CLASSES
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up a BleBox switch entity."""
create_blebox_entities(
hass, config_entry, async_add_entities, BleBoxSwitchEntity, "switches"
)
class BleBoxSwitchEntity(BleBoxEntity, SwitchEntity):
"""Representation of a BleBox switch feature."""
@property
def device_class(self):
"""Return the device class."""
return BLEBOX_TO_HASS_DEVICE_CLASSES[self._feature.device_class]
@property
def is_on(self):
"""Return whether switch is on."""
return self._feature.is_on
async def async_turn_on(self, **kwargs):
"""Turn on the switch."""
await self._feature.async_turn_on()
async def async_turn_off(self, **kwargs):
"""Turn off the switch."""
await self._feature.async_turn_off()
| mit |
vvtam/googletest | test/gtest_output_test.py | 188 | 12260 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests the text output of Google C++ Testing Framework.
SYNOPSIS
gtest_output_test.py --build_dir=BUILD/DIR --gengolden
# where BUILD/DIR contains the built gtest_output_test_ file.
gtest_output_test.py --gengolden
gtest_output_test.py
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import difflib
import os
import re
import sys
import gtest_test_utils
# The flag for generating the golden file
GENGOLDEN_FLAG = '--gengolden'
CATCH_EXCEPTIONS_ENV_VAR_NAME = 'GTEST_CATCH_EXCEPTIONS'
IS_WINDOWS = os.name == 'nt'
# TODO(vladl@google.com): remove the _lin suffix.
GOLDEN_NAME = 'gtest_output_test_golden_lin.txt'
PROGRAM_PATH = gtest_test_utils.GetTestExecutablePath('gtest_output_test_')
# At least one command we exercise must not have the
# 'internal_skip_environment_and_ad_hoc_tests' argument.
COMMAND_LIST_TESTS = ({}, [PROGRAM_PATH, '--gtest_list_tests'])
COMMAND_WITH_COLOR = ({}, [PROGRAM_PATH, '--gtest_color=yes'])
COMMAND_WITH_TIME = ({}, [PROGRAM_PATH,
'--gtest_print_time',
'internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=FatalFailureTest.*:LoggingTest.*'])
COMMAND_WITH_DISABLED = (
{}, [PROGRAM_PATH,
'--gtest_also_run_disabled_tests',
'internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=*DISABLED_*'])
COMMAND_WITH_SHARDING = (
{'GTEST_SHARD_INDEX': '1', 'GTEST_TOTAL_SHARDS': '2'},
[PROGRAM_PATH,
'internal_skip_environment_and_ad_hoc_tests',
'--gtest_filter=PassingTest.*'])
GOLDEN_PATH = os.path.join(gtest_test_utils.GetSourceDir(), GOLDEN_NAME)
def ToUnixLineEnding(s):
"""Changes all Windows/Mac line endings in s to UNIX line endings."""
return s.replace('\r\n', '\n').replace('\r', '\n')
def RemoveLocations(test_output):
"""Removes all file location info from a Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with all file location info (in the form of
'DIRECTORY/FILE_NAME:LINE_NUMBER: ' or
'DIRECTORY\\FILE_NAME(LINE_NUMBER): ') replaced by
'FILE_NAME:#: '.
"""
return re.sub(r'.*[/\\](.+)(\:\d+|\(\d+\))\: ', r'\1:#: ', test_output)
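# Illustrative (hypothetical failure lines):
#   RemoveLocations('foo/gtest.cc:42: Failure')   -> 'gtest.cc:#: Failure'
#   RemoveLocations('foo\\gtest.cc(42): Failure') -> 'gtest.cc:#: Failure'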
def RemoveStackTraceDetails(output):
"""Removes all stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n',
'Stack trace: (omitted)\n\n', output)
def RemoveStackTraces(output):
"""Removes all traces of stack traces from a Google Test program's output."""
# *? means "find the shortest string that matches".
return re.sub(r'Stack trace:(.|\n)*?\n\n', '', output)
def RemoveTime(output):
"""Removes all time information from a Google Test program's output."""
return re.sub(r'\(\d+ ms', '(? ms', output)
def RemoveTypeInfoDetails(test_output):
"""Removes compiler-specific type info from Google Test program's output.
Args:
test_output: the output of a Google Test program.
Returns:
output with type information normalized to canonical form.
"""
# some compilers output the name of type 'unsigned int' as 'unsigned'
return re.sub(r'unsigned int', 'unsigned', test_output)
def NormalizeToCurrentPlatform(test_output):
"""Normalizes platform specific output details for easier comparison."""
if IS_WINDOWS:
# Removes the color information that is not present on Windows.
test_output = re.sub('\x1b\\[(0;3\d)?m', '', test_output)
# Changes failure message headers into the Windows format.
test_output = re.sub(r': Failure\n', r': error: ', test_output)
# Changes file(line_number) to file:line_number.
test_output = re.sub(r'((\w|\.)+)\((\d+)\):', r'\1:\3:', test_output)
return test_output
def RemoveTestCounts(output):
"""Removes test counts from a Google Test program's output."""
output = re.sub(r'\d+ tests?, listed below',
'? tests, listed below', output)
output = re.sub(r'\d+ FAILED TESTS',
'? FAILED TESTS', output)
output = re.sub(r'\d+ tests? from \d+ test cases?',
'? tests from ? test cases', output)
output = re.sub(r'\d+ tests? from ([a-zA-Z_])',
r'? tests from \1', output)
return re.sub(r'\d+ tests?\.', '? tests.', output)
def RemoveMatchingTests(test_output, pattern):
"""Removes output of specified tests from a Google Test program's output.
This function strips not only the beginning and the end of a test but also
all output in between.
Args:
test_output: A string containing the test output.
pattern: A regex string that matches names of test cases or
tests to remove.
Returns:
Contents of test_output with tests whose names match pattern removed.
"""
test_output = re.sub(
r'.*\[ RUN \] .*%s(.|\n)*?\[( FAILED | OK )\] .*%s.*\n' % (
pattern, pattern),
'',
test_output)
return re.sub(r'.*%s.*\n' % pattern, '', test_output)
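# Illustrative: RemoveMatchingTests(output, 'DeathTest') strips every
# '[ RUN ] ... [ FAILED / OK ] ...' block whose test name mentions
# DeathTest, plus any remaining single lines that mention it.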
def NormalizeOutput(output):
"""Normalizes output (the output of gtest_output_test_.exe)."""
output = ToUnixLineEnding(output)
output = RemoveLocations(output)
output = RemoveStackTraceDetails(output)
output = RemoveTime(output)
return output
def GetShellCommandOutput(env_cmd):
"""Runs a command in a sub-process, and returns its output in a string.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
Returns:
A string with the command's combined standard and diagnostic output.
"""
# Spawns cmd in a sub-process, and gets its standard I/O file objects.
# Set and save the environment properly.
environ = os.environ.copy()
environ.update(env_cmd[0])
p = gtest_test_utils.Subprocess(env_cmd[1], env=environ)
return p.output
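# For example, the module-level tuple defined above is consumed as:
#   test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)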
def GetCommandOutput(env_cmd):
"""Runs a command and returns its output with all file location
info stripped off.
Args:
env_cmd: The shell command. A 2-tuple where element 0 is a dict of extra
environment variables to set, and element 1 is a string with
the command and any flags.
"""
# Disables exception pop-ups on Windows.
environ, cmdline = env_cmd
environ = dict(environ) # Ensures we are modifying a copy.
environ[CATCH_EXCEPTIONS_ENV_VAR_NAME] = '1'
return NormalizeOutput(GetShellCommandOutput((environ, cmdline)))
def GetOutputOfAllCommands():
"""Returns concatenated output from several representative commands."""
return (GetCommandOutput(COMMAND_WITH_COLOR) +
GetCommandOutput(COMMAND_WITH_TIME) +
GetCommandOutput(COMMAND_WITH_DISABLED) +
GetCommandOutput(COMMAND_WITH_SHARDING))
test_list = GetShellCommandOutput(COMMAND_LIST_TESTS)
SUPPORTS_DEATH_TESTS = 'DeathTest' in test_list
SUPPORTS_TYPED_TESTS = 'TypedTest' in test_list
SUPPORTS_THREADS = 'ExpectFailureWithThreadsTest' in test_list
SUPPORTS_STACK_TRACES = False
CAN_GENERATE_GOLDEN_FILE = (SUPPORTS_DEATH_TESTS and
SUPPORTS_TYPED_TESTS and
SUPPORTS_THREADS and
not IS_WINDOWS)
class GTestOutputTest(gtest_test_utils.TestCase):
def RemoveUnsupportedTests(self, test_output):
if not SUPPORTS_DEATH_TESTS:
test_output = RemoveMatchingTests(test_output, 'DeathTest')
if not SUPPORTS_TYPED_TESTS:
test_output = RemoveMatchingTests(test_output, 'TypedTest')
test_output = RemoveMatchingTests(test_output, 'TypedDeathTest')
test_output = RemoveMatchingTests(test_output, 'TypeParamDeathTest')
if not SUPPORTS_THREADS:
test_output = RemoveMatchingTests(test_output,
'ExpectFailureWithThreadsTest')
test_output = RemoveMatchingTests(test_output,
'ScopedFakeTestPartResultReporterTest')
test_output = RemoveMatchingTests(test_output,
'WorksConcurrently')
if not SUPPORTS_STACK_TRACES:
test_output = RemoveStackTraces(test_output)
return test_output
def testOutput(self):
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'rb')
# A mis-configured source control system can cause \r appear in EOL
# sequences when we read the golden file irrespective of an operating
# system used. Therefore, we need to strip those \r's from newlines
# unconditionally.
golden = ToUnixLineEnding(golden_file.read())
golden_file.close()
# We want the test to pass regardless of certain features being
# supported or not.
# We still have to remove type name specifics in all cases.
normalized_actual = RemoveTypeInfoDetails(output)
normalized_golden = RemoveTypeInfoDetails(golden)
if CAN_GENERATE_GOLDEN_FILE:
self.assertEqual(normalized_golden, normalized_actual,
'\n'.join(difflib.unified_diff(
normalized_golden.split('\n'),
normalized_actual.split('\n'),
'golden', 'actual')))
else:
normalized_actual = NormalizeToCurrentPlatform(
RemoveTestCounts(normalized_actual))
normalized_golden = NormalizeToCurrentPlatform(
RemoveTestCounts(self.RemoveUnsupportedTests(normalized_golden)))
# This code is very handy when debugging golden file differences:
if os.getenv('DEBUG_GTEST_OUTPUT_TEST'):
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_actual.txt'), 'wb').write(
normalized_actual)
open(os.path.join(
gtest_test_utils.GetSourceDir(),
'_gtest_output_test_normalized_golden.txt'), 'wb').write(
normalized_golden)
self.assertEqual(normalized_golden, normalized_actual)
if __name__ == '__main__':
if sys.argv[1:] == [GENGOLDEN_FLAG]:
if CAN_GENERATE_GOLDEN_FILE:
output = GetOutputOfAllCommands()
golden_file = open(GOLDEN_PATH, 'wb')
golden_file.write(output)
golden_file.close()
else:
message = (
"""Unable to write a golden file when compiled in an environment
that does not support all the required features (death tests, typed tests,
and multiple threads). Please generate the golden file using a binary built
with those features enabled.""")
sys.stderr.write(message)
sys.exit(1)
else:
gtest_test_utils.Main()
| bsd-3-clause |
benfinke/ns_python | build/lib/nssrc/com/citrix/netscaler/nitro/resource/config/cr/crvserver_cspolicy_binding.py | 3 | 7362 | #
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class crvserver_cspolicy_binding(base_resource) :
""" Binding class showing the cspolicy that can be bound to crvserver.
"""
def __init__(self) :
self._policyname = ""
self._targetvserver = ""
self._priority = 0
self._hits = 0
self._name = ""
self.___count = 0
@property
def priority(self) :
ur"""The priority for the policy.
"""
try :
return self._priority
except Exception as e:
raise e
@priority.setter
def priority(self, priority) :
ur"""The priority for the policy.
"""
try :
self._priority = priority
except Exception as e:
raise e
@property
def policyname(self) :
ur"""Policies bound to this vserver.
"""
try :
return self._policyname
except Exception as e:
raise e
@policyname.setter
def policyname(self, policyname) :
ur"""Policies bound to this vserver.
"""
try :
self._policyname = policyname
except Exception as e:
raise e
@property
def name(self) :
ur"""Name of the cache redirection virtual server to which to bind the cache redirection policy.<br/>Minimum length = 1.
"""
try :
return self._name
except Exception as e:
raise e
@name.setter
def name(self, name) :
ur"""Name of the cache redirection virtual server to which to bind the cache redirection policy.<br/>Minimum length = 1
"""
try :
self._name = name
except Exception as e:
raise e
@property
def targetvserver(self) :
ur"""The CSW target server names.
"""
try :
return self._targetvserver
except Exception as e:
raise e
@targetvserver.setter
def targetvserver(self, targetvserver) :
ur"""The CSW target server names.
"""
try :
self._targetvserver = targetvserver
except Exception as e:
raise e
@property
def hits(self) :
ur"""Number of hits.
"""
try :
return self._hits
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
ur""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(crvserver_cspolicy_binding_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.crvserver_cspolicy_binding
except Exception as e :
raise e
def _get_object_name(self) :
ur""" Returns the value of object identifier argument
"""
try :
if self.name is not None :
return str(self.name)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
try :
if resource and type(resource) is not list :
updateresource = crvserver_cspolicy_binding()
updateresource.name = resource.name
updateresource.policyname = resource.policyname
updateresource.targetvserver = resource.targetvserver
updateresource.priority = resource.priority
return updateresource.update_resource(client)
else :
if resource and len(resource) > 0 :
updateresources = [crvserver_cspolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].name = resource[i].name
updateresources[i].policyname = resource[i].policyname
updateresources[i].targetvserver = resource[i].targetvserver
updateresources[i].priority = resource[i].priority
return cls.update_bulk_request(client, updateresources)
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
try :
if resource and type(resource) is not list :
deleteresource = crvserver_cspolicy_binding()
deleteresource.name = resource.name
deleteresource.policyname = resource.policyname
return deleteresource.delete_resource(client)
else :
if resource and len(resource) > 0 :
deleteresources = [crvserver_cspolicy_binding() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].name = resource[i].name
deleteresources[i].policyname = resource[i].policyname
return cls.delete_bulk_request(client, deleteresources)
except Exception as e :
raise e
@classmethod
def get(cls, service, name) :
ur""" Use this API to fetch crvserver_cspolicy_binding resources.
"""
try :
obj = crvserver_cspolicy_binding()
obj.name = name
response = obj.get_resources(service)
return response
except Exception as e:
raise e
@classmethod
def get_filtered(cls, service, name, filter_) :
ur""" Use this API to fetch filtered set of crvserver_cspolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = crvserver_cspolicy_binding()
obj.name = name
option_ = options()
option_.filter = filter_
response = obj.getfiltered(service, option_)
return response
except Exception as e:
raise e
@classmethod
def count(cls, service, name) :
ur""" Use this API to count crvserver_cspolicy_binding resources configued on NetScaler.
"""
try :
obj = crvserver_cspolicy_binding()
obj.name = name
option_ = options()
option_.count = True
response = obj.get_resources(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
@classmethod
def count_filtered(cls, service, name, filter_) :
ur""" Use this API to count the filtered set of crvserver_cspolicy_binding resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = crvserver_cspolicy_binding()
obj.name = name
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(service, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e:
raise e
class crvserver_cspolicy_binding_response(base_response) :
def __init__(self, length=1) :
self.crvserver_cspolicy_binding = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.crvserver_cspolicy_binding = [crvserver_cspolicy_binding() for _ in range(length)]
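# Hedged usage sketch (assumes an authenticated nitro_service instance named
# "client"; the vserver/policy names are hypothetical):
#
#     binding = crvserver_cspolicy_binding()
#     binding.name = "cr_vip_1"
#     binding.policyname = "cs_pol_1"
#     binding.priority = 10
#     crvserver_cspolicy_binding.add(client, binding)
#     bound = crvserver_cspolicy_binding.get(client, "cr_vip_1")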
| apache-2.0 |
denis-pitul/django | django/contrib/gis/gdal/libgdal.py | 449 | 3598 | from __future__ import unicode_literals
import logging
import os
import re
from ctypes import CDLL, CFUNCTYPE, c_char_p, c_int
from ctypes.util import find_library
from django.contrib.gis.gdal.error import GDALException
from django.core.exceptions import ImproperlyConfigured
logger = logging.getLogger('django.contrib.gis')
# Custom library path set?
try:
from django.conf import settings
lib_path = settings.GDAL_LIBRARY_PATH
except (AttributeError, EnvironmentError,
ImportError, ImproperlyConfigured):
lib_path = None
if lib_path:
lib_names = None
elif os.name == 'nt':
# Windows NT shared libraries
lib_names = ['gdal111', 'gdal110', 'gdal19', 'gdal18', 'gdal17']
elif os.name == 'posix':
# *NIX library names.
lib_names = ['gdal', 'GDAL', 'gdal1.11.0', 'gdal1.10.0', 'gdal1.9.0',
'gdal1.8.0', 'gdal1.7.0']
else:
raise GDALException('Unsupported OS "%s"' % os.name)
# Using the ctypes `find_library` utility to find the
# path to the GDAL library from the list of library names.
if lib_names:
for lib_name in lib_names:
lib_path = find_library(lib_name)
if lib_path is not None:
break
if lib_path is None:
raise GDALException('Could not find the GDAL library (tried "%s"). '
'Try setting GDAL_LIBRARY_PATH in your settings.' %
'", "'.join(lib_names))
# This loads the GDAL/OGR C library
lgdal = CDLL(lib_path)
# On Windows, the GDAL binaries have some OSR routines exported with
# STDCALL, while others are not. Thus, the library will also need to
# be loaded up as WinDLL for said OSR functions that require the
# different calling convention.
if os.name == 'nt':
from ctypes import WinDLL
lwingdal = WinDLL(lib_path)
def std_call(func):
"""
Returns the correct STDCALL function for certain OSR routines on Win32
platforms.
"""
if os.name == 'nt':
return lwingdal[func]
else:
return lgdal[func]
# #### Version-information functions. ####
# Returns GDAL library version information with the given key.
_version_info = std_call('GDALVersionInfo')
_version_info.argtypes = [c_char_p]
_version_info.restype = c_char_p
def gdal_version():
"Returns only the GDAL version number information."
return _version_info(b'RELEASE_NAME')
def gdal_full_version():
"Returns the full GDAL version information."
return _version_info(b'')  # bytes, to match the c_char_p argtype
version_regex = re.compile(r'^(?P<major>\d+)\.(?P<minor>\d+)(\.(?P<subminor>\d+))?')
def gdal_version_info():
ver = gdal_version().decode()
m = version_regex.match(ver)
if not m:
raise GDALException('Could not parse GDAL version string "%s"' % ver)
return {key: m.group(key) for key in ('major', 'minor', 'subminor')}
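# For example, if gdal_version() returns b'1.11.0', gdal_version_info()
# yields {'major': '1', 'minor': '11', 'subminor': '0'}.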
_verinfo = gdal_version_info()
GDAL_MAJOR_VERSION = int(_verinfo['major'])
GDAL_MINOR_VERSION = int(_verinfo['minor'])
GDAL_SUBMINOR_VERSION = _verinfo['subminor'] and int(_verinfo['subminor'])
GDAL_VERSION = (GDAL_MAJOR_VERSION, GDAL_MINOR_VERSION, GDAL_SUBMINOR_VERSION)
del _verinfo
# Set library error handling so that errors are logged
CPLErrorHandler = CFUNCTYPE(None, c_int, c_int, c_char_p)
def err_handler(error_class, error_number, message):
logger.error('GDAL_ERROR %d: %s' % (error_number, message))
err_handler = CPLErrorHandler(err_handler)
def function(name, args, restype):
func = std_call(name)
func.argtypes = args
func.restype = restype
return func
set_error_handler = function('CPLSetErrorHandler', [CPLErrorHandler], CPLErrorHandler)
set_error_handler(err_handler)
| bsd-3-clause |
MusikPolice/beets | test/test_mediafile_basic.py | 1 | 12688 | # This file is part of beets.
# Copyright 2012, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Automatically-generated blanket testing for the MediaFile metadata
layer.
"""
import os
import shutil
import datetime
import _common
from _common import unittest
import beets.mediafile
CORRECT_DICTS = {
# All of the fields iTunes supports that we do also.
'full': {
'title': u'full',
'artist': u'the artist',
'album': u'the album',
'genre': u'the genre',
'composer': u'the composer',
'grouping': u'the grouping',
'year': 2001,
'month': 0,
'day': 0,
'date': datetime.date(2001, 1, 1),
'track': 2,
'tracktotal': 3,
'disc': 4,
'disctotal': 5,
'lyrics': u'the lyrics',
'comments': u'the comments',
'bpm': 6,
'comp': True,
'mb_trackid': '8b882575-08a5-4452-a7a7-cbb8a1531f9e',
'mb_albumid': '9e873859-8aa4-4790-b985-5a953e8ef628',
'mb_artistid':'7cf0ea9d-86b9-4dad-ba9e-2355a64899ea',
'art': None,
'label': u'the label',
},
# Additional coverage for common cases when "total" fields are unset.
# Created with iTunes. (Also tests unset MusicBrainz fields.)
'partial': {
'track': 2,
'tracktotal': 0,
'disc': 4,
'disctotal': 0,
'mb_trackid': '',
'mb_albumid': '',
'mb_artistid':'',
},
'min': {
'track': 0,
'tracktotal': 0,
'disc': 0,
'disctotal': 0
},
# ID3 tag deleted with `mp3info -d`. Tests default values.
'empty': {
'title': u'',
'artist': u'',
'album': u'',
'genre': u'',
'composer': u'',
'grouping': u'',
'year': 0,
'month': 0,
'day': 0,
'date': datetime.date.min,
'track': 0,
'tracktotal': 0,
'disc': 0,
'disctotal': 0,
'lyrics': u'',
'comments': u'',
'bpm': 0,
'comp': False,
'mb_trackid': u'',
'mb_albumid': u'',
'mb_artistid':u'',
'art': None,
'label': u'',
# Additional, non-iTunes fields.
'rg_track_peak': 0.0,
'rg_track_gain': 0.0,
'rg_album_peak': 0.0,
'rg_album_gain': 0.0,
'albumartist': u'',
'mb_albumartistid': u'',
'artist_sort': u'',
'albumartist_sort': u'',
'acoustid_fingerprint': u'',
'acoustid_id': u'',
'mb_releasegroupid': u'',
'asin': u'',
'catalognum': u'',
'disctitle': u'',
'encoder': u'',
'script': u'',
'language': u'',
'country': u'',
'albumstatus': u'',
'media': u'',
'albumdisambig': u'',
'artist_credit': u'',
'albumartist_credit': u'',
},
# Full release date.
'date': {
'year': 1987,
'month': 3,
'day': 31,
'date': datetime.date(1987, 3, 31)
},
}
READ_ONLY_CORRECT_DICTS = {
'full.mp3': {
'length': 1.0,
'bitrate': 80000,
'format': 'MP3',
'samplerate': 44100,
'bitdepth': 0,
'channels': 1,
},
'full.flac': {
'length': 1.0,
'bitrate': 175120,
'format': 'FLAC',
'samplerate': 44100,
'bitdepth': 16,
'channels': 1,
},
'full.m4a': {
'length': 1.0,
'bitrate': 64000,
'format': 'AAC',
'samplerate': 44100,
'bitdepth': 16,
'channels': 2,
},
'full.ogg': {
'length': 1.0,
'bitrate': 48000,
'format': 'OGG',
'samplerate': 44100,
'bitdepth': 0,
'channels': 1,
},
'full.ape': {
'length': 1.0,
'bitrate': 112040,
'format': 'APE',
'samplerate': 44100,
'bitdepth': 16,
'channels': 1,
},
'full.wv': {
'length': 1.0,
'bitrate': 108744,
'format': 'WavPack',
'samplerate': 44100,
'bitdepth': 0,
'channels': 1,
},
'full.mpc': {
'length': 1.0,
'bitrate': 23,
'format': 'Musepack',
'samplerate': 44100,
'bitdepth': 0,
'channels': 2,
},
}
TEST_FILES = {
'm4a': ['full', 'partial', 'min'],
'mp3': ['full', 'partial', 'min'],
'flac': ['full', 'partial', 'min'],
'ogg': ['full'],
'ape': ['full'],
'wv': ['full'],
'mpc': ['full'],
}
class AllFilesMixin(object):
"""This is a dumb bit of copypasta but unittest has no supported
method of generating tests at runtime.
"""
def test_m4a_full(self):
self._run('full', 'm4a')
def test_m4a_partial(self):
self._run('partial', 'm4a')
def test_m4a_min(self):
self._run('min', 'm4a')
def test_mp3_full(self):
self._run('full', 'mp3')
def test_mp3_partial(self):
self._run('partial', 'mp3')
def test_mp3_min(self):
self._run('min', 'mp3')
def test_flac_full(self):
self._run('full', 'flac')
def test_flac_partial(self):
self._run('partial', 'flac')
def test_flac_min(self):
self._run('min', 'flac')
def test_ogg(self):
self._run('full', 'ogg')
def test_ape(self):
self._run('full', 'ape')
def test_wv(self):
self._run('full', 'wv')
def test_mpc(self):
self._run('full', 'mpc')
# Special test for advanced release date.
def test_date_mp3(self):
self._run('date', 'mp3')
class ReadingTest(unittest.TestCase, AllFilesMixin):
def _read_field(self, mf, correct_dict, field):
got = getattr(mf, field)
correct = correct_dict[field]
message = field + ' incorrect (expected ' + repr(correct) + \
', got ' + repr(got) + ')'
if isinstance(correct, float):
self.assertAlmostEqual(got, correct, msg=message)
else:
self.assertEqual(got, correct, message)
def _run(self, tagset, kind):
correct_dict = CORRECT_DICTS[tagset]
path = os.path.join(_common.RSRC, tagset + '.' + kind)
f = beets.mediafile.MediaFile(path)
for field in correct_dict:
if 'm4a' in path and field.startswith('rg_'):
# MPEG-4 files: ReplayGain values not implemented.
continue
self._read_field(f, correct_dict, field)
# Special test for missing ID3 tag.
def test_empty_mp3(self):
self._run('empty', 'mp3')
class WritingTest(unittest.TestCase, AllFilesMixin):
def _write_field(self, tpath, field, value, correct_dict):
# Write new tag.
a = beets.mediafile.MediaFile(tpath)
setattr(a, field, value)
a.save()
# Verify ALL tags are correct with modification.
b = beets.mediafile.MediaFile(tpath)
for readfield in correct_dict.keys():
got = getattr(b, readfield)
# Make sure the modified field was changed correctly...
if readfield == field:
message = field + ' modified incorrectly (changed to ' + \
repr(value) + ' but read ' + repr(got) + ')'
if isinstance(value, float):
self.assertAlmostEqual(got, value, msg=message)
else:
self.assertEqual(got, value, message)
# ... and that no other field was changed.
else:
# MPEG-4: ReplayGain not implented.
if 'm4a' in tpath and readfield.startswith('rg_'):
continue
# The value should be what it was originally most of the
# time.
correct = correct_dict[readfield]
# The date field, however, is modified when its components
# change.
if readfield == 'date' and field in ('year', 'month', 'day'):
try:
correct = datetime.date(
value if field == 'year' else correct.year,
value if field == 'month' else correct.month,
value if field == 'day' else correct.day
)
except ValueError:
correct = datetime.date.min
# And vice-versa.
if field == 'date' and readfield in ('year', 'month', 'day'):
correct = getattr(value, readfield)
message = readfield + ' changed when it should not have' \
' (expected ' + repr(correct) + ', got ' + \
repr(got) + ') when modifying ' + field
if isinstance(correct, float):
self.assertAlmostEqual(got, correct, msg=message)
else:
self.assertEqual(got, correct, message)
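# Illustration of the date/component coupling handled above: after writing
# a new 'year' into a file tagged 1987-03-31, reading back 'date' should
# give datetime.date(new_year, 3, 31) -- only the written component moves.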
def _run(self, tagset, kind):
correct_dict = CORRECT_DICTS[tagset]
path = os.path.join(_common.RSRC, tagset + '.' + kind)
for field in correct_dict:
if (field == 'month' and correct_dict['year'] == 0) or \
        (field == 'day' and correct_dict['month'] == 0):
continue
# Generate the new value we'll try storing.
if field == 'art':
value = 'xxx'
elif type(correct_dict[field]) is unicode:
value = u'TestValue: ' + field
elif type(correct_dict[field]) is int:
value = correct_dict[field] + 42
elif type(correct_dict[field]) is bool:
value = not correct_dict[field]
elif type(correct_dict[field]) is datetime.date:
value = correct_dict[field] + datetime.timedelta(42)
elif type(correct_dict[field]) is str:
value = 'TestValue-' + str(field)
elif type(correct_dict[field]) is float:
value = 9.87
else:
raise ValueError('unknown field type ' + \
str(type(correct_dict[field])))
# Make a copy of the file we'll work on.
root, ext = os.path.splitext(path)
tpath = root + '_test' + ext
shutil.copy(path, tpath)
try:
self._write_field(tpath, field, value, correct_dict)
finally:
os.remove(tpath)
class ReadOnlyTest(unittest.TestCase):
def _read_field(self, mf, field, value):
got = getattr(mf, field)
fail_msg = field + ' incorrect (expected ' + \
repr(value) + ', got ' + repr(got) + ')'
if field == 'length':
self.assertTrue(value-0.1 < got < value+0.1, fail_msg)
else:
self.assertEqual(got, value, fail_msg)
def _run(self, filename):
path = os.path.join(_common.RSRC, filename)
f = beets.mediafile.MediaFile(path)
correct_dict = READ_ONLY_CORRECT_DICTS[filename]
for field, value in correct_dict.items():
self._read_field(f, field, value)
def test_mp3(self):
self._run('full.mp3')
def test_m4a(self):
self._run('full.m4a')
def test_flac(self):
self._run('full.flac')
def test_ogg(self):
self._run('full.ogg')
def test_ape(self):
self._run('full.ape')
def test_wv(self):
self._run('full.wv')
def test_mpc(self):
self._run('full.mpc')
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == '__main__':
unittest.main(defaultTest='suite')
| mit |
ministryofjustice/manchester_traffic_offences_pleas | apps/monitoring/tests.py | 1 | 3914 | import datetime as dt
from django.test import TestCase
from apps.plea.models import Court, Case, OUCode
from .views import CourtDataView
class TestStatsLogic(TestCase):
def setUp(self):
self.court = self._create_court()
self.court_view = CourtDataView()
@staticmethod
def _create_court(**fields):
ou_codes = fields.pop("ou_codes", [])
data = dict(
region_code="20",
court_name="TEST COURT 1",
enabled=True,
court_address="123 Court",
court_telephone="0800 COURT",
court_receipt_email="test@test.com",
submission_email="test@test.com",
test_mode=False
)
data.update(fields)
court = Court.objects.create(**data)
for ou_code in ou_codes:
OUCode.objects.create(court=court, ou_code=ou_code)
return court
@staticmethod
def _create_case(**fields):
data = dict(
imported=False,
ou_code=None,
completed_on=None
)
data.update(fields)
return Case.objects.create(**data)
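# The assertions below assume CourtDataView._get_stats() returns a nested
# dict shaped like {"submissions": {"value": n}, "imported": {"value": n},
# ...} -- inferred from its usage in these tests, not from the view itself.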
def test_soap_gateway_imported_submissions_count(self):
self._create_case(urn="20XX0000000",
imported=True)
self._create_case(urn="20XX0000001",
imported=False)
self._create_case(urn="22XX0000001",
imported=False)
stats = self.court_view._get_stats(self.court, dt.date.today())
self.assertEquals(stats["imported"]["value"], 1)
def test_completed_submission_count(self):
self._create_case(urn="20XX0000000",
completed_on=dt.datetime.now())
self._create_case(urn="20XX0000001")
stats = self.court_view._get_stats(self.court, dt.date.today())
self.assertEquals(stats["submissions"]["value"], 1)
def test_unvalidated_submission_count(self):
self._create_case(urn="20XX0000000",
imported=True,
completed_on=dt.datetime.now())
self._create_case(urn="20XX0000001",
completed_on=dt.datetime.now())
stats = self.court_view._get_stats(self.court, dt.date.today())
self.assertEquals(stats["unvalidated_submissions"]["value"], 1)
def test_failed_email_sending_count(self):
self._create_case(urn="20XX0000000",
sent=False,
completed_on=dt.datetime.now())
self._create_case(urn="20XX0000001",
sent=True,
completed_on=dt.datetime.now())
stats = self.court_view._get_stats(self.court, dt.date.today())
self.assertEquals(stats["email_failure"]["value"], 1)
def test_sjp_case_import_count(self):
self._create_case(urn="20XX0000000",
initiation_type="J",
completed_on=dt.datetime.now())
self._create_case(urn="20XX0000001",
completed_on=dt.datetime.now())
stats = self.court_view._get_stats(self.court, dt.date.today())
self.assertEquals(stats["sjp_count"]["value"], 1)
def test_completed_on_with_oucode(self):
"""
If a court specifies OU codes, only count cases whose OU code is in that set.
"""
self._create_case(urn="20XX0000000",
completed_on=dt.datetime.now(),
ou_code="B01CY")
self._create_case(urn="20XX0000001",
completed_on=dt.datetime.now(),
ou_code="B01LY")
OUCode.objects.create(court=self.court, ou_code="B01CY")
stats = self.court_view._get_stats(self.court, dt.date.today())
self.assertEquals(stats["submissions"]["value"], 1)
| mit |
scipy/scipy | scipy/stats/tests/test_tukeylambda_stats.py | 21 | 3232 | import numpy as np
from numpy.testing import assert_allclose, assert_equal
from scipy.stats._tukeylambda_stats import (tukeylambda_variance,
tukeylambda_kurtosis)
def test_tukeylambda_stats_known_exact():
"""Compare results with some known exact formulas."""
# Some exact values of the Tukey Lambda variance and kurtosis:
# lambda var kurtosis
# 0 pi**2/3 6/5 (logistic distribution)
# 0.5 4 - pi (5/3 - pi/2)/(pi/4 - 1)**2 - 3
# 1 1/3 -6/5 (uniform distribution on (-1,1))
# 2 1/12 -6/5 (uniform distribution on (-1/2, 1/2))
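# For reference (standard Tukey lambda facts, stated here as a hedged aside,
# not taken from scipy's docs): the quantile function is
#     Q(p; lam) = (p**lam - (1 - p)**lam) / lam        for lam != 0,
# and for lam > -0.5 the variance is
#     (2 / lam**2) * (1 / (1 + 2*lam) - gamma(lam + 1)**2 / gamma(2*lam + 2)),
# which reproduces the exact values tabulated above.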
# lambda = 0
var = tukeylambda_variance(0)
assert_allclose(var, np.pi**2 / 3, atol=1e-12)
kurt = tukeylambda_kurtosis(0)
assert_allclose(kurt, 1.2, atol=1e-10)
# lambda = 0.5
var = tukeylambda_variance(0.5)
assert_allclose(var, 4 - np.pi, atol=1e-12)
kurt = tukeylambda_kurtosis(0.5)
desired = (5./3 - np.pi/2) / (np.pi/4 - 1)**2 - 3
assert_allclose(kurt, desired, atol=1e-10)
# lambda = 1
var = tukeylambda_variance(1)
assert_allclose(var, 1.0 / 3, atol=1e-12)
kurt = tukeylambda_kurtosis(1)
assert_allclose(kurt, -1.2, atol=1e-10)
# lambda = 2
var = tukeylambda_variance(2)
assert_allclose(var, 1.0 / 12, atol=1e-12)
kurt = tukeylambda_kurtosis(2)
assert_allclose(kurt, -1.2, atol=1e-10)
def test_tukeylambda_stats_mpmath():
"""Compare results with some values that were computed using mpmath."""
a10 = dict(atol=1e-10, rtol=0)
a12 = dict(atol=1e-12, rtol=0)
data = [
# lambda variance kurtosis
[-0.1, 4.78050217874253547, 3.78559520346454510],
[-0.0649, 4.16428023599895777, 2.52019675947435718],
[-0.05, 3.93672267890775277, 2.13129793057777277],
[-0.001, 3.30128380390964882, 1.21452460083542988],
[0.001, 3.27850775649572176, 1.18560634779287585],
[0.03125, 2.95927803254615800, 0.804487555161819980],
[0.05, 2.78281053405464501, 0.611604043886644327],
[0.0649, 2.65282386754100551, 0.476834119532774540],
[1.2, 0.242153920578588346, -1.23428047169049726],
[10.0, 0.00095237579757703597, 2.37810697355144933],
[20.0, 0.00012195121951131043, 7.37654321002709531],
]
for lam, var_expected, kurt_expected in data:
var = tukeylambda_variance(lam)
assert_allclose(var, var_expected, **a12)
kurt = tukeylambda_kurtosis(lam)
assert_allclose(kurt, kurt_expected, **a10)
# Test with vector arguments (most of the other tests are for single
# values).
lam, var_expected, kurt_expected = zip(*data)
var = tukeylambda_variance(lam)
assert_allclose(var, var_expected, **a12)
kurt = tukeylambda_kurtosis(lam)
assert_allclose(kurt, kurt_expected, **a10)
def test_tukeylambda_stats_invalid():
"""Test values of lambda outside the domains of the functions."""
lam = [-1.0, -0.5]
var = tukeylambda_variance(lam)
assert_equal(var, np.array([np.nan, np.inf]))
lam = [-1.0, -0.25]
kurt = tukeylambda_kurtosis(lam)
assert_equal(kurt, np.array([np.nan, np.inf]))
| bsd-3-clause |
ryfeus/lambda-packs | Shapely_numpy/source/numpy/core/tests/test_datetime.py | 23 | 90944 | from __future__ import division, absolute_import, print_function
import pickle
import numpy
import numpy as np
import datetime
from numpy.compat import asbytes
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal, assert_raises,
assert_warns, dec, suppress_warnings
)
# Use pytz to test out various time zones if available
try:
from pytz import timezone as tz
_has_pytz = True
except ImportError:
_has_pytz = False
class TestDateTime(TestCase):
def test_datetime_dtype_creation(self):
for unit in ['Y', 'M', 'W', 'D',
'h', 'm', 's', 'ms', 'us',
'ns', 'ps', 'fs', 'as']:
dt1 = np.dtype('M8[750%s]' % unit)
assert_(dt1 == np.dtype('datetime64[750%s]' % unit))
dt2 = np.dtype('m8[%s]' % unit)
assert_(dt2 == np.dtype('timedelta64[%s]' % unit))
# Generic units shouldn't add [] to the end
assert_equal(str(np.dtype("M8")), "datetime64")
# Should be possible to specify the endianness
assert_equal(np.dtype("=M8"), np.dtype("M8"))
assert_equal(np.dtype("=M8[s]"), np.dtype("M8[s]"))
assert_(np.dtype(">M8") == np.dtype("M8") or
np.dtype("<M8") == np.dtype("M8"))
assert_(np.dtype(">M8[D]") == np.dtype("M8[D]") or
np.dtype("<M8[D]") == np.dtype("M8[D]"))
assert_(np.dtype(">M8") != np.dtype("<M8"))
assert_equal(np.dtype("=m8"), np.dtype("m8"))
assert_equal(np.dtype("=m8[s]"), np.dtype("m8[s]"))
assert_(np.dtype(">m8") == np.dtype("m8") or
np.dtype("<m8") == np.dtype("m8"))
assert_(np.dtype(">m8[D]") == np.dtype("m8[D]") or
np.dtype("<m8[D]") == np.dtype("m8[D]"))
assert_(np.dtype(">m8") != np.dtype("<m8"))
# Check that the parser rejects bad datetime types
assert_raises(TypeError, np.dtype, 'M8[badunit]')
assert_raises(TypeError, np.dtype, 'm8[badunit]')
assert_raises(TypeError, np.dtype, 'M8[YY]')
assert_raises(TypeError, np.dtype, 'm8[YY]')
assert_raises(TypeError, np.dtype, 'm4')
assert_raises(TypeError, np.dtype, 'M7')
assert_raises(TypeError, np.dtype, 'm7')
assert_raises(TypeError, np.dtype, 'M16')
assert_raises(TypeError, np.dtype, 'm16')
def test_datetime_casting_rules(self):
# Cannot cast safely/same_kind between timedelta and datetime
assert_(not np.can_cast('m8', 'M8', casting='same_kind'))
assert_(not np.can_cast('M8', 'm8', casting='same_kind'))
assert_(not np.can_cast('m8', 'M8', casting='safe'))
assert_(not np.can_cast('M8', 'm8', casting='safe'))
# Can cast safely/same_kind from integer to timedelta
assert_(np.can_cast('i8', 'm8', casting='same_kind'))
assert_(np.can_cast('i8', 'm8', casting='safe'))
# Cannot cast safely/same_kind from float to timedelta
assert_(not np.can_cast('f4', 'm8', casting='same_kind'))
assert_(not np.can_cast('f4', 'm8', casting='safe'))
# Cannot cast safely/same_kind from integer to datetime
assert_(not np.can_cast('i8', 'M8', casting='same_kind'))
assert_(not np.can_cast('i8', 'M8', casting='safe'))
# Cannot cast safely/same_kind from bool to datetime
assert_(not np.can_cast('b1', 'M8', casting='same_kind'))
assert_(not np.can_cast('b1', 'M8', casting='safe'))
# Can cast safely/same_kind from bool to timedelta
assert_(np.can_cast('b1', 'm8', casting='same_kind'))
assert_(np.can_cast('b1', 'm8', casting='safe'))
# Can cast datetime safely from months/years to days
assert_(np.can_cast('M8[M]', 'M8[D]', casting='safe'))
assert_(np.can_cast('M8[Y]', 'M8[D]', casting='safe'))
# Cannot cast timedelta safely from months/years to days
assert_(not np.can_cast('m8[M]', 'm8[D]', casting='safe'))
assert_(not np.can_cast('m8[Y]', 'm8[D]', casting='safe'))
# Can cast datetime same_kind from months/years to days
assert_(np.can_cast('M8[M]', 'M8[D]', casting='same_kind'))
assert_(np.can_cast('M8[Y]', 'M8[D]', casting='same_kind'))
# Can't cast timedelta same_kind from months/years to days
assert_(not np.can_cast('m8[M]', 'm8[D]', casting='same_kind'))
assert_(not np.can_cast('m8[Y]', 'm8[D]', casting='same_kind'))
# Can cast datetime same_kind across the date/time boundary
assert_(np.can_cast('M8[D]', 'M8[h]', casting='same_kind'))
# Can cast timedelta same_kind across the date/time boundary
assert_(np.can_cast('m8[D]', 'm8[h]', casting='same_kind'))
assert_(np.can_cast('m8[h]', 'm8[D]', casting='same_kind'))
# Cannot cast safely if the integer multiplier doesn't divide
assert_(not np.can_cast('M8[7h]', 'M8[3h]', casting='safe'))
assert_(not np.can_cast('M8[3h]', 'M8[6h]', casting='safe'))
# But can cast same_kind
assert_(np.can_cast('M8[7h]', 'M8[3h]', casting='same_kind'))
# Can cast safely if the integer multiplier does divide
assert_(np.can_cast('M8[6h]', 'M8[3h]', casting='safe'))
# We can always cast types with generic units (corresponding to NaT) to
# more specific types
assert_(np.can_cast('m8', 'm8[h]', casting='same_kind'))
assert_(np.can_cast('m8', 'm8[h]', casting='safe'))
assert_(np.can_cast('M8', 'M8[h]', casting='same_kind'))
assert_(np.can_cast('M8', 'M8[h]', casting='safe'))
# but not the other way around
assert_(not np.can_cast('m8[h]', 'm8', casting='same_kind'))
assert_(not np.can_cast('m8[h]', 'm8', casting='safe'))
assert_(not np.can_cast('M8[h]', 'M8', casting='same_kind'))
assert_(not np.can_cast('M8[h]', 'M8', casting='safe'))
def test_compare_generic_nat(self):
# regression tests for GH6452
assert_equal(np.datetime64('NaT'),
np.datetime64('2000') + np.timedelta64('NaT'))
# nb. we may want to make NaT != NaT true in the future
with suppress_warnings() as sup:
sup.filter(FutureWarning, ".*NAT ==")
assert_(np.datetime64('NaT') == np.datetime64('NaT', 'us'))
assert_(np.datetime64('NaT', 'us') == np.datetime64('NaT'))
def test_datetime_scalar_construction(self):
# Construct with different units
assert_equal(np.datetime64('1950-03-12', 'D'),
np.datetime64('1950-03-12'))
assert_equal(np.datetime64('1950-03-12T13', 's'),
np.datetime64('1950-03-12T13', 'm'))
# Default construction means NaT
assert_equal(np.datetime64(), np.datetime64('NaT'))
# Some basic strings and repr
assert_equal(str(np.datetime64('NaT')), 'NaT')
assert_equal(repr(np.datetime64('NaT')),
"numpy.datetime64('NaT')")
assert_equal(str(np.datetime64('2011-02')), '2011-02')
assert_equal(repr(np.datetime64('2011-02')),
"numpy.datetime64('2011-02')")
# None gets constructed as NaT
assert_equal(np.datetime64(None), np.datetime64('NaT'))
# Default construction of NaT is in generic units
assert_equal(np.datetime64().dtype, np.dtype('M8'))
assert_equal(np.datetime64('NaT').dtype, np.dtype('M8'))
# Construction from integers requires a specified unit
assert_raises(ValueError, np.datetime64, 17)
# When constructing from a scalar or zero-dimensional array,
# it either keeps the units or you can override them.
a = np.datetime64('2000-03-18T16', 'h')
b = np.array('2000-03-18T16', dtype='M8[h]')
assert_equal(a.dtype, np.dtype('M8[h]'))
assert_equal(b.dtype, np.dtype('M8[h]'))
assert_equal(np.datetime64(a), a)
assert_equal(np.datetime64(a).dtype, np.dtype('M8[h]'))
assert_equal(np.datetime64(b), a)
assert_equal(np.datetime64(b).dtype, np.dtype('M8[h]'))
assert_equal(np.datetime64(a, 's'), a)
assert_equal(np.datetime64(a, 's').dtype, np.dtype('M8[s]'))
assert_equal(np.datetime64(b, 's'), a)
assert_equal(np.datetime64(b, 's').dtype, np.dtype('M8[s]'))
# Construction from datetime.date
assert_equal(np.datetime64('1945-03-25'),
np.datetime64(datetime.date(1945, 3, 25)))
assert_equal(np.datetime64('2045-03-25', 'D'),
np.datetime64(datetime.date(2045, 3, 25), 'D'))
# Construction from datetime.datetime
assert_equal(np.datetime64('1980-01-25T14:36:22.5'),
np.datetime64(datetime.datetime(1980, 1, 25,
14, 36, 22, 500000)))
# Construction with time units from a date is okay
assert_equal(np.datetime64('1920-03-13', 'h'),
np.datetime64('1920-03-13T00'))
assert_equal(np.datetime64('1920-03', 'm'),
np.datetime64('1920-03-01T00:00'))
assert_equal(np.datetime64('1920', 's'),
np.datetime64('1920-01-01T00:00:00'))
assert_equal(np.datetime64(datetime.date(2045, 3, 25), 'ms'),
np.datetime64('2045-03-25T00:00:00.000'))
# Construction with date units from a datetime is also okay
assert_equal(np.datetime64('1920-03-13T18', 'D'),
np.datetime64('1920-03-13'))
assert_equal(np.datetime64('1920-03-13T18:33:12', 'M'),
np.datetime64('1920-03'))
assert_equal(np.datetime64('1920-03-13T18:33:12.5', 'Y'),
np.datetime64('1920'))
def test_datetime_scalar_construction_timezone(self):
# verify that supplying an explicit timezone works, but is deprecated
with assert_warns(DeprecationWarning):
assert_equal(np.datetime64('2000-01-01T00Z'),
np.datetime64('2000-01-01T00'))
with assert_warns(DeprecationWarning):
assert_equal(np.datetime64('2000-01-01T00-08'),
np.datetime64('2000-01-01T08'))
def test_datetime_array_find_type(self):
dt = np.datetime64('1970-01-01', 'M')
arr = np.array([dt])
assert_equal(arr.dtype, np.dtype('M8[M]'))
# at the moment, we don't automatically convert these to datetime64
dt = datetime.date(1970, 1, 1)
arr = np.array([dt])
assert_equal(arr.dtype, np.dtype('O'))
dt = datetime.datetime(1970, 1, 1, 12, 30, 40)
arr = np.array([dt])
assert_equal(arr.dtype, np.dtype('O'))
# find "supertype" for non-dates and dates
b = np.bool_(True)
dt = np.datetime64('1970-01-01', 'M')
arr = np.array([b, dt])
assert_equal(arr.dtype, np.dtype('O'))
dt = datetime.date(1970, 1, 1)
arr = np.array([b, dt])
assert_equal(arr.dtype, np.dtype('O'))
dt = datetime.datetime(1970, 1, 1, 12, 30, 40)
arr = np.array([b, dt])
assert_equal(arr.dtype, np.dtype('O'))
def test_timedelta_scalar_construction(self):
# Construct with different units
assert_equal(np.timedelta64(7, 'D'),
np.timedelta64(1, 'W'))
assert_equal(np.timedelta64(120, 's'),
np.timedelta64(2, 'm'))
# Default construction means 0
assert_equal(np.timedelta64(), np.timedelta64(0))
# None gets constructed as NaT
assert_equal(np.timedelta64(None), np.timedelta64('NaT'))
# Some basic strings and repr
assert_equal(str(np.timedelta64('NaT')), 'NaT')
assert_equal(repr(np.timedelta64('NaT')),
"numpy.timedelta64('NaT')")
assert_equal(str(np.timedelta64(3, 's')), '3 seconds')
assert_equal(repr(np.timedelta64(-3, 's')),
"numpy.timedelta64(-3,'s')")
assert_equal(repr(np.timedelta64(12)),
"numpy.timedelta64(12)")
# Construction from an integer produces generic units
assert_equal(np.timedelta64(12).dtype, np.dtype('m8'))
# When constructing from a scalar or zero-dimensional array,
# it either keeps the units or you can override them.
a = np.timedelta64(2, 'h')
b = np.array(2, dtype='m8[h]')
assert_equal(a.dtype, np.dtype('m8[h]'))
assert_equal(b.dtype, np.dtype('m8[h]'))
assert_equal(np.timedelta64(a), a)
assert_equal(np.timedelta64(a).dtype, np.dtype('m8[h]'))
assert_equal(np.timedelta64(b), a)
assert_equal(np.timedelta64(b).dtype, np.dtype('m8[h]'))
assert_equal(np.timedelta64(a, 's'), a)
assert_equal(np.timedelta64(a, 's').dtype, np.dtype('m8[s]'))
assert_equal(np.timedelta64(b, 's'), a)
assert_equal(np.timedelta64(b, 's').dtype, np.dtype('m8[s]'))
# Construction from datetime.timedelta
assert_equal(np.timedelta64(5, 'D'),
np.timedelta64(datetime.timedelta(days=5)))
assert_equal(np.timedelta64(102347621, 's'),
np.timedelta64(datetime.timedelta(seconds=102347621)))
assert_equal(np.timedelta64(-10234760000, 'us'),
np.timedelta64(datetime.timedelta(
microseconds=-10234760000)))
assert_equal(np.timedelta64(10234760000, 'us'),
np.timedelta64(datetime.timedelta(
microseconds=10234760000)))
assert_equal(np.timedelta64(1023476, 'ms'),
np.timedelta64(datetime.timedelta(milliseconds=1023476)))
assert_equal(np.timedelta64(10, 'm'),
np.timedelta64(datetime.timedelta(minutes=10)))
assert_equal(np.timedelta64(281, 'h'),
np.timedelta64(datetime.timedelta(hours=281)))
assert_equal(np.timedelta64(28, 'W'),
np.timedelta64(datetime.timedelta(weeks=28)))
# Cannot construct across nonlinear time unit boundaries
a = np.timedelta64(3, 's')
assert_raises(TypeError, np.timedelta64, a, 'M')
assert_raises(TypeError, np.timedelta64, a, 'Y')
a = np.timedelta64(6, 'M')
assert_raises(TypeError, np.timedelta64, a, 'D')
assert_raises(TypeError, np.timedelta64, a, 'h')
a = np.timedelta64(1, 'Y')
assert_raises(TypeError, np.timedelta64, a, 'D')
assert_raises(TypeError, np.timedelta64, a, 'm')
def test_timedelta_scalar_construction_units(self):
# String construction detecting units
assert_equal(np.datetime64('2010').dtype,
np.dtype('M8[Y]'))
assert_equal(np.datetime64('2010-03').dtype,
np.dtype('M8[M]'))
assert_equal(np.datetime64('2010-03-12').dtype,
np.dtype('M8[D]'))
assert_equal(np.datetime64('2010-03-12T17').dtype,
np.dtype('M8[h]'))
assert_equal(np.datetime64('2010-03-12T17:15').dtype,
np.dtype('M8[m]'))
assert_equal(np.datetime64('2010-03-12T17:15:08').dtype,
np.dtype('M8[s]'))
assert_equal(np.datetime64('2010-03-12T17:15:08.1').dtype,
np.dtype('M8[ms]'))
assert_equal(np.datetime64('2010-03-12T17:15:08.12').dtype,
np.dtype('M8[ms]'))
assert_equal(np.datetime64('2010-03-12T17:15:08.123').dtype,
np.dtype('M8[ms]'))
assert_equal(np.datetime64('2010-03-12T17:15:08.1234').dtype,
np.dtype('M8[us]'))
assert_equal(np.datetime64('2010-03-12T17:15:08.12345').dtype,
np.dtype('M8[us]'))
assert_equal(np.datetime64('2010-03-12T17:15:08.123456').dtype,
np.dtype('M8[us]'))
assert_equal(np.datetime64('1970-01-01T00:00:02.1234567').dtype,
np.dtype('M8[ns]'))
assert_equal(np.datetime64('1970-01-01T00:00:02.12345678').dtype,
np.dtype('M8[ns]'))
assert_equal(np.datetime64('1970-01-01T00:00:02.123456789').dtype,
np.dtype('M8[ns]'))
assert_equal(np.datetime64('1970-01-01T00:00:02.1234567890').dtype,
np.dtype('M8[ps]'))
assert_equal(np.datetime64('1970-01-01T00:00:02.12345678901').dtype,
np.dtype('M8[ps]'))
assert_equal(np.datetime64('1970-01-01T00:00:02.123456789012').dtype,
np.dtype('M8[ps]'))
assert_equal(np.datetime64(
'1970-01-01T00:00:02.1234567890123').dtype,
np.dtype('M8[fs]'))
assert_equal(np.datetime64(
'1970-01-01T00:00:02.12345678901234').dtype,
np.dtype('M8[fs]'))
assert_equal(np.datetime64(
'1970-01-01T00:00:02.123456789012345').dtype,
np.dtype('M8[fs]'))
assert_equal(np.datetime64(
'1970-01-01T00:00:02.1234567890123456').dtype,
np.dtype('M8[as]'))
assert_equal(np.datetime64(
'1970-01-01T00:00:02.12345678901234567').dtype,
np.dtype('M8[as]'))
assert_equal(np.datetime64(
'1970-01-01T00:00:02.123456789012345678').dtype,
np.dtype('M8[as]'))
# Python date object
assert_equal(np.datetime64(datetime.date(2010, 4, 16)).dtype,
np.dtype('M8[D]'))
# Python datetime object
assert_equal(np.datetime64(
datetime.datetime(2010, 4, 16, 13, 45, 18)).dtype,
np.dtype('M8[us]'))
# 'today' special value
assert_equal(np.datetime64('today').dtype,
np.dtype('M8[D]'))
# 'now' special value
assert_equal(np.datetime64('now').dtype,
np.dtype('M8[s]'))
def test_datetime_nat_casting(self):
a = np.array('NaT', dtype='M8[D]')
b = np.datetime64('NaT', '[D]')
# Arrays
assert_equal(a.astype('M8[s]'), np.array('NaT', dtype='M8[s]'))
assert_equal(a.astype('M8[ms]'), np.array('NaT', dtype='M8[ms]'))
assert_equal(a.astype('M8[M]'), np.array('NaT', dtype='M8[M]'))
assert_equal(a.astype('M8[Y]'), np.array('NaT', dtype='M8[Y]'))
assert_equal(a.astype('M8[W]'), np.array('NaT', dtype='M8[W]'))
# Scalars -> Scalars
assert_equal(np.datetime64(b, '[s]'), np.datetime64('NaT', '[s]'))
assert_equal(np.datetime64(b, '[ms]'), np.datetime64('NaT', '[ms]'))
assert_equal(np.datetime64(b, '[M]'), np.datetime64('NaT', '[M]'))
assert_equal(np.datetime64(b, '[Y]'), np.datetime64('NaT', '[Y]'))
assert_equal(np.datetime64(b, '[W]'), np.datetime64('NaT', '[W]'))
# Arrays -> Scalars
assert_equal(np.datetime64(a, '[s]'), np.datetime64('NaT', '[s]'))
assert_equal(np.datetime64(a, '[ms]'), np.datetime64('NaT', '[ms]'))
assert_equal(np.datetime64(a, '[M]'), np.datetime64('NaT', '[M]'))
assert_equal(np.datetime64(a, '[Y]'), np.datetime64('NaT', '[Y]'))
assert_equal(np.datetime64(a, '[W]'), np.datetime64('NaT', '[W]'))
def test_days_creation(self):
assert_equal(np.array('1599', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3 - 365)
assert_equal(np.array('1600', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3)
assert_equal(np.array('1601', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3 + 366)
assert_equal(np.array('1900', dtype='M8[D]').astype('i8'),
(1900-1970)*365 - (1970-1900)//4)
assert_equal(np.array('1901', dtype='M8[D]').astype('i8'),
(1900-1970)*365 - (1970-1900)//4 + 365)
assert_equal(np.array('1967', dtype='M8[D]').astype('i8'), -3*365 - 1)
assert_equal(np.array('1968', dtype='M8[D]').astype('i8'), -2*365 - 1)
assert_equal(np.array('1969', dtype='M8[D]').astype('i8'), -1*365)
assert_equal(np.array('1970', dtype='M8[D]').astype('i8'), 0*365)
assert_equal(np.array('1971', dtype='M8[D]').astype('i8'), 1*365)
assert_equal(np.array('1972', dtype='M8[D]').astype('i8'), 2*365)
assert_equal(np.array('1973', dtype='M8[D]').astype('i8'), 3*365 + 1)
assert_equal(np.array('1974', dtype='M8[D]').astype('i8'), 4*365 + 1)
assert_equal(np.array('2000', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4)
assert_equal(np.array('2001', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 366)
assert_equal(np.array('2400', dtype='M8[D]').astype('i8'),
(2400 - 1970)*365 + (2400 - 1972)//4 - 3)
assert_equal(np.array('2401', dtype='M8[D]').astype('i8'),
(2400 - 1970)*365 + (2400 - 1972)//4 - 3 + 366)
assert_equal(np.array('1600-02-29', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3 + 31 + 28)
assert_equal(np.array('1600-03-01', dtype='M8[D]').astype('i8'),
(1600-1970)*365 - (1972-1600)//4 + 3 + 31 + 29)
assert_equal(np.array('2000-02-29', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 31 + 28)
assert_equal(np.array('2000-03-01', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 31 + 29)
assert_equal(np.array('2001-03-22', dtype='M8[D]').astype('i8'),
(2000 - 1970)*365 + (2000 - 1972)//4 + 366 + 31 + 28 + 21)
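# Worked example for the arithmetic above (illustrative): the integer behind
# an 'M8[D]' value counts days from the 1970-01-01 epoch. For '2000-02-29':
# (2000 - 1970) * 365 # 10950 plain year-days
# + (2000 - 1972) // 4 # +7 leap days (1972, 1976, ..., 1996)
# + 31 + 28 # days of 2000 before Feb 29
# # total 11016, the value asserted above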
def test_days_to_pydate(self):
assert_equal(np.array('1599', dtype='M8[D]').astype('O'),
datetime.date(1599, 1, 1))
assert_equal(np.array('1600', dtype='M8[D]').astype('O'),
datetime.date(1600, 1, 1))
assert_equal(np.array('1601', dtype='M8[D]').astype('O'),
datetime.date(1601, 1, 1))
assert_equal(np.array('1900', dtype='M8[D]').astype('O'),
datetime.date(1900, 1, 1))
assert_equal(np.array('1901', dtype='M8[D]').astype('O'),
datetime.date(1901, 1, 1))
assert_equal(np.array('2000', dtype='M8[D]').astype('O'),
datetime.date(2000, 1, 1))
assert_equal(np.array('2001', dtype='M8[D]').astype('O'),
datetime.date(2001, 1, 1))
assert_equal(np.array('1600-02-29', dtype='M8[D]').astype('O'),
datetime.date(1600, 2, 29))
assert_equal(np.array('1600-03-01', dtype='M8[D]').astype('O'),
datetime.date(1600, 3, 1))
assert_equal(np.array('2001-03-22', dtype='M8[D]').astype('O'),
datetime.date(2001, 3, 22))
def test_dtype_comparison(self):
assert_(not (np.dtype('M8[us]') == np.dtype('M8[ms]')))
assert_(np.dtype('M8[us]') != np.dtype('M8[ms]'))
assert_(np.dtype('M8[2D]') != np.dtype('M8[D]'))
assert_(np.dtype('M8[D]') != np.dtype('M8[2D]'))
def test_pydatetime_creation(self):
a = np.array(['1960-03-12', datetime.date(1960, 3, 12)], dtype='M8[D]')
assert_equal(a[0], a[1])
a = np.array(['1999-12-31', datetime.date(1999, 12, 31)], dtype='M8[D]')
assert_equal(a[0], a[1])
a = np.array(['2000-01-01', datetime.date(2000, 1, 1)], dtype='M8[D]')
assert_equal(a[0], a[1])
# Will fail if the date changes at exactly the wrong moment
a = np.array(['today', datetime.date.today()], dtype='M8[D]')
assert_equal(a[0], a[1])
# datetime.datetime.now() returns local time, not UTC
#a = np.array(['now', datetime.datetime.now()], dtype='M8[s]')
#assert_equal(a[0], a[1])
# we can give a datetime.date time units
assert_equal(np.array(datetime.date(1960, 3, 12), dtype='M8[s]'),
np.array(np.datetime64('1960-03-12T00:00:00')))
def test_datetime_string_conversion(self):
a = ['2011-03-16', '1920-01-01', '2013-05-19']
str_a = np.array(a, dtype='S')
dt_a = np.array(a, dtype='M')
str_b = np.empty_like(str_a)
dt_b = np.empty_like(dt_a)
# String to datetime
assert_equal(dt_a, str_a.astype('M'))
assert_equal(dt_a.dtype, str_a.astype('M').dtype)
dt_b[...] = str_a
assert_equal(dt_a, dt_b)
# Datetime to string
assert_equal(str_a, dt_a.astype('S0'))
str_b[...] = dt_a
assert_equal(str_a, str_b)
# Convert the 'S' to 'U'
str_a = str_a.astype('U')
str_b = str_b.astype('U')
# Unicode to datetime
assert_equal(dt_a, str_a.astype('M'))
assert_equal(dt_a.dtype, str_a.astype('M').dtype)
dt_b[...] = str_a
assert_equal(dt_a, dt_b)
# Datetime to unicode
assert_equal(str_a, dt_a.astype('U'))
str_b[...] = dt_a
assert_equal(str_a, str_b)
def test_datetime_array_str(self):
a = np.array(['2011-03-16', '1920-01-01', '2013-05-19'], dtype='M')
assert_equal(str(a), "['2011-03-16' '1920-01-01' '2013-05-19']")
a = np.array(['2011-03-16T13:55', '1920-01-01T03:12'], dtype='M')
assert_equal(np.array2string(a, separator=', ',
formatter={'datetime': lambda x:
"'%s'" % np.datetime_as_string(x, timezone='UTC')}),
"['2011-03-16T13:55Z', '1920-01-01T03:12Z']")
# Check that one NaT doesn't corrupt subsequent entries
a = np.array(['2010', 'NaT', '2030']).astype('M')
assert_equal(str(a), "['2010' 'NaT' '2030']")
def test_timedelta_array_str(self):
a = np.array([-1, 0, 100], dtype='m')
assert_equal(str(a), "[ -1 0 100]")
a = np.array(['NaT', 'NaT'], dtype='m')
assert_equal(str(a), "['NaT' 'NaT']")
# Check right-alignment with NaTs
a = np.array([-1, 'NaT', 0], dtype='m')
assert_equal(str(a), "[ -1 'NaT' 0]")
a = np.array([-1, 'NaT', 1234567], dtype='m')
assert_equal(str(a), "[ -1 'NaT' 1234567]")
# Test with other byteorder:
a = np.array([-1, 'NaT', 1234567], dtype='>m')
assert_equal(str(a), "[ -1 'NaT' 1234567]")
a = np.array([-1, 'NaT', 1234567], dtype='<m')
assert_equal(str(a), "[ -1 'NaT' 1234567]")
def test_pickle(self):
# Check that pickle roundtripping works
dt = np.dtype('M8[7D]')
assert_equal(pickle.loads(pickle.dumps(dt)), dt)
dt = np.dtype('M8[W]')
assert_equal(pickle.loads(pickle.dumps(dt)), dt)
# Check that loading pickles from 1.6 works
pkl = "cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
"(I4\nS'<'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'D'\np6\n" + \
"I7\nI1\nI1\ntp7\ntp8\ntp9\nb."
assert_equal(pickle.loads(asbytes(pkl)), np.dtype('<M8[7D]'))
pkl = "cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
"(I4\nS'<'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'W'\np6\n" + \
"I1\nI1\nI1\ntp7\ntp8\ntp9\nb."
assert_equal(pickle.loads(asbytes(pkl)), np.dtype('<M8[W]'))
pkl = "cnumpy\ndtype\np0\n(S'M8'\np1\nI0\nI1\ntp2\nRp3\n" + \
"(I4\nS'>'\np4\nNNNI-1\nI-1\nI0\n((dp5\n(S'us'\np6\n" + \
"I1\nI1\nI1\ntp7\ntp8\ntp9\nb."
assert_equal(pickle.loads(asbytes(pkl)), np.dtype('>M8[us]'))
def test_setstate(self):
"Verify that datetime dtype __setstate__ can handle bad arguments"
dt = np.dtype('>M8[us]')
assert_raises(ValueError, dt.__setstate__, (4, '>', None, None, None, -1, -1, 0, 1))
assert_(dt.__reduce__()[2] == np.dtype('>M8[us]').__reduce__()[2])
assert_raises(TypeError, dt.__setstate__, (4, '>', None, None, None, -1, -1, 0, ({}, 'xxx')))
assert_(dt.__reduce__()[2] == np.dtype('>M8[us]').__reduce__()[2])
def test_dtype_promotion(self):
# datetime <op> datetime computes the metadata gcd
# timedelta <op> timedelta computes the metadata gcd
for mM in ['m', 'M']:
assert_equal(
np.promote_types(np.dtype(mM+'8[2Y]'), np.dtype(mM+'8[2Y]')),
np.dtype(mM+'8[2Y]'))
assert_equal(
np.promote_types(np.dtype(mM+'8[12Y]'), np.dtype(mM+'8[15Y]')),
np.dtype(mM+'8[3Y]'))
assert_equal(
np.promote_types(np.dtype(mM+'8[62M]'), np.dtype(mM+'8[24M]')),
np.dtype(mM+'8[2M]'))
assert_equal(
np.promote_types(np.dtype(mM+'8[1W]'), np.dtype(mM+'8[2D]')),
np.dtype(mM+'8[1D]'))
assert_equal(
np.promote_types(np.dtype(mM+'8[W]'), np.dtype(mM+'8[13s]')),
np.dtype(mM+'8[s]'))
assert_equal(
np.promote_types(np.dtype(mM+'8[13W]'), np.dtype(mM+'8[49s]')),
np.dtype(mM+'8[7s]'))
# timedelta <op> timedelta raises when there is no reasonable gcd
assert_raises(TypeError, np.promote_types,
np.dtype('m8[Y]'), np.dtype('m8[D]'))
assert_raises(TypeError, np.promote_types,
np.dtype('m8[M]'), np.dtype('m8[W]'))
# timedelta <op> timedelta may overflow with big unit ranges
assert_raises(OverflowError, np.promote_types,
np.dtype('m8[W]'), np.dtype('m8[fs]'))
assert_raises(OverflowError, np.promote_types,
np.dtype('m8[s]'), np.dtype('m8[as]'))
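# Worked example for the GCD rule above (illustrative): units are compared
# in a common base, e.g. 1W vs 2D is gcd(7D, 2D) = 1D, and 13W vs 49s is
# gcd(13*7*86400 s, 49 s) = gcd(7862400, 49) = 7s. The OverflowError cases
# arise because the conversion factor between units as far apart as weeks
# and femtoseconds no longer fits in the 64-bit integer arithmetic used
# internally.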
def test_cast_overflow(self):
# gh-4486
def cast():
numpy.datetime64("1971-01-01 00:00:00.000000000000000").astype("<M8[D]")
assert_raises(OverflowError, cast)
def cast2():
numpy.datetime64("2014").astype("<M8[fs]")
assert_raises(OverflowError, cast2)
def test_pyobject_roundtrip(self):
# All datetime types should be able to roundtrip through object
a = np.array([0, 0, 0, 0, 0, 0, 0, 0, 0,
-1020040340, -2942398, -1, 0, 1, 234523453, 1199164176],
dtype=np.int64)
# With date units
for unit in ['M8[D]', 'M8[W]', 'M8[M]', 'M8[Y]']:
b = a.copy().view(dtype=unit)
b[0] = '-0001-01-01'
b[1] = '-0001-12-31'
b[2] = '0000-01-01'
b[3] = '0001-01-01'
b[4] = '1969-12-31'
b[5] = '1970-01-01'
b[6] = '9999-12-31'
b[7] = '10000-01-01'
b[8] = 'NaT'
assert_equal(b.astype(object).astype(unit), b,
"Error roundtripping unit %s" % unit)
# With time units
for unit in ['M8[as]', 'M8[16fs]', 'M8[ps]', 'M8[us]',
'M8[300as]', 'M8[20us]']:
b = a.copy().view(dtype=unit)
b[0] = '-0001-01-01T00'
b[1] = '-0001-12-31T00'
b[2] = '0000-01-01T00'
b[3] = '0001-01-01T00'
b[4] = '1969-12-31T23:59:59.999999'
b[5] = '1970-01-01T00'
b[6] = '9999-12-31T23:59:59.999999'
b[7] = '10000-01-01T00'
b[8] = 'NaT'
assert_equal(b.astype(object).astype(unit), b,
"Error roundtripping unit %s" % unit)
def test_month_truncation(self):
# Make sure that months are truncating correctly
assert_equal(np.array('1945-03-01', dtype='M8[M]'),
np.array('1945-03-31', dtype='M8[M]'))
assert_equal(np.array('1969-11-01', dtype='M8[M]'),
np.array('1969-11-30T23:59:59.99999', dtype='M').astype('M8[M]'))
assert_equal(np.array('1969-12-01', dtype='M8[M]'),
np.array('1969-12-31T23:59:59.99999', dtype='M').astype('M8[M]'))
assert_equal(np.array('1970-01-01', dtype='M8[M]'),
np.array('1970-01-31T23:59:59.99999', dtype='M').astype('M8[M]'))
assert_equal(np.array('1980-02-01', dtype='M8[M]'),
np.array('1980-02-29T23:59:59.99999', dtype='M').astype('M8[M]'))
def test_different_unit_comparison(self):
# Check some years with date units
for unit1 in ['Y', 'M', 'D']:
dt1 = np.dtype('M8[%s]' % unit1)
for unit2 in ['Y', 'M', 'D']:
dt2 = np.dtype('M8[%s]' % unit2)
assert_equal(np.array('1945', dtype=dt1),
np.array('1945', dtype=dt2))
assert_equal(np.array('1970', dtype=dt1),
np.array('1970', dtype=dt2))
assert_equal(np.array('9999', dtype=dt1),
np.array('9999', dtype=dt2))
assert_equal(np.array('10000', dtype=dt1),
np.array('10000-01-01', dtype=dt2))
assert_equal(np.datetime64('1945', unit1),
np.datetime64('1945', unit2))
assert_equal(np.datetime64('1970', unit1),
np.datetime64('1970', unit2))
assert_equal(np.datetime64('9999', unit1),
np.datetime64('9999', unit2))
assert_equal(np.datetime64('10000', unit1),
np.datetime64('10000-01-01', unit2))
# Check some datetimes with time units
for unit1 in ['6h', 'h', 'm', 's', '10ms', 'ms', 'us']:
dt1 = np.dtype('M8[%s]' % unit1)
for unit2 in ['h', 'm', 's', 'ms', 'us']:
dt2 = np.dtype('M8[%s]' % unit2)
assert_equal(np.array('1945-03-12T18', dtype=dt1),
np.array('1945-03-12T18', dtype=dt2))
assert_equal(np.array('1970-03-12T18', dtype=dt1),
np.array('1970-03-12T18', dtype=dt2))
assert_equal(np.array('9999-03-12T18', dtype=dt1),
np.array('9999-03-12T18', dtype=dt2))
assert_equal(np.array('10000-01-01T00', dtype=dt1),
np.array('10000-01-01T00', dtype=dt2))
assert_equal(np.datetime64('1945-03-12T18', unit1),
np.datetime64('1945-03-12T18', unit2))
assert_equal(np.datetime64('1970-03-12T18', unit1),
np.datetime64('1970-03-12T18', unit2))
assert_equal(np.datetime64('9999-03-12T18', unit1),
np.datetime64('9999-03-12T18', unit2))
assert_equal(np.datetime64('10000-01-01T00', unit1),
np.datetime64('10000-01-01T00', unit2))
# Check some days with units that won't overflow
for unit1 in ['D', '12h', 'h', 'm', 's', '4s', 'ms', 'us']:
dt1 = np.dtype('M8[%s]' % unit1)
for unit2 in ['D', 'h', 'm', 's', 'ms', 'us']:
dt2 = np.dtype('M8[%s]' % unit2)
assert_(np.equal(np.array('1932-02-17', dtype='M').astype(dt1),
np.array('1932-02-17T00:00:00', dtype='M').astype(dt2),
casting='unsafe'))
assert_(np.equal(np.array('10000-04-27', dtype='M').astype(dt1),
np.array('10000-04-27T00:00:00', dtype='M').astype(dt2),
casting='unsafe'))
# Shouldn't be able to compare datetime and timedelta
# TODO: Changing to 'same_kind' or 'safe' casting in the ufuncs by
# default is needed to properly catch this kind of thing...
a = np.array('2012-12-21', dtype='M8[D]')
b = np.array(3, dtype='m8[D]')
#assert_raises(TypeError, np.less, a, b)
assert_raises(TypeError, np.less, a, b, casting='same_kind')
def test_datetime_like(self):
a = np.array([3], dtype='m8[4D]')
b = np.array(['2012-12-21'], dtype='M8[D]')
assert_equal(np.ones_like(a).dtype, a.dtype)
assert_equal(np.zeros_like(a).dtype, a.dtype)
assert_equal(np.empty_like(a).dtype, a.dtype)
assert_equal(np.ones_like(b).dtype, b.dtype)
assert_equal(np.zeros_like(b).dtype, b.dtype)
assert_equal(np.empty_like(b).dtype, b.dtype)
def test_datetime_unary(self):
for tda, tdb, tdzero, tdone, tdmone in \
[
# One-dimensional arrays
(np.array([3], dtype='m8[D]'),
np.array([-3], dtype='m8[D]'),
np.array([0], dtype='m8[D]'),
np.array([1], dtype='m8[D]'),
np.array([-1], dtype='m8[D]')),
# NumPy scalars
(np.timedelta64(3, '[D]'),
np.timedelta64(-3, '[D]'),
np.timedelta64(0, '[D]'),
np.timedelta64(1, '[D]'),
np.timedelta64(-1, '[D]'))]:
# negative ufunc
assert_equal(-tdb, tda)
assert_equal((-tdb).dtype, tda.dtype)
assert_equal(np.negative(tdb), tda)
assert_equal(np.negative(tdb).dtype, tda.dtype)
# absolute ufunc
assert_equal(np.absolute(tdb), tda)
assert_equal(np.absolute(tdb).dtype, tda.dtype)
# sign ufunc
assert_equal(np.sign(tda), tdone)
assert_equal(np.sign(tdb), tdmone)
assert_equal(np.sign(tdzero), tdzero)
assert_equal(np.sign(tda).dtype, tda.dtype)
# The ufuncs always produce native-endian results
assert_(np.negative(tdb).dtype.isnative)
def test_datetime_add(self):
for dta, dtb, dtc, dtnat, tda, tdb, tdc in \
[
# One-dimensional arrays
(np.array(['2012-12-21'], dtype='M8[D]'),
np.array(['2012-12-24'], dtype='M8[D]'),
np.array(['2012-12-21T11'], dtype='M8[h]'),
np.array(['NaT'], dtype='M8[D]'),
np.array([3], dtype='m8[D]'),
np.array([11], dtype='m8[h]'),
np.array([3*24 + 11], dtype='m8[h]')),
# NumPy scalars
(np.datetime64('2012-12-21', '[D]'),
np.datetime64('2012-12-24', '[D]'),
np.datetime64('2012-12-21T11', '[h]'),
np.datetime64('NaT', '[D]'),
np.timedelta64(3, '[D]'),
np.timedelta64(11, '[h]'),
np.timedelta64(3*24 + 11, '[h]'))]:
# m8 + m8
assert_equal(tda + tdb, tdc)
assert_equal((tda + tdb).dtype, np.dtype('m8[h]'))
# m8 + bool
assert_equal(tdb + True, tdb + 1)
assert_equal((tdb + True).dtype, np.dtype('m8[h]'))
# m8 + int
assert_equal(tdb + 3*24, tdc)
assert_equal((tdb + 3*24).dtype, np.dtype('m8[h]'))
# bool + m8
assert_equal(False + tdb, tdb)
assert_equal((False + tdb).dtype, np.dtype('m8[h]'))
# int + m8
assert_equal(3*24 + tdb, tdc)
assert_equal((3*24 + tdb).dtype, np.dtype('m8[h]'))
# M8 + bool
assert_equal(dta + True, dta + 1)
assert_equal(dtnat + True, dtnat)
assert_equal((dta + True).dtype, np.dtype('M8[D]'))
# M8 + int
assert_equal(dta + 3, dtb)
assert_equal(dtnat + 3, dtnat)
assert_equal((dta + 3).dtype, np.dtype('M8[D]'))
# bool + M8
assert_equal(False + dta, dta)
assert_equal(False + dtnat, dtnat)
assert_equal((False + dta).dtype, np.dtype('M8[D]'))
# int + M8
assert_equal(3 + dta, dtb)
assert_equal(3 + dtnat, dtnat)
assert_equal((3 + dta).dtype, np.dtype('M8[D]'))
# M8 + m8
assert_equal(dta + tda, dtb)
assert_equal(dtnat + tda, dtnat)
assert_equal((dta + tda).dtype, np.dtype('M8[D]'))
# m8 + M8
assert_equal(tda + dta, dtb)
assert_equal(tda + dtnat, dtnat)
assert_equal((tda + dta).dtype, np.dtype('M8[D]'))
# In M8 + m8, the result goes to higher precision
assert_equal(np.add(dta, tdb, casting='unsafe'), dtc)
assert_equal(np.add(dta, tdb, casting='unsafe').dtype,
np.dtype('M8[h]'))
assert_equal(np.add(tdb, dta, casting='unsafe'), dtc)
assert_equal(np.add(tdb, dta, casting='unsafe').dtype,
np.dtype('M8[h]'))
# M8 + M8
assert_raises(TypeError, np.add, dta, dtb)
def test_datetime_subtract(self):
for dta, dtb, dtc, dtd, dte, dtnat, tda, tdb, tdc in \
[
# One-dimensional arrays
(np.array(['2012-12-21'], dtype='M8[D]'),
np.array(['2012-12-24'], dtype='M8[D]'),
np.array(['1940-12-24'], dtype='M8[D]'),
np.array(['1940-12-24T00'], dtype='M8[h]'),
np.array(['1940-12-23T13'], dtype='M8[h]'),
np.array(['NaT'], dtype='M8[D]'),
np.array([3], dtype='m8[D]'),
np.array([11], dtype='m8[h]'),
np.array([3*24 - 11], dtype='m8[h]')),
# NumPy scalars
(np.datetime64('2012-12-21', '[D]'),
np.datetime64('2012-12-24', '[D]'),
np.datetime64('1940-12-24', '[D]'),
np.datetime64('1940-12-24T00', '[h]'),
np.datetime64('1940-12-23T13', '[h]'),
np.datetime64('NaT', '[D]'),
np.timedelta64(3, '[D]'),
np.timedelta64(11, '[h]'),
np.timedelta64(3*24 - 11, '[h]'))]:
# m8 - m8
assert_equal(tda - tdb, tdc)
assert_equal((tda - tdb).dtype, np.dtype('m8[h]'))
assert_equal(tdb - tda, -tdc)
assert_equal((tdb - tda).dtype, np.dtype('m8[h]'))
# m8 - bool
assert_equal(tdc - True, tdc - 1)
assert_equal((tdc - True).dtype, np.dtype('m8[h]'))
# m8 - int
assert_equal(tdc - 3*24, -tdb)
assert_equal((tdc - 3*24).dtype, np.dtype('m8[h]'))
# bool - m8
assert_equal(False - tdb, -tdb)
assert_equal((False - tdb).dtype, np.dtype('m8[h]'))
# int - m8
assert_equal(3*24 - tdb, tdc)
assert_equal((3*24 - tdb).dtype, np.dtype('m8[h]'))
# M8 - bool
assert_equal(dtb - True, dtb - 1)
assert_equal(dtnat - True, dtnat)
assert_equal((dtb - True).dtype, np.dtype('M8[D]'))
# M8 - int
assert_equal(dtb - 3, dta)
assert_equal(dtnat - 3, dtnat)
assert_equal((dtb - 3).dtype, np.dtype('M8[D]'))
# M8 - m8
assert_equal(dtb - tda, dta)
assert_equal(dtnat - tda, dtnat)
assert_equal((dtb - tda).dtype, np.dtype('M8[D]'))
# In M8 - m8, the result goes to higher precision
assert_equal(np.subtract(dtc, tdb, casting='unsafe'), dte)
assert_equal(np.subtract(dtc, tdb, casting='unsafe').dtype,
np.dtype('M8[h]'))
# M8 - M8 with different units goes to higher precision
assert_equal(np.subtract(dtc, dtd, casting='unsafe'),
np.timedelta64(0, 'h'))
assert_equal(np.subtract(dtc, dtd, casting='unsafe').dtype,
np.dtype('m8[h]'))
assert_equal(np.subtract(dtd, dtc, casting='unsafe'),
np.timedelta64(0, 'h'))
assert_equal(np.subtract(dtd, dtc, casting='unsafe').dtype,
np.dtype('m8[h]'))
# m8 - M8
assert_raises(TypeError, np.subtract, tda, dta)
# bool - M8
assert_raises(TypeError, np.subtract, False, dta)
# int - M8
assert_raises(TypeError, np.subtract, 3, dta)
def test_datetime_multiply(self):
for dta, tda, tdb, tdc in \
[
# One-dimensional arrays
(np.array(['2012-12-21'], dtype='M8[D]'),
np.array([6], dtype='m8[h]'),
np.array([9], dtype='m8[h]'),
np.array([12], dtype='m8[h]')),
# NumPy scalars
(np.datetime64('2012-12-21', '[D]'),
np.timedelta64(6, '[h]'),
np.timedelta64(9, '[h]'),
np.timedelta64(12, '[h]'))]:
# m8 * int
assert_equal(tda * 2, tdc)
assert_equal((tda * 2).dtype, np.dtype('m8[h]'))
# int * m8
assert_equal(2 * tda, tdc)
assert_equal((2 * tda).dtype, np.dtype('m8[h]'))
# m8 * float
assert_equal(tda * 1.5, tdb)
assert_equal((tda * 1.5).dtype, np.dtype('m8[h]'))
# float * m8
assert_equal(1.5 * tda, tdb)
assert_equal((1.5 * tda).dtype, np.dtype('m8[h]'))
# m8 * m8
assert_raises(TypeError, np.multiply, tda, tdb)
# m8 * M8
assert_raises(TypeError, np.multiply, dta, tda)
# M8 * m8
assert_raises(TypeError, np.multiply, tda, dta)
# M8 * int
assert_raises(TypeError, np.multiply, dta, 2)
# int * M8
assert_raises(TypeError, np.multiply, 2, dta)
# M8 * float
assert_raises(TypeError, np.multiply, dta, 1.5)
# float * M8
assert_raises(TypeError, np.multiply, 1.5, dta)
# NaTs
with suppress_warnings() as sup:
sup.filter(RuntimeWarning, "invalid value encountered in multiply")
nat = np.timedelta64('NaT')
def check(a, b, res):
assert_equal(a * b, res)
assert_equal(b * a, res)
for tp in (int, float):
check(nat, tp(2), nat)
check(nat, tp(0), nat)
for f in (float('inf'), float('nan')):
check(np.timedelta64(1), f, nat)
check(np.timedelta64(0), f, nat)
check(nat, f, nat)
def test_datetime_divide(self):
for dta, tda, tdb, tdc, tdd in \
[
# One-dimensional arrays
(np.array(['2012-12-21'], dtype='M8[D]'),
np.array([6], dtype='m8[h]'),
np.array([9], dtype='m8[h]'),
np.array([12], dtype='m8[h]'),
np.array([6], dtype='m8[m]')),
# NumPy scalars
(np.datetime64('2012-12-21', '[D]'),
np.timedelta64(6, '[h]'),
np.timedelta64(9, '[h]'),
np.timedelta64(12, '[h]'),
np.timedelta64(6, '[m]'))]:
# m8 / int
assert_equal(tdc / 2, tda)
assert_equal((tdc / 2).dtype, np.dtype('m8[h]'))
# m8 / float
assert_equal(tda / 0.5, tdc)
assert_equal((tda / 0.5).dtype, np.dtype('m8[h]'))
# m8 / m8
assert_equal(tda / tdb, 6.0 / 9.0)
assert_equal(np.divide(tda, tdb), 6.0 / 9.0)
assert_equal(np.true_divide(tda, tdb), 6.0 / 9.0)
assert_equal(tdb / tda, 9.0 / 6.0)
assert_equal((tda / tdb).dtype, np.dtype('f8'))
assert_equal(tda / tdd, 60.0)
assert_equal(tdd / tda, 1.0 / 60.0)
# m8 // m8
assert_raises(TypeError, np.floor_divide, tda, tdb)
# int / m8
assert_raises(TypeError, np.divide, 2, tdb)
# float / m8
assert_raises(TypeError, np.divide, 0.5, tdb)
# m8 / M8
assert_raises(TypeError, np.divide, dta, tda)
# M8 / m8
assert_raises(TypeError, np.divide, tda, dta)
# M8 / int
assert_raises(TypeError, np.divide, dta, 2)
# int / M8
assert_raises(TypeError, np.divide, 2, dta)
# M8 / float
assert_raises(TypeError, np.divide, dta, 1.5)
# float / M8
assert_raises(TypeError, np.divide, 1.5, dta)
# NaTs
with suppress_warnings() as sup:
sup.filter(RuntimeWarning, r".*encountered in true_divide")
nat = np.timedelta64('NaT')
for tp in (int, float):
assert_equal(np.timedelta64(1) / tp(0), nat)
assert_equal(np.timedelta64(0) / tp(0), nat)
assert_equal(nat / tp(0), nat)
assert_equal(nat / tp(2), nat)
# Division by inf
assert_equal(np.timedelta64(1) / float('inf'), np.timedelta64(0))
assert_equal(np.timedelta64(0) / float('inf'), np.timedelta64(0))
assert_equal(nat / float('inf'), nat)
# Division by nan
assert_equal(np.timedelta64(1) / float('nan'), nat)
assert_equal(np.timedelta64(0) / float('nan'), nat)
assert_equal(nat / float('nan'), nat)
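# In short (illustrative summary of the checks above): dividing a timedelta
# by zero, nan, or NaT yields NaT, while dividing by inf collapses any
# finite timedelta to 0 -- loosely mirroring IEEE float semantics, with NaT
# playing the role of nan.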
def test_datetime_compare(self):
# Test all the comparison operators
a = np.datetime64('2000-03-12T18:00:00.000000')
b = np.array(['2000-03-12T18:00:00.000000',
'2000-03-12T17:59:59.999999',
'2000-03-12T18:00:00.000001',
'1970-01-11T12:00:00.909090',
'2016-01-11T12:00:00.909090'],
dtype='datetime64[us]')
assert_equal(np.equal(a, b), [1, 0, 0, 0, 0])
assert_equal(np.not_equal(a, b), [0, 1, 1, 1, 1])
assert_equal(np.less(a, b), [0, 0, 1, 0, 1])
assert_equal(np.less_equal(a, b), [1, 0, 1, 0, 1])
assert_equal(np.greater(a, b), [0, 1, 0, 1, 0])
assert_equal(np.greater_equal(a, b), [1, 1, 0, 1, 0])
def test_datetime_compare_nat(self):
dt_nat = np.datetime64('NaT', 'D')
dt_other = np.datetime64('2000-01-01')
td_nat = np.timedelta64('NaT', 'h')
td_other = np.timedelta64(1, 'h')
with suppress_warnings() as sup:
# The assert warns contexts will again see the warning:
sup.filter(FutureWarning, ".*NAT")
for op in [np.equal, np.less, np.less_equal,
np.greater, np.greater_equal]:
if op(dt_nat, dt_nat):
assert_warns(FutureWarning, op, dt_nat, dt_nat)
if op(dt_nat, dt_other):
assert_warns(FutureWarning, op, dt_nat, dt_other)
if op(dt_other, dt_nat):
assert_warns(FutureWarning, op, dt_other, dt_nat)
if op(td_nat, td_nat):
assert_warns(FutureWarning, op, td_nat, td_nat)
if op(td_nat, td_other):
assert_warns(FutureWarning, op, td_nat, td_other)
if op(td_other, td_nat):
assert_warns(FutureWarning, op, td_other, td_nat)
assert_warns(FutureWarning, np.not_equal, dt_nat, dt_nat)
assert_(np.not_equal(dt_nat, dt_other))
assert_(np.not_equal(dt_other, dt_nat))
assert_warns(FutureWarning, np.not_equal, td_nat, td_nat)
assert_(np.not_equal(td_nat, td_other))
assert_(np.not_equal(td_other, td_nat))
def test_datetime_minmax(self):
# The metadata of the result should become the GCD
# of the operand metadata
a = np.array('1999-03-12T13', dtype='M8[2m]')
b = np.array('1999-03-12T12', dtype='M8[s]')
assert_equal(np.minimum(a, b), b)
assert_equal(np.minimum(a, b).dtype, np.dtype('M8[s]'))
assert_equal(np.fmin(a, b), b)
assert_equal(np.fmin(a, b).dtype, np.dtype('M8[s]'))
assert_equal(np.maximum(a, b), a)
assert_equal(np.maximum(a, b).dtype, np.dtype('M8[s]'))
assert_equal(np.fmax(a, b), a)
assert_equal(np.fmax(a, b).dtype, np.dtype('M8[s]'))
# Viewed as integers, the comparison is opposite because
# of the units chosen
assert_equal(np.minimum(a.view('i8'), b.view('i8')), a.view('i8'))
# Interaction with NaT
a = np.array('1999-03-12T13', dtype='M8[2m]')
dtnat = np.array('NaT', dtype='M8[h]')
assert_equal(np.minimum(a, dtnat), a)
assert_equal(np.minimum(dtnat, a), a)
assert_equal(np.maximum(a, dtnat), a)
assert_equal(np.maximum(dtnat, a), a)
# Also do timedelta
a = np.array(3, dtype='m8[h]')
b = np.array(3*3600 - 3, dtype='m8[s]')
assert_equal(np.minimum(a, b), b)
assert_equal(np.minimum(a, b).dtype, np.dtype('m8[s]'))
assert_equal(np.fmin(a, b), b)
assert_equal(np.fmin(a, b).dtype, np.dtype('m8[s]'))
assert_equal(np.maximum(a, b), a)
assert_equal(np.maximum(a, b).dtype, np.dtype('m8[s]'))
assert_equal(np.fmax(a, b), a)
assert_equal(np.fmax(a, b).dtype, np.dtype('m8[s]'))
# Viewed as integers, the comparison is opposite because
# of the units chosen
assert_equal(np.minimum(a.view('i8'), b.view('i8')), a.view('i8'))
# should raise between datetime and timedelta
#
# TODO: Allowing unsafe casting by
# default in ufuncs strikes again... :(
a = np.array(3, dtype='m8[h]')
b = np.array('1999-03-12T12', dtype='M8[s]')
#assert_raises(TypeError, np.minimum, a, b)
#assert_raises(TypeError, np.maximum, a, b)
#assert_raises(TypeError, np.fmin, a, b)
#assert_raises(TypeError, np.fmax, a, b)
assert_raises(TypeError, np.minimum, a, b, casting='same_kind')
assert_raises(TypeError, np.maximum, a, b, casting='same_kind')
assert_raises(TypeError, np.fmin, a, b, casting='same_kind')
assert_raises(TypeError, np.fmax, a, b, casting='same_kind')
def test_hours(self):
t = np.ones(3, dtype='M8[s]')
t[0] = 60*60*24 + 60*60*10
assert_(t[0].item().hour == 10)
def test_divisor_conversion_year(self):
assert_(np.dtype('M8[Y/4]') == np.dtype('M8[3M]'))
assert_(np.dtype('M8[Y/13]') == np.dtype('M8[4W]'))
assert_(np.dtype('M8[3Y/73]') == np.dtype('M8[15D]'))
def test_divisor_conversion_month(self):
assert_(np.dtype('M8[M/2]') == np.dtype('M8[2W]'))
assert_(np.dtype('M8[M/15]') == np.dtype('M8[2D]'))
assert_(np.dtype('M8[3M/40]') == np.dtype('M8[54h]'))
def test_divisor_conversion_week(self):
assert_(np.dtype('m8[W/7]') == np.dtype('m8[D]'))
assert_(np.dtype('m8[3W/14]') == np.dtype('m8[36h]'))
assert_(np.dtype('m8[5W/140]') == np.dtype('m8[360m]'))
def test_divisor_conversion_day(self):
assert_(np.dtype('M8[D/12]') == np.dtype('M8[2h]'))
assert_(np.dtype('M8[D/120]') == np.dtype('M8[12m]'))
assert_(np.dtype('M8[3D/960]') == np.dtype('M8[270s]'))
def test_divisor_conversion_hour(self):
assert_(np.dtype('m8[h/30]') == np.dtype('m8[2m]'))
assert_(np.dtype('m8[3h/300]') == np.dtype('m8[36s]'))
def test_divisor_conversion_minute(self):
assert_(np.dtype('m8[m/30]') == np.dtype('m8[2s]'))
assert_(np.dtype('m8[3m/300]') == np.dtype('m8[600ms]'))
def test_divisor_conversion_second(self):
assert_(np.dtype('m8[s/100]') == np.dtype('m8[10ms]'))
assert_(np.dtype('m8[3s/10000]') == np.dtype('m8[300us]'))
def test_divisor_conversion_fs(self):
assert_(np.dtype('M8[fs/100]') == np.dtype('M8[10as]'))
self.assertRaises(ValueError, lambda: np.dtype('M8[3fs/10000]'))
def test_divisor_conversion_as(self):
self.assertRaises(ValueError, lambda: np.dtype('M8[as/10]'))
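# How divided units normalize (illustrative): the divisor is folded into a
# smaller base unit until the division is exact, e.g. M8[Y/4] ->
# 12 months / 4 -> M8[3M], and m8[W/7] -> 7 days / 7 -> m8[D]. The
# ValueError cases have no exact representation: 3 fs / 10000 would be
# 0.3 as, finer than the smallest supported unit (attoseconds).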
def test_string_parser_variants(self):
# Allow space instead of 'T' between date and time
assert_equal(np.array(['1980-02-29T01:02:03'], np.dtype('M8[s]')),
np.array(['1980-02-29 01:02:03'], np.dtype('M8[s]')))
# Allow negative years
assert_equal(np.array(['-1980-02-29T01:02:03'], np.dtype('M8[s]')),
np.array(['-1980-02-29 01:02:03'], np.dtype('M8[s]')))
# UTC specifier
with assert_warns(DeprecationWarning):
assert_equal(
np.array(['-1980-02-29T01:02:03'], np.dtype('M8[s]')),
np.array(['-1980-02-29 01:02:03Z'], np.dtype('M8[s]')))
# Time zone offset
with assert_warns(DeprecationWarning):
assert_equal(
np.array(['1980-02-29T02:02:03'], np.dtype('M8[s]')),
np.array(['1980-02-29 00:32:03-0130'], np.dtype('M8[s]')))
with assert_warns(DeprecationWarning):
assert_equal(
np.array(['1980-02-28T22:32:03'], np.dtype('M8[s]')),
np.array(['1980-02-29 00:02:03+01:30'], np.dtype('M8[s]')))
with assert_warns(DeprecationWarning):
assert_equal(
np.array(['1980-02-29T02:32:03.506'], np.dtype('M8[s]')),
np.array(['1980-02-29 00:32:03.506-02'], np.dtype('M8[s]')))
with assert_warns(DeprecationWarning):
assert_equal(np.datetime64('1977-03-02T12:30-0230'),
np.datetime64('1977-03-02T15:00'))
def test_string_parser_error_check(self):
# Arbitrary bad string
assert_raises(ValueError, np.array, ['badvalue'], np.dtype('M8[us]'))
# Character after year must be '-'
assert_raises(ValueError, np.array, ['1980X'], np.dtype('M8[us]'))
# Cannot have trailing '-'
assert_raises(ValueError, np.array, ['1980-'], np.dtype('M8[us]'))
# Month must be in range [1,12]
assert_raises(ValueError, np.array, ['1980-00'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-13'], np.dtype('M8[us]'))
# Month must have two digits
assert_raises(ValueError, np.array, ['1980-1'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-1-02'], np.dtype('M8[us]'))
# 'Mor' is not a valid month
assert_raises(ValueError, np.array, ['1980-Mor'], np.dtype('M8[us]'))
# Cannot have trailing '-'
assert_raises(ValueError, np.array, ['1980-01-'], np.dtype('M8[us]'))
# Day must be in range [1,len(month)]
assert_raises(ValueError, np.array, ['1980-01-0'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-01-00'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-01-32'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1979-02-29'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-02-30'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-03-32'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-04-31'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-05-32'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-06-31'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-07-32'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-08-32'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-09-31'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-10-32'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-11-31'], np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-12-32'], np.dtype('M8[us]'))
# Cannot have trailing characters
assert_raises(ValueError, np.array, ['1980-02-03%'],
np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-02-03 q'],
np.dtype('M8[us]'))
# Hours must be in range [0, 23]
assert_raises(ValueError, np.array, ['1980-02-03 25'],
np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-02-03T25'],
np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-02-03 24:01'],
np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-02-03T24:01'],
np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-02-03 -1'],
np.dtype('M8[us]'))
# No trailing ':'
assert_raises(ValueError, np.array, ['1980-02-03 01:'],
np.dtype('M8[us]'))
# Minutes must be in range [0, 59]
assert_raises(ValueError, np.array, ['1980-02-03 01:-1'],
np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-02-03 01:60'],
np.dtype('M8[us]'))
# No trailing ':'
assert_raises(ValueError, np.array, ['1980-02-03 01:60:'],
np.dtype('M8[us]'))
# Seconds must be in range [0, 59]
assert_raises(ValueError, np.array, ['1980-02-03 01:10:-1'],
np.dtype('M8[us]'))
assert_raises(ValueError, np.array, ['1980-02-03 01:01:60'],
np.dtype('M8[us]'))
# Timezone offset must be within a reasonable range
with assert_warns(DeprecationWarning):
assert_raises(ValueError, np.array, ['1980-02-03 01:01:00+0661'],
np.dtype('M8[us]'))
with assert_warns(DeprecationWarning):
assert_raises(ValueError, np.array, ['1980-02-03 01:01:00+2500'],
np.dtype('M8[us]'))
with assert_warns(DeprecationWarning):
assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-0070'],
np.dtype('M8[us]'))
with assert_warns(DeprecationWarning):
assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-3000'],
np.dtype('M8[us]'))
with assert_warns(DeprecationWarning):
assert_raises(ValueError, np.array, ['1980-02-03 01:01:00-25:00'],
np.dtype('M8[us]'))
def test_creation_overflow(self):
date = '1980-03-23 20:00:00'
timesteps = np.array([date], dtype='datetime64[s]')[0].astype(np.int64)
for unit in ['ms', 'us', 'ns']:
timesteps *= 1000
x = np.array([date], dtype='datetime64[%s]' % unit)
assert_equal(timesteps, x[0].astype(np.int64),
err_msg='Datetime conversion error for unit %s' % unit)
assert_equal(x[0].astype(np.int64), 322689600000000000)
def test_datetime_as_string(self):
# Check all the units with default string conversion
date = '1959-10-13'
datetime = '1959-10-13T12:34:56.789012345678901234'
assert_equal(np.datetime_as_string(np.datetime64(date, 'Y')),
'1959')
assert_equal(np.datetime_as_string(np.datetime64(date, 'M')),
'1959-10')
assert_equal(np.datetime_as_string(np.datetime64(date, 'D')),
'1959-10-13')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'h')),
'1959-10-13T12')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'm')),
'1959-10-13T12:34')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 's')),
'1959-10-13T12:34:56')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ms')),
'1959-10-13T12:34:56.789')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'us')),
'1959-10-13T12:34:56.789012')
datetime = '1969-12-31T23:34:56.789012345678901234'
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ns')),
'1969-12-31T23:34:56.789012345')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ps')),
'1969-12-31T23:34:56.789012345678')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'fs')),
'1969-12-31T23:34:56.789012345678901')
datetime = '1969-12-31T23:59:57.789012345678901234'
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'as')),
datetime)
datetime = '1970-01-01T00:34:56.789012345678901234'
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ns')),
'1970-01-01T00:34:56.789012345')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'ps')),
'1970-01-01T00:34:56.789012345678')
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'fs')),
'1970-01-01T00:34:56.789012345678901')
datetime = '1970-01-01T00:00:05.789012345678901234'
assert_equal(np.datetime_as_string(np.datetime64(datetime, 'as')),
datetime)
# String conversion with the unit= parameter
a = np.datetime64('2032-07-18T12:23:34.123456', 'us')
assert_equal(np.datetime_as_string(a, unit='Y', casting='unsafe'),
'2032')
assert_equal(np.datetime_as_string(a, unit='M', casting='unsafe'),
'2032-07')
assert_equal(np.datetime_as_string(a, unit='W', casting='unsafe'),
'2032-07-18')
assert_equal(np.datetime_as_string(a, unit='D', casting='unsafe'),
'2032-07-18')
assert_equal(np.datetime_as_string(a, unit='h'), '2032-07-18T12')
assert_equal(np.datetime_as_string(a, unit='m'),
'2032-07-18T12:23')
assert_equal(np.datetime_as_string(a, unit='s'),
'2032-07-18T12:23:34')
assert_equal(np.datetime_as_string(a, unit='ms'),
'2032-07-18T12:23:34.123')
assert_equal(np.datetime_as_string(a, unit='us'),
'2032-07-18T12:23:34.123456')
assert_equal(np.datetime_as_string(a, unit='ns'),
'2032-07-18T12:23:34.123456000')
assert_equal(np.datetime_as_string(a, unit='ps'),
'2032-07-18T12:23:34.123456000000')
assert_equal(np.datetime_as_string(a, unit='fs'),
'2032-07-18T12:23:34.123456000000000')
assert_equal(np.datetime_as_string(a, unit='as'),
'2032-07-18T12:23:34.123456000000000000')
# unit='auto' parameter
assert_equal(np.datetime_as_string(
np.datetime64('2032-07-18T12:23:34.123456', 'us'), unit='auto'),
'2032-07-18T12:23:34.123456')
assert_equal(np.datetime_as_string(
np.datetime64('2032-07-18T12:23:34.12', 'us'), unit='auto'),
'2032-07-18T12:23:34.120')
assert_equal(np.datetime_as_string(
np.datetime64('2032-07-18T12:23:34', 'us'), unit='auto'),
'2032-07-18T12:23:34')
assert_equal(np.datetime_as_string(
np.datetime64('2032-07-18T12:23:00', 'us'), unit='auto'),
'2032-07-18T12:23')
# 'auto' doesn't split up hour and minute
assert_equal(np.datetime_as_string(
np.datetime64('2032-07-18T12:00:00', 'us'), unit='auto'),
'2032-07-18T12:00')
assert_equal(np.datetime_as_string(
np.datetime64('2032-07-18T00:00:00', 'us'), unit='auto'),
'2032-07-18')
# 'auto' doesn't split up the date
assert_equal(np.datetime_as_string(
np.datetime64('2032-07-01T00:00:00', 'us'), unit='auto'),
'2032-07-01')
assert_equal(np.datetime_as_string(
np.datetime64('2032-01-01T00:00:00', 'us'), unit='auto'),
'2032-01-01')
@dec.skipif(not _has_pytz, "The pytz module is not available.")
def test_datetime_as_string_timezone(self):
# timezone='local' vs 'UTC'
a = np.datetime64('2010-03-15T06:30', 'm')
assert_equal(np.datetime_as_string(a),
'2010-03-15T06:30')
assert_equal(np.datetime_as_string(a, timezone='naive'),
'2010-03-15T06:30')
assert_equal(np.datetime_as_string(a, timezone='UTC'),
'2010-03-15T06:30Z')
assert_(np.datetime_as_string(a, timezone='local') !=
'2010-03-15T06:30')
b = np.datetime64('2010-02-15T06:30', 'm')
assert_equal(np.datetime_as_string(a, timezone=tz('US/Central')),
'2010-03-15T01:30-0500')
assert_equal(np.datetime_as_string(a, timezone=tz('US/Eastern')),
'2010-03-15T02:30-0400')
assert_equal(np.datetime_as_string(a, timezone=tz('US/Pacific')),
'2010-03-14T23:30-0700')
assert_equal(np.datetime_as_string(b, timezone=tz('US/Central')),
'2010-02-15T00:30-0600')
assert_equal(np.datetime_as_string(b, timezone=tz('US/Eastern')),
'2010-02-15T01:30-0500')
assert_equal(np.datetime_as_string(b, timezone=tz('US/Pacific')),
'2010-02-14T22:30-0800')
# Dates to strings with a timezone attached is disabled by default
assert_raises(TypeError, np.datetime_as_string, a, unit='D',
timezone=tz('US/Pacific'))
# Check that we can print out the date in the specified time zone
assert_equal(np.datetime_as_string(a, unit='D',
timezone=tz('US/Pacific'), casting='unsafe'),
'2010-03-14')
assert_equal(np.datetime_as_string(b, unit='D',
timezone=tz('US/Central'), casting='unsafe'),
'2010-02-15')
def test_datetime_arange(self):
# With two datetimes provided as strings
a = np.arange('2010-01-05', '2010-01-10', dtype='M8[D]')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.array(['2010-01-05', '2010-01-06', '2010-01-07',
'2010-01-08', '2010-01-09'], dtype='M8[D]'))
a = np.arange('1950-02-10', '1950-02-06', -1, dtype='M8[D]')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.array(['1950-02-10', '1950-02-09', '1950-02-08',
'1950-02-07'], dtype='M8[D]'))
# Unit should be detected as months here
a = np.arange('1969-05', '1970-05', 2, dtype='M8')
assert_equal(a.dtype, np.dtype('M8[M]'))
assert_equal(a,
np.datetime64('1969-05') + np.arange(12, step=2))
# A datetime start with an integer or timedelta stop works as well,
# producing the range [start, start + stop) in this case
a = np.arange('1969', 18, 3, dtype='M8')
assert_equal(a.dtype, np.dtype('M8[Y]'))
assert_equal(a,
np.datetime64('1969') + np.arange(18, step=3))
a = np.arange('1969-12-19', 22, np.timedelta64(2), dtype='M8')
assert_equal(a.dtype, np.dtype('M8[D]'))
assert_equal(a,
np.datetime64('1969-12-19') + np.arange(22, step=2))
# Step of 0 is disallowed
assert_raises(ValueError, np.arange, np.datetime64('today'),
np.datetime64('today') + 3, 0)
# Promotion across nonlinear unit boundaries is disallowed
assert_raises(TypeError, np.arange, np.datetime64('2011-03-01', 'D'),
np.timedelta64(5, 'M'))
assert_raises(TypeError, np.arange,
np.datetime64('2012-02-03T14', 's'),
np.timedelta64(5, 'Y'))
def test_datetime_arange_no_dtype(self):
d = np.array('2010-01-04', dtype="M8[D]")
assert_equal(np.arange(d, d + 1), d)
assert_raises(ValueError, np.arange, d)
def test_timedelta_arange(self):
a = np.arange(3, 10, dtype='m8')
assert_equal(a.dtype, np.dtype('m8'))
assert_equal(a, np.timedelta64(0) + np.arange(3, 10))
a = np.arange(np.timedelta64(3, 's'), 10, 2, dtype='m8')
assert_equal(a.dtype, np.dtype('m8[s]'))
assert_equal(a, np.timedelta64(0, 's') + np.arange(3, 10, 2))
# Step of 0 is disallowed
assert_raises(ValueError, np.arange, np.timedelta64(0),
np.timedelta64(5), 0)
# Promotion across nonlinear unit boundaries is disallowed
assert_raises(TypeError, np.arange, np.timedelta64(0, 'D'),
np.timedelta64(5, 'M'))
assert_raises(TypeError, np.arange, np.timedelta64(0, 'Y'),
np.timedelta64(5, 'D'))
def test_timedelta_arange_no_dtype(self):
d = np.array(5, dtype="m8[D]")
assert_equal(np.arange(d, d + 1), d)
assert_raises(ValueError, np.arange, d)
def test_datetime_maximum_reduce(self):
a = np.array(['2010-01-02', '1999-03-14', '1833-03'], dtype='M8[D]')
assert_equal(np.maximum.reduce(a).dtype, np.dtype('M8[D]'))
assert_equal(np.maximum.reduce(a),
np.datetime64('2010-01-02'))
a = np.array([1, 4, 0, 7, 2], dtype='m8[s]')
assert_equal(np.maximum.reduce(a).dtype, np.dtype('m8[s]'))
assert_equal(np.maximum.reduce(a),
np.timedelta64(7, 's'))
def test_datetime_busday_offset(self):
# First Monday in June
assert_equal(
np.busday_offset('2011-06', 0, roll='forward', weekmask='Mon'),
np.datetime64('2011-06-06'))
# Last Monday in June
assert_equal(
np.busday_offset('2011-07', -1, roll='forward', weekmask='Mon'),
np.datetime64('2011-06-27'))
# Default M-F business days, different roll modes
assert_equal(np.busday_offset('2010-08', 0, roll='backward'),
np.datetime64('2010-07-30'))
assert_equal(np.busday_offset('2010-08', 0, roll='preceding'),
np.datetime64('2010-07-30'))
assert_equal(np.busday_offset('2010-08', 0, roll='modifiedpreceding'),
np.datetime64('2010-08-02'))
assert_equal(np.busday_offset('2010-08', 0, roll='modifiedfollowing'),
np.datetime64('2010-08-02'))
assert_equal(np.busday_offset('2010-08', 0, roll='forward'),
np.datetime64('2010-08-02'))
assert_equal(np.busday_offset('2010-08', 0, roll='following'),
np.datetime64('2010-08-02'))
assert_equal(np.busday_offset('2010-10-30', 0, roll='following'),
np.datetime64('2010-11-01'))
assert_equal(
np.busday_offset('2010-10-30', 0, roll='modifiedfollowing'),
np.datetime64('2010-10-29'))
assert_equal(
np.busday_offset('2010-10-30', 0, roll='modifiedpreceding'),
np.datetime64('2010-10-29'))
assert_equal(
np.busday_offset('2010-10-16', 0, roll='modifiedfollowing'),
np.datetime64('2010-10-18'))
assert_equal(
np.busday_offset('2010-10-16', 0, roll='modifiedpreceding'),
np.datetime64('2010-10-15'))
# roll='raise' by default
assert_raises(ValueError, np.busday_offset, '2011-06-04', 0)
# Bigger offset values
assert_equal(np.busday_offset('2006-02-01', 25),
np.datetime64('2006-03-08'))
assert_equal(np.busday_offset('2006-03-08', -25),
np.datetime64('2006-02-01'))
assert_equal(np.busday_offset('2007-02-25', 11, weekmask='SatSun'),
np.datetime64('2007-04-07'))
assert_equal(np.busday_offset('2007-04-07', -11, weekmask='SatSun'),
np.datetime64('2007-02-25'))
# NaT values when roll is not 'raise'
assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='nat'),
np.datetime64('NaT'))
assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='following'),
np.datetime64('NaT'))
assert_equal(np.busday_offset(np.datetime64('NaT'), 1, roll='preceding'),
np.datetime64('NaT'))
def test_datetime_busdaycalendar(self):
# Check that it removes NaT, duplicates, and weekends
# and sorts the result.
bdd = np.busdaycalendar(
holidays=['NaT', '2011-01-17', '2011-03-06', 'NaT',
'2011-12-26', '2011-05-30', '2011-01-17'])
assert_equal(bdd.holidays,
np.array(['2011-01-17', '2011-05-30', '2011-12-26'], dtype='M8'))
# Default M-F weekmask
assert_equal(bdd.weekmask, np.array([1, 1, 1, 1, 1, 0, 0], dtype='?'))
# Check string weekmask with varying whitespace.
bdd = np.busdaycalendar(weekmask="Sun TueWed Thu\tFri")
assert_equal(bdd.weekmask, np.array([0, 1, 1, 1, 1, 0, 1], dtype='?'))
# Check length 7 0/1 string
bdd = np.busdaycalendar(weekmask="0011001")
assert_equal(bdd.weekmask, np.array([0, 0, 1, 1, 0, 0, 1], dtype='?'))
# Check weekmask given as abbreviated weekday names
bdd = np.busdaycalendar(weekmask="Mon Tue")
assert_equal(bdd.weekmask, np.array([1, 1, 0, 0, 0, 0, 0], dtype='?'))
# All-zeros weekmask should raise
assert_raises(ValueError, np.busdaycalendar, weekmask=[0, 0, 0, 0, 0, 0, 0])
# weekday names must be correct case
assert_raises(ValueError, np.busdaycalendar, weekmask="satsun")
# Empty weekmask should also raise
assert_raises(ValueError, np.busdaycalendar, weekmask="")
# Invalid weekday name codes should raise
assert_raises(ValueError, np.busdaycalendar, weekmask="Mon Tue We")
assert_raises(ValueError, np.busdaycalendar, weekmask="Max")
assert_raises(ValueError, np.busdaycalendar, weekmask="Monday Tue")
def test_datetime_busday_holidays_offset(self):
# With exactly one holiday
assert_equal(
np.busday_offset('2011-11-10', 1, holidays=['2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-04', 5, holidays=['2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-10', 5, holidays=['2011-11-11']),
np.datetime64('2011-11-18'))
assert_equal(
np.busday_offset('2011-11-14', -1, holidays=['2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-18', -5, holidays=['2011-11-11']),
np.datetime64('2011-11-10'))
assert_equal(
np.busday_offset('2011-11-14', -5, holidays=['2011-11-11']),
np.datetime64('2011-11-04'))
# With the holiday appearing twice
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', '2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-11-11', '2011-11-11']),
np.datetime64('2011-11-10'))
# With a NaT holiday
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', 'NaT']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['NaT', '2011-11-11']),
np.datetime64('2011-11-10'))
# With another holiday after
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-11-11', '2011-11-24']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-11-11', '2011-11-24']),
np.datetime64('2011-11-10'))
# With another holiday before
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-10-10', '2011-11-11']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-10-10', '2011-11-11']),
np.datetime64('2011-11-10'))
# With another holiday before and after
assert_equal(
np.busday_offset('2011-11-10', 1,
holidays=['2011-10-10', '2011-11-11', '2011-11-24']),
np.datetime64('2011-11-14'))
assert_equal(
np.busday_offset('2011-11-14', -1,
holidays=['2011-10-10', '2011-11-11', '2011-11-24']),
np.datetime64('2011-11-10'))
# A bigger forward jump across more than one week/holiday
holidays = ['2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21',
'2011-12-26', '2012-01-02']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
assert_equal(
np.busday_offset('2011-10-03', 4, holidays=holidays),
np.busday_offset('2011-10-03', 4))
assert_equal(
np.busday_offset('2011-10-03', 5, holidays=holidays),
np.busday_offset('2011-10-03', 5 + 1))
assert_equal(
np.busday_offset('2011-10-03', 27, holidays=holidays),
np.busday_offset('2011-10-03', 27 + 1))
assert_equal(
np.busday_offset('2011-10-03', 28, holidays=holidays),
np.busday_offset('2011-10-03', 28 + 2))
assert_equal(
np.busday_offset('2011-10-03', 35, holidays=holidays),
np.busday_offset('2011-10-03', 35 + 2))
assert_equal(
np.busday_offset('2011-10-03', 36, holidays=holidays),
np.busday_offset('2011-10-03', 36 + 3))
assert_equal(
np.busday_offset('2011-10-03', 56, holidays=holidays),
np.busday_offset('2011-10-03', 56 + 3))
assert_equal(
np.busday_offset('2011-10-03', 57, holidays=holidays),
np.busday_offset('2011-10-03', 57 + 4))
assert_equal(
np.busday_offset('2011-10-03', 60, holidays=holidays),
np.busday_offset('2011-10-03', 60 + 4))
assert_equal(
np.busday_offset('2011-10-03', 61, holidays=holidays),
np.busday_offset('2011-10-03', 61 + 5))
assert_equal(
np.busday_offset('2011-10-03', 61, busdaycal=bdd),
np.busday_offset('2011-10-03', 61 + 5))
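# Each "+ k" on the right-hand side above counts the holidays crossed by
# the jump (illustrative reading): e.g. 5 business days forward from
# 2011-10-03 crosses the 2011-10-10 holiday, so it lands where a plain
# offset of 5 + 1 does.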
# A bigger backward jump across more than one week/holiday
assert_equal(
np.busday_offset('2012-01-03', -1, holidays=holidays),
np.busday_offset('2012-01-03', -1 - 1))
assert_equal(
np.busday_offset('2012-01-03', -4, holidays=holidays),
np.busday_offset('2012-01-03', -4 - 1))
assert_equal(
np.busday_offset('2012-01-03', -5, holidays=holidays),
np.busday_offset('2012-01-03', -5 - 2))
assert_equal(
np.busday_offset('2012-01-03', -25, holidays=holidays),
np.busday_offset('2012-01-03', -25 - 2))
assert_equal(
np.busday_offset('2012-01-03', -26, holidays=holidays),
np.busday_offset('2012-01-03', -26 - 3))
assert_equal(
np.busday_offset('2012-01-03', -33, holidays=holidays),
np.busday_offset('2012-01-03', -33 - 3))
assert_equal(
np.busday_offset('2012-01-03', -34, holidays=holidays),
np.busday_offset('2012-01-03', -34 - 4))
assert_equal(
np.busday_offset('2012-01-03', -56, holidays=holidays),
np.busday_offset('2012-01-03', -56 - 4))
assert_equal(
np.busday_offset('2012-01-03', -57, holidays=holidays),
np.busday_offset('2012-01-03', -57 - 5))
assert_equal(
np.busday_offset('2012-01-03', -57, busdaycal=bdd),
np.busday_offset('2012-01-03', -57 - 5))
# Can't supply both a weekmask/holidays and busdaycal
assert_raises(ValueError, np.busday_offset, '2012-01-03', -15,
weekmask='1111100', busdaycal=bdd)
assert_raises(ValueError, np.busday_offset, '2012-01-03', -15,
holidays=holidays, busdaycal=bdd)
# Roll with the holidays
assert_equal(
np.busday_offset('2011-12-25', 0,
roll='forward', holidays=holidays),
np.datetime64('2011-12-27'))
assert_equal(
np.busday_offset('2011-12-26', 0,
roll='forward', holidays=holidays),
np.datetime64('2011-12-27'))
assert_equal(
np.busday_offset('2011-12-26', 0,
roll='backward', holidays=holidays),
np.datetime64('2011-12-23'))
assert_equal(
np.busday_offset('2012-02-27', 0,
roll='modifiedfollowing',
holidays=['2012-02-27', '2012-02-26', '2012-02-28',
'2012-03-01', '2012-02-29']),
np.datetime64('2012-02-24'))
assert_equal(
np.busday_offset('2012-03-06', 0,
roll='modifiedpreceding',
holidays=['2012-03-02', '2012-03-03', '2012-03-01',
'2012-03-05', '2012-03-07', '2012-03-06']),
np.datetime64('2012-03-08'))
def test_datetime_busday_holidays_count(self):
holidays = ['2011-01-01', '2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21', '2011-01-17',
'2011-12-26', '2012-01-02', '2011-02-21', '2011-05-30',
'2011-07-01', '2011-07-04', '2011-09-05', '2011-10-10']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
# Validate busday_count against busday_offset broadcast
# over a range of offsets
dates = np.busday_offset('2011-01-01', np.arange(366),
roll='forward', busdaycal=bdd)
assert_equal(np.busday_count('2011-01-01', dates, busdaycal=bdd),
np.arange(366))
# Returns negative value when reversed
assert_equal(np.busday_count(dates, '2011-01-01', busdaycal=bdd),
-np.arange(366))
dates = np.busday_offset('2011-12-31', -np.arange(366),
roll='forward', busdaycal=bdd)
assert_equal(np.busday_count(dates, '2011-12-31', busdaycal=bdd),
np.arange(366))
# Returns negative value when reversed
assert_equal(np.busday_count('2011-12-31', dates, busdaycal=bdd),
-np.arange(366))
# Can't supply both a weekmask/holidays and busdaycal
assert_raises(ValueError, np.busday_offset, '2012-01-03', '2012-02-03',
weekmask='1111100', busdaycal=bdd)
assert_raises(ValueError, np.busday_offset, '2012-01-03', '2012-02-03',
holidays=holidays, busdaycal=bdd)
# Number of Mondays in March 2011
assert_equal(np.busday_count('2011-03', '2011-04', weekmask='Mon'), 4)
# Returns negative value when reversed
assert_equal(np.busday_count('2011-04', '2011-03', weekmask='Mon'), -4)
def test_datetime_is_busday(self):
holidays = ['2011-01-01', '2011-10-10', '2011-11-11', '2011-11-24',
'2011-12-25', '2011-05-30', '2011-02-21', '2011-01-17',
'2011-12-26', '2012-01-02', '2011-02-21', '2011-05-30',
'2011-07-01', '2011-07-04', '2011-09-05', '2011-10-10',
'NaT']
bdd = np.busdaycalendar(weekmask='1111100', holidays=holidays)
# Weekend/weekday tests
assert_equal(np.is_busday('2011-01-01'), False)
assert_equal(np.is_busday('2011-01-02'), False)
assert_equal(np.is_busday('2011-01-03'), True)
# All the holidays are not business days
assert_equal(np.is_busday(holidays, busdaycal=bdd),
np.zeros(len(holidays), dtype='?'))
def test_datetime_y2038(self):
# Test parsing on either side of the Y2038 boundary
a = np.datetime64('2038-01-19T03:14:07')
assert_equal(a.view(np.int64), 2**31 - 1)
a = np.datetime64('2038-01-19T03:14:08')
assert_equal(a.view(np.int64), 2**31)
# Test parsing on either side of the Y2038 boundary with
# a manually specified timezone offset
with assert_warns(DeprecationWarning):
a = np.datetime64('2038-01-19T04:14:07+0100')
assert_equal(a.view(np.int64), 2**31 - 1)
with assert_warns(DeprecationWarning):
a = np.datetime64('2038-01-19T04:14:08+0100')
assert_equal(a.view(np.int64), 2**31)
# Test parsing a date after Y2038
a = np.datetime64('2038-01-20T13:21:14')
assert_equal(str(a), '2038-01-20T13:21:14')
class TestDateTimeData(TestCase):
def test_basic(self):
a = np.array(['1980-03-23'], dtype=np.datetime64)
assert_equal(np.datetime_data(a.dtype), ('D', 1))
if __name__ == "__main__":
run_module_suite()
| mit |
eirannejad/pyRevit | extensions/pyRevitTools.extension/pyRevit.tab/Toggles.panel/toggles2.stack/SectionBox.pushbutton/script.py | 1 | 1290 | """Toggles visibility of section box in current 3D view"""
from pyrevit import framework
from pyrevit import revit, DB
@revit.carryout('Toggle Section Box')
def toggle_sectionbox():
# activate reveal-hidden mode so we can collect
# all elements (visible and hidden)
activeview = revit.active_view
activeview.EnableRevealHiddenMode()
view_elements = DB.FilteredElementCollector(revit.doc, activeview.Id)\
.OfCategory(DB.BuiltInCategory.OST_SectionBox)\
.ToElements()
# find section boxes and try toggling their visibility;
# usually more than one section box shows up in the list, but not
# all of them can be toggled. Whichever can be toggled
# belongs to this view
for sec_box in [x for x in view_elements
if x.CanBeHidden(activeview)]:
if sec_box.IsHidden(activeview):
activeview.UnhideElements(
framework.List[DB.ElementId]([sec_box.Id])
)
else:
activeview.HideElements(
framework.List[DB.ElementId]([sec_box.Id])
)
activeview.DisableTemporaryViewMode(
DB.TemporaryViewMode.RevealHiddenElements
)
toggle_sectionbox()
| gpl-3.0 |
Chaozz/happygg | src_tests/lib/googletest/scripts/fuse_gtest_files.py | 346 | 8884 | #!/usr/bin/env python
#
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""fuse_gtest_files.py v0.2.0
Fuses Google Test source code into a .h file and a .cc file.
SYNOPSIS
fuse_gtest_files.py [GTEST_ROOT_DIR] OUTPUT_DIR
Scans GTEST_ROOT_DIR for Google Test source code, and generates
two files: OUTPUT_DIR/gtest/gtest.h and OUTPUT_DIR/gtest/gtest-all.cc.
Then you can build your tests by adding OUTPUT_DIR to the include
search path and linking with OUTPUT_DIR/gtest/gtest-all.cc. These
two files contain everything you need to use Google Test. Hence
you can "install" Google Test by copying them to wherever you want.
GTEST_ROOT_DIR can be omitted and defaults to the parent
directory of the directory holding this script.
EXAMPLES
./fuse_gtest_files.py fused_gtest
./fuse_gtest_files.py path/to/unpacked/gtest fused_gtest
This tool is experimental. In particular, it assumes that there is no
conditional inclusion of Google Test headers. Please report any
problems to googletestframework@googlegroups.com. You can read
http://code.google.com/p/googletest/wiki/GoogleTestAdvancedGuide for
more information.
"""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import re
try:
from sets import Set as set # For Python 2.3 compatibility
except ImportError:
pass
import sys
# We assume that this file is in the scripts/ directory in the Google
# Test root directory.
DEFAULT_GTEST_ROOT_DIR = os.path.join(os.path.dirname(__file__), '..')
# Regex for matching '#include "gtest/..."'.
INCLUDE_GTEST_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(gtest/.+)"')
# Regex for matching '#include "src/..."'.
INCLUDE_SRC_FILE_REGEX = re.compile(r'^\s*#\s*include\s*"(src/.+)"')
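# Illustrative examples (not part of the original script): given the line
# #include "gtest/gtest-spi.h"
# INCLUDE_GTEST_FILE_REGEX captures 'gtest/gtest-spi.h' in group(1); given
# #include "src/gtest-internal-inl.h"
# INCLUDE_SRC_FILE_REGEX captures 'src/gtest-internal-inl.h'.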
# Where to find the source seed files.
GTEST_H_SEED = 'include/gtest/gtest.h'
GTEST_SPI_H_SEED = 'include/gtest/gtest-spi.h'
GTEST_ALL_CC_SEED = 'src/gtest-all.cc'
# Where to put the generated files.
GTEST_H_OUTPUT = 'gtest/gtest.h'
GTEST_ALL_CC_OUTPUT = 'gtest/gtest-all.cc'
def VerifyFileExists(directory, relative_path):
"""Verifies that the given file exists; aborts on failure.
relative_path is the file path relative to the given directory.
"""
if not os.path.isfile(os.path.join(directory, relative_path)):
print('ERROR: Cannot find %s in directory %s.' % (relative_path,
directory))
print('Please either specify a valid project root directory '
'or omit it on the command line.')
sys.exit(1)
def ValidateGTestRootDir(gtest_root):
"""Makes sure gtest_root points to a valid gtest root directory.
The function aborts the program on failure.
"""
VerifyFileExists(gtest_root, GTEST_H_SEED)
VerifyFileExists(gtest_root, GTEST_ALL_CC_SEED)
def VerifyOutputFile(output_dir, relative_path):
"""Verifies that the given output file path is valid.
relative_path is relative to the output_dir directory.
"""
# Makes sure the output file either doesn't exist or can be overwritten.
output_file = os.path.join(output_dir, relative_path)
if os.path.exists(output_file):
# TODO(wan@google.com): The following user-interaction doesn't
# work with automated processes. We should provide a way for the
# Makefile to force overwriting the files.
print('%s already exists in directory %s - overwrite it? (y/N) ' %
(relative_path, output_dir))
answer = sys.stdin.readline().strip()
if answer not in ['y', 'Y']:
print('ABORTED.')
sys.exit(1)
# Makes sure the directory holding the output file exists; creates
# it and all its ancestors if necessary.
parent_directory = os.path.dirname(output_file)
if not os.path.isdir(parent_directory):
os.makedirs(parent_directory)
def ValidateOutputDir(output_dir):
"""Makes sure output_dir points to a valid output directory.
The function aborts the program on failure.
"""
VerifyOutputFile(output_dir, GTEST_H_OUTPUT)
VerifyOutputFile(output_dir, GTEST_ALL_CC_OUTPUT)
def FuseGTestH(gtest_root, output_dir):
"""Scans folder gtest_root to generate gtest/gtest.h in output_dir."""
output_file = open(os.path.join(output_dir, GTEST_H_OUTPUT), 'w')
processed_files = set() # Holds all gtest headers we've processed.
def ProcessFile(gtest_header_path):
"""Processes the given gtest header file."""
# We don't process the same header twice.
if gtest_header_path in processed_files:
return
processed_files.add(gtest_header_path)
# Reads each line in the given gtest header.
for line in open(os.path.join(gtest_root, gtest_header_path), 'r'):
m = INCLUDE_GTEST_FILE_REGEX.match(line)
if m:
# It's '#include "gtest/..."' - let's process it recursively.
ProcessFile('include/' + m.group(1))
else:
# Otherwise we copy the line unchanged to the output file.
output_file.write(line)
ProcessFile(GTEST_H_SEED)
output_file.close()
def FuseGTestAllCcToFile(gtest_root, output_file):
"""Scans folder gtest_root to generate gtest/gtest-all.cc in output_file."""
processed_files = set()
def ProcessFile(gtest_source_file):
"""Processes the given gtest source file."""
# We don't process the same #included file twice.
if gtest_source_file in processed_files:
return
processed_files.add(gtest_source_file)
# Reads each line in the given gtest source file.
for line in open(os.path.join(gtest_root, gtest_source_file), 'r'):
m = INCLUDE_GTEST_FILE_REGEX.match(line)
if m:
if 'include/' + m.group(1) == GTEST_SPI_H_SEED:
# It's '#include "gtest/gtest-spi.h"'. This file is not
# #included by "gtest/gtest.h", so we need to process it.
ProcessFile(GTEST_SPI_H_SEED)
else:
# It's '#include "gtest/foo.h"' where foo is not gtest-spi.
# We treat it as '#include "gtest/gtest.h"', as all other
# gtest headers are being fused into gtest.h and cannot be
# #included directly.
# There is no need to #include "gtest/gtest.h" more than once.
if not GTEST_H_SEED in processed_files:
processed_files.add(GTEST_H_SEED)
output_file.write('#include "%s"\n' % (GTEST_H_OUTPUT,))
else:
m = INCLUDE_SRC_FILE_REGEX.match(line)
if m:
# It's '#include "src/foo"' - let's process it recursively.
ProcessFile(m.group(1))
else:
output_file.write(line)
ProcessFile(GTEST_ALL_CC_SEED)
def FuseGTestAllCc(gtest_root, output_dir):
"""Scans folder gtest_root to generate gtest/gtest-all.cc in output_dir."""
output_file = open(os.path.join(output_dir, GTEST_ALL_CC_OUTPUT), 'w')
FuseGTestAllCcToFile(gtest_root, output_file)
output_file.close()
def FuseGTest(gtest_root, output_dir):
"""Fuses gtest.h and gtest-all.cc."""
ValidateGTestRootDir(gtest_root)
ValidateOutputDir(output_dir)
FuseGTestH(gtest_root, output_dir)
FuseGTestAllCc(gtest_root, output_dir)
def main():
argc = len(sys.argv)
if argc == 2:
# fuse_gtest_files.py OUTPUT_DIR
FuseGTest(DEFAULT_GTEST_ROOT_DIR, sys.argv[1])
elif argc == 3:
# fuse_gtest_files.py GTEST_ROOT_DIR OUTPUT_DIR
FuseGTest(sys.argv[1], sys.argv[2])
else:
print(__doc__)
sys.exit(1)
if __name__ == '__main__':
main()
| gpl-3.0 |
sigma-random/scrapy | scrapy/contrib/downloadermiddleware/httpcache.py | 33 | 4327 | from email.utils import formatdate
from scrapy import signals
from scrapy.exceptions import NotConfigured, IgnoreRequest
from scrapy.utils.misc import load_object
class HttpCacheMiddleware(object):
def __init__(self, settings, stats):
if not settings.getbool('HTTPCACHE_ENABLED'):
raise NotConfigured
self.policy = load_object(settings['HTTPCACHE_POLICY'])(settings)
self.storage = load_object(settings['HTTPCACHE_STORAGE'])(settings)
self.ignore_missing = settings.getbool('HTTPCACHE_IGNORE_MISSING')
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
o = cls(crawler.settings, crawler.stats)
crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)
return o
def spider_opened(self, spider):
self.storage.open_spider(spider)
def spider_closed(self, spider):
self.storage.close_spider(spider)
def process_request(self, request, spider):
# Skip uncacheable requests
if not self.policy.should_cache_request(request):
request.meta['_dont_cache'] = True # flag as uncacheable
return
# Look for cached response and check if expired
cachedresponse = self.storage.retrieve_response(spider, request)
if cachedresponse is None:
self.stats.inc_value('httpcache/miss', spider=spider)
if self.ignore_missing:
self.stats.inc_value('httpcache/ignore', spider=spider)
raise IgnoreRequest("Ignored request not in cache: %s" % request)
return # first time request
# Return cached response only if not expired
cachedresponse.flags.append('cached')
if self.policy.is_cached_response_fresh(cachedresponse, request):
self.stats.inc_value('httpcache/hit', spider=spider)
return cachedresponse
# Keep a reference to cached response to avoid a second cache lookup on
# process_response hook
request.meta['cached_response'] = cachedresponse
def process_response(self, request, response, spider):
# Skip cached responses and uncacheable requests
if 'cached' in response.flags or '_dont_cache' in request.meta:
request.meta.pop('_dont_cache', None)
return response
# RFC2616 requires origin server to set Date header,
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.18
if 'Date' not in response.headers:
response.headers['Date'] = formatdate(usegmt=1)
# Do not validate first-hand responses
cachedresponse = request.meta.pop('cached_response', None)
if cachedresponse is None:
self.stats.inc_value('httpcache/firsthand', spider=spider)
self._cache_response(spider, response, request, cachedresponse)
return response
if self.policy.is_cached_response_valid(cachedresponse, response, request):
self.stats.inc_value('httpcache/revalidate', spider=spider)
return cachedresponse
self.stats.inc_value('httpcache/invalidate', spider=spider)
self._cache_response(spider, response, request, cachedresponse)
return response
def _cache_response(self, spider, response, request, cachedresponse):
if self.policy.should_cache_response(response, request):
self.stats.inc_value('httpcache/store', spider=spider)
self.storage.store_response(spider, request, response)
else:
self.stats.inc_value('httpcache/uncacheable', spider=spider)
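# A minimal sketch (assumed project settings, mirroring the names read in
# __init__ above) of how this middleware is typically enabled:
#
# HTTPCACHE_ENABLED = True
# HTTPCACHE_POLICY = 'scrapy.contrib.httpcache.DummyPolicy'
# HTTPCACHE_STORAGE = 'scrapy.contrib.httpcache.FilesystemCacheStorage'
# HTTPCACHE_IGNORE_MISSING = False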
from scrapy.contrib.httpcache import FilesystemCacheStorage as _FilesystemCacheStorage
class FilesystemCacheStorage(_FilesystemCacheStorage):
def __init__(self, *args, **kwargs):
import warnings
from scrapy.exceptions import ScrapyDeprecationWarning
warnings.warn('Importing FilesystemCacheStorage from '
'scrapy.contrib.downloadermiddleware.httpcache is '
'deprecated, use scrapy.contrib.httpcache instead.',
category=ScrapyDeprecationWarning, stacklevel=1)
super(FilesystemCacheStorage, self).__init__(*args, **kwargs)
| bsd-3-clause |
denbedilov/ATTENDER | server/attender-mobile/lib/requests/compat.py | 101 | 2600 | # -*- coding: utf-8 -*-
"""
pythoncompat
"""
from .packages import chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)
#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)
#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)
#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)
#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)
#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)
#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)
#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)
#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
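# Illustrative: on CPython 2.7, sys.version_info[0] == 2 and
# sys.version_info[1] == 7, so is_py2 and is_py27 above are True and the
# remaining is_py* flags are False.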
# ---------
# Platforms
# ---------
# Syntax sugar.
_ver = sys.version.lower()
is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)
# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))
# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()
# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
is_solaris = ('sunos' in str(sys.platform).lower()) # Complete guess.
try:
import simplejson as json
except (ImportError, SyntaxError):
# simplejson does not support Python 3.2; it throws a SyntaxError
# because of u'...' Unicode literals.
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import quote, unquote, quote_plus, unquote_plus, urlencode, getproxies, proxy_bypass
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
from .packages.urllib3.packages.ordered_dict import OrderedDict
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
from collections import OrderedDict
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
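# Illustrative: with the aliases above, downstream code can write, e.g.,
# isinstance(value, basestring) or json.loads(text) and get the right
# behaviour on both Python 2 and Python 3.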
| mit |
dmilith/SublimeText3-dmilith | Packages/pyyaml/st3/yaml/dumper.py | 20 | 2837 |
__all__ = ['BaseDumper', 'SafeDumper', 'Dumper']
from .emitter import *
from .serializer import *
from .representer import *
from .resolver import *
class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver):
def __init__(self, stream,
default_style=None, default_flow_style=False,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None, sort_keys=True):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style, sort_keys=sort_keys)
Resolver.__init__(self)
class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=False,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None, sort_keys=True):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
SafeRepresenter.__init__(self, default_style=default_style,
default_flow_style=default_flow_style, sort_keys=sort_keys)
Resolver.__init__(self)
class Dumper(Emitter, Serializer, Representer, Resolver):
def __init__(self, stream,
default_style=None, default_flow_style=False,
canonical=None, indent=None, width=None,
allow_unicode=None, line_break=None,
encoding=None, explicit_start=None, explicit_end=None,
version=None, tags=None, sort_keys=True):
Emitter.__init__(self, stream, canonical=canonical,
indent=indent, width=width,
allow_unicode=allow_unicode, line_break=line_break)
Serializer.__init__(self, encoding=encoding,
explicit_start=explicit_start, explicit_end=explicit_end,
version=version, tags=tags)
Representer.__init__(self, default_style=default_style,
default_flow_style=default_flow_style, sort_keys=sort_keys)
Resolver.__init__(self)
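# Illustrative usage (not part of the original module): these classes are
# normally passed to the top-level API rather than instantiated directly,
# e.g. yaml.dump(data, Dumper=SafeDumper, sort_keys=False)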
| mit |
prutseltje/ansible | lib/ansible/modules/cloud/amazon/_ec2_ami_find.py | 50 | 13458 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['deprecated'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: ec2_ami_find
version_added: '2.0'
short_description: Searches for AMIs to obtain the AMI ID and other information
deprecated:
removed_in: "2.9"
why: Various AWS modules have been combined and replaced with M(ec2_ami_facts).
alternative: Use M(ec2_ami_facts) instead.
description:
- Returns list of matching AMIs with AMI ID, along with other useful information
- Can search AMIs with different owners
- Can search by matching tag(s), by AMI name and/or other criteria
- Results can be sorted and sliced
author: "Tom Bamford (@tombamford)"
notes:
- This module is not backwards compatible with the previous version of the ec2_search_ami module which worked only for Ubuntu AMIs listed on
cloud-images.ubuntu.com.
- See the example below for a suggestion of how to search by distro/release.
options:
region:
description:
- The AWS region to use.
required: true
aliases: [ 'aws_region', 'ec2_region' ]
owner:
description:
- Search AMIs owned by the specified owner
- Can specify an AWS account ID, or one of the special IDs 'self', 'amazon' or 'aws-marketplace'
- If not specified, all EC2 AMIs in the specified region will be searched.
- You can include wildcards in many of the search options. An asterisk (*) matches zero or more characters, and a question mark (?) matches exactly one
character. You can escape special characters using a backslash (\) before the character. For example, a value of \*amazon\?\\ searches for the
literal string *amazon?\.
ami_id:
description:
- An AMI ID to match.
ami_tags:
description:
- A hash/dictionary of tags to match for the AMI.
architecture:
description:
- An architecture type to match (e.g. x86_64).
hypervisor:
description:
- A hypervisor type to match (e.g. xen).
is_public:
description:
- Whether or not the image(s) are public.
type: bool
name:
description:
- An AMI name to match.
platform:
description:
- Platform type to match.
product_code:
description:
- Marketplace product code to match.
version_added: "2.3"
sort:
description:
- Optional attribute which with to sort the results.
- If specifying 'tag', the 'sort_tag' parameter is required.
- Starting at version 2.1, additional sort choices of architecture, block_device_mapping, creationDate, hypervisor, is_public, location, owner_id,
platform, root_device_name, root_device_type, state, and virtualization_type are supported.
choices:
- 'name'
- 'description'
- 'tag'
- 'architecture'
- 'block_device_mapping'
- 'creationDate'
- 'hypervisor'
- 'is_public'
- 'location'
- 'owner_id'
- 'platform'
- 'root_device_name'
- 'root_device_type'
- 'state'
- 'virtualization_type'
sort_tag:
description:
- Tag name with which to sort results.
- Required when specifying 'sort=tag'.
sort_order:
description:
- Order in which to sort results.
- Only used when the 'sort' parameter is specified.
choices: ['ascending', 'descending']
default: 'ascending'
sort_start:
description:
- Which result to start with (when sorting).
- Corresponds to Python slice notation.
sort_end:
description:
- Which result to end with (when sorting).
- Corresponds to Python slice notation.
state:
description:
- AMI state to match.
default: 'available'
virtualization_type:
description:
- Virtualization type to match (e.g. hvm).
root_device_type:
description:
- Root device type to match (e.g. ebs, instance-store).
version_added: "2.5"
no_result_action:
description:
- What to do when no results are found.
- "'success' reports success and returns an empty array"
- "'fail' causes the module to report failure"
choices: ['success', 'fail']
default: 'success'
extends_documentation_fragment:
- aws
requirements:
- "python >= 2.6"
- boto
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Search for the AMI tagged "project:website"
- ec2_ami_find:
owner: self
ami_tags:
project: website
no_result_action: fail
register: ami_find
# Search for the latest Ubuntu 14.04 AMI
- ec2_ami_find:
name: "ubuntu/images/ebs/ubuntu-trusty-14.04-amd64-server-*"
owner: 099720109477
sort: name
sort_order: descending
sort_end: 1
register: ami_find
# Launch an EC2 instance
- ec2:
image: "{{ ami_find.results[0].ami_id }}"
instance_type: m3.medium
key_name: mykey
wait: yes
'''
RETURN = '''
ami_id:
description: id of found amazon image
returned: when AMI found
type: string
sample: "ami-e9095e8c"
architecture:
description: architecture of image
returned: when AMI found
type: string
sample: "x86_64"
block_device_mapping:
description: block device mapping associated with image
returned: when AMI found
type: dict
sample: "{
'/dev/xvda': {
'delete_on_termination': true,
'encrypted': false,
'size': 8,
'snapshot_id': 'snap-ca0330b8',
'volume_type': 'gp2'
}"
creationDate:
description: creation date of image
returned: when AMI found
type: string
sample: "2015-10-15T22:43:44.000Z"
description:
description: description of image
returned: when AMI found
type: string
sample: "test-server01"
hypervisor:
description: type of hypervisor
returned: when AMI found
type: string
sample: "xen"
is_public:
description: whether image is public
returned: when AMI found
type: bool
sample: false
location:
description: location of image
returned: when AMI found
type: string
sample: "435210894375/test-server01-20151015-234343"
name:
description: ami name of image
returned: when AMI found
type: string
sample: "test-server01-20151015-234343"
owner_id:
description: owner of image
returned: when AMI found
type: string
sample: "435210894375"
platform:
description: platform of image
returned: when AMI found
type: string
sample: null
root_device_name:
description: root device name of image
returned: when AMI found
type: string
sample: "/dev/xvda"
root_device_type:
description: root device type of image
returned: when AMI found
type: string
sample: "ebs"
state:
description: state of image
returned: when AMI found
type: string
sample: "available"
tags:
description: tags assigned to image
returned: when AMI found
type: dict
sample: "{
'Environment': 'devel',
'Name': 'test-server01',
'Role': 'web'
}"
virtualization_type:
description: image virtualization type
returned: when AMI found
type: string
sample: "hvm"
'''
import json
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import HAS_BOTO, ec2_argument_spec, ec2_connect
def get_block_device_mapping(image):
"""
Retrieves block device mapping from AMI
"""
bdm_dict = dict()
bdm = getattr(image, 'block_device_mapping')
for device_name in bdm.keys():
bdm_dict[device_name] = {
'size': bdm[device_name].size,
'snapshot_id': bdm[device_name].snapshot_id,
'volume_type': bdm[device_name].volume_type,
'encrypted': bdm[device_name].encrypted,
'delete_on_termination': bdm[device_name].delete_on_termination
}
return bdm_dict
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
owner=dict(required=False, default=None),
ami_id=dict(required=False),
ami_tags=dict(required=False, type='dict',
aliases=['search_tags', 'image_tags']),
architecture=dict(required=False),
hypervisor=dict(required=False),
is_public=dict(required=False, type='bool'),
name=dict(required=False),
platform=dict(required=False),
product_code=dict(required=False),
sort=dict(required=False, default=None,
choices=['name', 'description', 'tag', 'architecture', 'block_device_mapping', 'creationDate', 'hypervisor', 'is_public', 'location',
'owner_id', 'platform', 'root_device_name', 'root_device_type', 'state', 'virtualization_type']),
sort_tag=dict(required=False),
sort_order=dict(required=False, default='ascending',
choices=['ascending', 'descending']),
sort_start=dict(required=False),
sort_end=dict(required=False),
state=dict(required=False, default='available'),
virtualization_type=dict(required=False),
no_result_action=dict(required=False, default='success',
choices=['success', 'fail']),
)
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
module.deprecate("The 'ec2_ami_find' module has been deprecated. Use 'ec2_ami_facts' instead.", version=2.9)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module, install via pip or your package manager')
ami_id = module.params.get('ami_id')
ami_tags = module.params.get('ami_tags')
architecture = module.params.get('architecture')
hypervisor = module.params.get('hypervisor')
is_public = module.params.get('is_public')
name = module.params.get('name')
owner = module.params.get('owner')
platform = module.params.get('platform')
product_code = module.params.get('product_code')
root_device_type = module.params.get('root_device_type')
sort = module.params.get('sort')
sort_tag = module.params.get('sort_tag')
sort_order = module.params.get('sort_order')
sort_start = module.params.get('sort_start')
sort_end = module.params.get('sort_end')
state = module.params.get('state')
virtualization_type = module.params.get('virtualization_type')
no_result_action = module.params.get('no_result_action')
filter = {'state': state}
if ami_id:
filter['image_id'] = ami_id
if ami_tags:
for tag in ami_tags:
filter['tag:' + tag] = ami_tags[tag]
if architecture:
filter['architecture'] = architecture
if hypervisor:
filter['hypervisor'] = hypervisor
if is_public:
filter['is_public'] = 'true'
if name:
filter['name'] = name
if platform:
filter['platform'] = platform
if product_code:
filter['product-code'] = product_code
if root_device_type:
filter['root_device_type'] = root_device_type
if virtualization_type:
filter['virtualization_type'] = virtualization_type
ec2 = ec2_connect(module)
images_result = ec2.get_all_images(owners=owner, filters=filter)
if no_result_action == 'fail' and len(images_result) == 0:
module.fail_json(msg="No AMIs matched the attributes: %s" % json.dumps(filter))
results = []
for image in images_result:
data = {
'ami_id': image.id,
'architecture': image.architecture,
'block_device_mapping': get_block_device_mapping(image),
'creationDate': image.creationDate,
'description': image.description,
'hypervisor': image.hypervisor,
'is_public': image.is_public,
'location': image.location,
'name': image.name,
'owner_id': image.owner_id,
'platform': image.platform,
'root_device_name': image.root_device_name,
'root_device_type': image.root_device_type,
'state': image.state,
'tags': image.tags,
'virtualization_type': image.virtualization_type,
}
if image.kernel_id:
data['kernel_id'] = image.kernel_id
if image.ramdisk_id:
data['ramdisk_id'] = image.ramdisk_id
results.append(data)
if sort == 'tag':
if not sort_tag:
module.fail_json(msg="'sort_tag' option must be given with 'sort=tag'")
results.sort(key=lambda e: e['tags'][sort_tag], reverse=(sort_order == 'descending'))
elif sort:
results.sort(key=lambda e: e[sort], reverse=(sort_order == 'descending'))
try:
if sort and sort_start and sort_end:
results = results[int(sort_start):int(sort_end)]
elif sort and sort_start:
results = results[int(sort_start):]
elif sort and sort_end:
results = results[:int(sort_end)]
except TypeError:
module.fail_json(msg="Please supply numeric values for sort_start and/or sort_end")
module.exit_json(results=results)
if __name__ == '__main__':
main()
| gpl-3.0 |
alphagov/notifications-api | migrations/versions/0090_inbound_sms.py | 1 | 1280 | """empty message
Revision ID: 0090_inbound_sms
Revises: 0089_govuk_sms_sender
Create Date: 2017-05-22 11:28:53.471004
"""
# revision identifiers, used by Alembic.
revision = '0090_inbound_sms'
down_revision = '0089_govuk_sms_sender'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
def upgrade():
op.create_table(
'inbound_sms',
sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('service_id', postgresql.UUID(as_uuid=True), nullable=False),
sa.Column('content', sa.String, nullable=False),
sa.Column('notify_number', sa.String, nullable=False),
sa.Column('user_number', sa.String, nullable=False),
sa.Column('created_at', sa.DateTime, nullable=False),
sa.Column('provider_date', sa.DateTime, nullable=True),
sa.Column('provider_reference', sa.String, nullable=True),
sa.ForeignKeyConstraint(['service_id'], ['services.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_index(op.f('ix_inbound_sms_service_id'), 'inbound_sms', ['service_id'], unique=False)
op.create_index(op.f('ix_inbound_sms_user_number'), 'inbound_sms', ['user_number'], unique=False)
def downgrade():
op.drop_table('inbound_sms')
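# Illustrative (assuming the standard Alembic workflow): this revision is
# applied with `alembic upgrade 0090_inbound_sms` and reverted with
# `alembic downgrade 0089_govuk_sms_sender`.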
| mit |
ProfessorX/Config | .PyCharm30/system/python_stubs/-1247971765/PyKDE4/kio/KFileItemDelegate.py | 1 | 2969 | # encoding: utf-8
# module PyKDE4.kio
# from /usr/lib/python3/dist-packages/PyKDE4/kio.cpython-34m-x86_64-linux-gnu.so
# by generator 1.135
# no doc
# imports
import PyKDE4.kdeui as __PyKDE4_kdeui
import PyQt4.QtCore as __PyQt4_QtCore
import PyQt4.QtGui as __PyQt4_QtGui
class KFileItemDelegate(__PyQt4_QtGui.QAbstractItemDelegate):
# no doc
def createEditor(self, *args, **kwargs): # real signature unknown
pass
def editorEvent(self, *args, **kwargs): # real signature unknown
pass
def eventFilter(self, *args, **kwargs): # real signature unknown
pass
def helpEvent(self, *args, **kwargs): # real signature unknown
pass
def iconRect(self, *args, **kwargs): # real signature unknown
pass
def jobTransfersVisible(self, *args, **kwargs): # real signature unknown
pass
def maximumSize(self, *args, **kwargs): # real signature unknown
pass
def paint(self, *args, **kwargs): # real signature unknown
pass
def setEditorData(self, *args, **kwargs): # real signature unknown
pass
def setJobTransfersVisible(self, *args, **kwargs): # real signature unknown
pass
def setMaximumSize(self, *args, **kwargs): # real signature unknown
pass
def setModelData(self, *args, **kwargs): # real signature unknown
pass
def setShadowBlur(self, *args, **kwargs): # real signature unknown
pass
def setShadowColor(self, *args, **kwargs): # real signature unknown
pass
def setShadowOffset(self, *args, **kwargs): # real signature unknown
pass
def setShowInformation(self, *args, **kwargs): # real signature unknown
pass
def setShowToolTipWhenElided(self, *args, **kwargs): # real signature unknown
pass
def setWrapMode(self, *args, **kwargs): # real signature unknown
pass
def shadowBlur(self, *args, **kwargs): # real signature unknown
pass
def shadowColor(self, *args, **kwargs): # real signature unknown
pass
def shadowOffset(self, *args, **kwargs): # real signature unknown
pass
def shape(self, *args, **kwargs): # real signature unknown
pass
def showToolTipWhenElided(self, *args, **kwargs): # real signature unknown
pass
def sizeHint(self, *args, **kwargs): # real signature unknown
pass
def updateEditorGeometry(self, *args, **kwargs): # real signature unknown
pass
def wrapMode(self, *args, **kwargs): # real signature unknown
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
AccessTime = 8
Comment = 13
CreationTime = 6
FriendlyMimeType = 10
Information = None # (!) real value is ''
LinkDest = 11
LocalPathOrUrl = 12
MimeType = 9
ModificationTime = 7
NoInformation = 0
OctalPermissions = 3
Owner = 4
OwnerAndGroup = 5
Permissions = 2
Size = 1
| gpl-2.0 |
flit/pyOCD | test/unit/test_regcache.py | 3 | 9133 | # pyOCD debugger
# Copyright (c) 2016-2019 Arm Limited
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import logging
from pyocd.cache.register import RegisterCache
from pyocd.debug.context import DebugContext
from pyocd.coresight.cortex_m import CortexM
from pyocd.coresight.cortex_m_core_registers import CortexMCoreRegisterInfo
from pyocd.core import memory_map
from pyocd.utility import conversion
from pyocd.utility import mask
@pytest.fixture(scope='function')
def regcache(mockcore):
return RegisterCache(DebugContext(mockcore), mockcore)
@pytest.fixture(scope='function')
def regcache_no_fpu(mockcore_no_fpu):
return RegisterCache(DebugContext(mockcore_no_fpu), mockcore_no_fpu)
COMPOSITES = [
'cfbp',
'xpsr',
'iapsr',
'eapsr',
'iepsr',
]
# Appropriate modifiers for masked registers - others modified by adding 7
REG_MODIFIER = {
'apsr': 0x30010000,
'epsr': 0x01000C00,
}
# Return list of reg names from the core, excluding composite regs.
def core_regs_composite_regs(core):
return list(r for r in core.core_registers.by_name.keys() if r not in COMPOSITES)
def get_modifier(r):
return REG_MODIFIER.get(r, 7)
def get_expected_reg_value(r):
i = CortexMCoreRegisterInfo.register_name_to_index(r)
if CortexMCoreRegisterInfo.get(i).is_psr_subregister:
return 0x55555555 & CortexMCoreRegisterInfo.get(i).psr_mask
if i < 0:
i += 100
return i + 1
def get_expected_cfbp():
return ((get_expected_reg_value('control') << 24) |
(get_expected_reg_value('faultmask') << 16) |
(get_expected_reg_value('basepri') << 8) |
get_expected_reg_value('primask'))
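# The packing above mirrors the one-word CFBP pseudo-register layout used by
# CortexM: CONTROL in bits [31:24], FAULTMASK in [23:16], BASEPRI in [15:8],
# and PRIMASK in [7:0].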
def get_expected_xpsr():
return (get_expected_reg_value('apsr') |
get_expected_reg_value('ipsr') |
get_expected_reg_value('epsr'))
class TestRegisterCache:
def set_core_regs(self, mockcore, modify=False):
for r in core_regs_composite_regs(mockcore):
if modify:
modifier = get_modifier(r)
else:
modifier = 0
mockcore.write_core_registers_raw([r], [get_expected_reg_value(r) + modifier])
assert mockcore.read_core_registers_raw([r]) == [get_expected_reg_value(r) + modifier]
def test_r_1(self, mockcore, regcache):
assert regcache.read_core_registers_raw(['r0']) == [0] # cache initial value of 0
mockcore.write_core_registers_raw(['r0'], [1234]) # modify reg behind the cache's back
assert mockcore.read_core_registers_raw(['r0']) == [1234] # verify modified reg
assert regcache.read_core_registers_raw(['r0']) == [0] # should return cached 0 value
regcache.invalidate() # explicitly invalidate cache
assert mockcore.read_core_registers_raw(['r0']) == [1234] # verify modified reg
assert regcache.read_core_registers_raw(['r0']) == [1234] # now should return updated 1234 value
def test_run_token(self, mockcore, regcache):
assert regcache.read_core_registers_raw(['r0']) == [0] # cache initial value of 0
mockcore.write_core_registers_raw(['r0'], [1234]) # modify reg behind the cache's back
assert mockcore.read_core_registers_raw(['r0']) == [1234] # verify modified reg
assert regcache.read_core_registers_raw(['r0']) == [0] # should return cached 0 value
mockcore.run_token += 1 # bump run token to cause cache to invalidate
assert regcache.read_core_registers_raw(['r0']) == [1234] # now should return updated 1234 value
def test_reading_from_core(self, mockcore, regcache):
self.set_core_regs(mockcore)
for r in core_regs_composite_regs(mockcore):
assert regcache.read_core_registers_raw([r]) == [get_expected_reg_value(r)]
def test_read_cached(self, mockcore, regcache):
self.set_core_regs(mockcore)
# cache all regs
regcache.read_core_registers_raw(core_regs_composite_regs(mockcore))
# modify regs in mock core
self.set_core_regs(mockcore, True)
# cache should return original unmodified values
for r in core_regs_composite_regs(mockcore):
assert regcache.read_core_registers_raw([r]) == [get_expected_reg_value(r)]
def test_read_cfbp(self, mockcore, regcache):
self.set_core_regs(mockcore)
assert regcache.read_core_registers_raw(['cfbp', 'control', 'faultmask']) == [
get_expected_cfbp(), get_expected_reg_value('control'), get_expected_reg_value('faultmask')
]
def test_read_xpsr(self, mockcore, regcache):
self.set_core_regs(mockcore)
assert regcache.read_core_registers_raw(['xpsr', 'ipsr', 'apsr', 'eapsr']) == [
get_expected_xpsr(), get_expected_reg_value('ipsr'),
get_expected_reg_value('apsr'), get_expected_reg_value('eapsr')
]
def test_read_cached_cfbp(self, mockcore, regcache):
self.set_core_regs(mockcore)
# cache it
regcache.read_core_registers_raw(['cfbp'])
# modify behind the cache's back
mockcore.write_core_registers_raw(['control', 'primask'], [0x55, 0xaa])
# cache should return original value
assert regcache.read_core_registers_raw(['cfbp']) == [get_expected_cfbp()]
def test_read_cached_xpsr(self, mockcore, regcache):
self.set_core_regs(mockcore)
# cache it
regcache.read_core_registers_raw(['xpsr'])
# modify behind the cache's back
mockcore.write_core_registers_raw(['ipsr', 'apsr'], [0x22, 0x10000000])
# cache should return original value
assert regcache.read_core_registers_raw(['xpsr']) == [get_expected_xpsr()]
def test_write_1(self, mockcore, regcache):
self.set_core_regs(mockcore)
assert mockcore.read_core_registers_raw(['r0']) == [get_expected_reg_value('r0')]
assert regcache.read_core_registers_raw(['r0']) == [get_expected_reg_value('r0')]
regcache.write_core_registers_raw(['r0'], [1234])
assert mockcore.read_core_registers_raw(['r0']) == [1234]
assert regcache.read_core_registers_raw(['r0']) == [1234]
def test_write_regs(self, mockcore, regcache):
self.set_core_regs(mockcore)
for r in core_regs_composite_regs(mockcore):
regcache.write_core_registers_raw([r], [get_expected_reg_value(r) + get_modifier(r)])
for r in core_regs_composite_regs(mockcore):
assert mockcore.read_core_registers_raw([r]) == [get_expected_reg_value(r) + get_modifier(r)]
def test_write_cfbp(self, mockcore, regcache):
self.set_core_regs(mockcore)
assert mockcore.read_core_registers_raw(['cfbp']) == [get_expected_cfbp()]
regcache.write_core_registers_raw(['control', 'primask'], [3, 19])
assert mockcore.read_core_registers_raw(['control', 'primask', 'cfbp']) == [
3, 19,
((3 << 24) | (get_expected_reg_value('faultmask') << 16) |
(get_expected_reg_value('basepri') << 8) | 19)
]
def test_write_xpsr(self, mockcore, regcache):
self.set_core_regs(mockcore)
assert mockcore.read_core_registers_raw(['xpsr']) == [get_expected_xpsr()]
regcache.write_core_registers_raw(['iapsr'], [0x10000022])
assert mockcore.read_core_registers_raw(['ipsr', 'apsr', 'iapsr', 'xpsr']) == [
0x22, 0x10000000, 0x10000022,
0x10000022 | get_expected_reg_value('epsr')
]
def test_write_full_xpsr(self, mockcore, regcache):
self.set_core_regs(mockcore)
assert mockcore.read_core_registers_raw(['xpsr']) == [get_expected_xpsr()]
regcache.write_core_registers_raw(['xpsr'], [0xffffffff])
assert mockcore.read_core_registers_raw(['ipsr', 'apsr', 'epsr', 'xpsr']) == [
CortexM.IPSR_MASK, CortexM.APSR_MASK, CortexM.EPSR_MASK,
0xffffffff
]
def test_invalid_reg_r(self, regcache):
with pytest.raises(KeyError):
regcache.read_core_registers_raw([132423])
def test_invalid_reg_w(self, regcache):
with pytest.raises(KeyError):
regcache.write_core_registers_raw([132423], [1234])
def test_invalid_fpu_reg_r(self, regcache_no_fpu):
with pytest.raises(KeyError):
regcache_no_fpu.read_core_registers_raw(['s1'])
def test_invalid_fpu_reg_w(self, regcache_no_fpu):
with pytest.raises(KeyError):
regcache_no_fpu.write_core_registers_raw(['s1'], [1.234])
| apache-2.0 |
0jpq0/kbengine | kbe/res/scripts/common/Lib/keyword.py | 162 | 2211 | #! /usr/bin/env python3
"""Keywords (from "graminit.c")
This file is automatically generated; please don't muck it up!
To update the symbols in this file, 'cd' to the top directory of
the python source tree after building the interpreter and run:
./python Lib/keyword.py
"""
__all__ = ["iskeyword", "kwlist"]
kwlist = [
#--start keywords--
'False',
'None',
'True',
'and',
'as',
'assert',
'break',
'class',
'continue',
'def',
'del',
'elif',
'else',
'except',
'finally',
'for',
'from',
'global',
'if',
'import',
'in',
'is',
'lambda',
'nonlocal',
'not',
'or',
'pass',
'raise',
'return',
'try',
'while',
'with',
'yield',
#--end keywords--
]
iskeyword = frozenset(kwlist).__contains__
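# Example (illustrative):
# >>> iskeyword('lambda')
# True
# >>> iskeyword('Lambda')
# False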
def main():
import sys, re
args = sys.argv[1:]
iptfile = args and args[0] or "Python/graminit.c"
if len(args) > 1: optfile = args[1]
else: optfile = "Lib/keyword.py"
# load the output skeleton from the target, taking care to preserve its
# newline convention.
with open(optfile, newline='') as fp:
format = fp.readlines()
nl = format[0][len(format[0].strip()):] if format else '\n'
# scan the source file for keywords
with open(iptfile) as fp:
strprog = re.compile('"([^"]+)"')
lines = []
for line in fp:
if '{1, "' in line:
match = strprog.search(line)
if match:
lines.append(" '" + match.group(1) + "'," + nl)
lines.sort()
# insert the lines of keywords into the skeleton
try:
start = format.index("#--start keywords--" + nl) + 1
end = format.index("#--end keywords--" + nl)
format[start:end] = lines
except ValueError:
sys.stderr.write("target does not contain format markers\n")
sys.exit(1)
# write the output file
with open(optfile, 'w', newline='') as fp:
fp.writelines(format)
if __name__ == "__main__":
main()
| lgpl-3.0 |
837468220/python-for-android | python-modules/twisted/twisted/web/test/test_static.py | 49 | 55073 | # Copyright (c) 2001-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.web.static}.
"""
import os, re, StringIO
from zope.interface.verify import verifyObject
from twisted.internet import abstract, interfaces
from twisted.python.compat import set
from twisted.python.runtime import platform
from twisted.python.filepath import FilePath
from twisted.python import log
from twisted.trial.unittest import TestCase
from twisted.web import static, http, script, resource
from twisted.web.server import UnsupportedMethod
from twisted.web.test.test_web import DummyRequest
from twisted.web.test._util import _render
class StaticDataTests(TestCase):
"""
Tests for L{Data}.
"""
def test_headRequest(self):
"""
L{Data.render} returns an empty response body for a I{HEAD} request.
"""
data = static.Data("foo", "bar")
request = DummyRequest([''])
request.method = 'HEAD'
d = _render(data, request)
def cbRendered(ignored):
self.assertEqual(''.join(request.written), "")
d.addCallback(cbRendered)
return d
def test_invalidMethod(self):
"""
L{Data.render} raises L{UnsupportedMethod} in response to a non-I{GET},
non-I{HEAD} request.
"""
data = static.Data("foo", "bar")
request = DummyRequest([''])
request.method = 'POST'
self.assertRaises(UnsupportedMethod, data.render, request)
class StaticFileTests(TestCase):
"""
Tests for the basic behavior of L{File}.
"""
def _render(self, resource, request):
return _render(resource, request)
def test_invalidMethod(self):
"""
L{File.render} raises L{UnsupportedMethod} in response to a non-I{GET},
non-I{HEAD} request.
"""
request = DummyRequest([''])
request.method = 'POST'
path = FilePath(self.mktemp())
path.setContent("foo")
file = static.File(path.path)
self.assertRaises(UnsupportedMethod, file.render, request)
def test_notFound(self):
"""
If a request is made which encounters a L{File} before a final segment
which does not correspond to any file in the path the L{File} was
created with, a not found response is sent.
"""
base = FilePath(self.mktemp())
base.makedirs()
file = static.File(base.path)
request = DummyRequest(['foobar'])
child = resource.getChildForRequest(file, request)
d = self._render(child, request)
def cbRendered(ignored):
self.assertEqual(request.responseCode, 404)
d.addCallback(cbRendered)
return d
def test_emptyChild(self):
"""
The C{''} child of a L{File} which corresponds to a directory in the
filesystem is a L{DirectoryLister}.
"""
base = FilePath(self.mktemp())
base.makedirs()
file = static.File(base.path)
request = DummyRequest([''])
child = resource.getChildForRequest(file, request)
self.assertIsInstance(child, static.DirectoryLister)
self.assertEqual(child.path, base.path)
def test_securityViolationNotFound(self):
"""
If a request is made which encounters a L{File} before a final segment
which cannot be looked up in the filesystem due to security
considerations, a not found response is sent.
"""
base = FilePath(self.mktemp())
base.makedirs()
file = static.File(base.path)
request = DummyRequest(['..'])
child = resource.getChildForRequest(file, request)
d = self._render(child, request)
def cbRendered(ignored):
self.assertEqual(request.responseCode, 404)
d.addCallback(cbRendered)
return d
def test_forbiddenResource(self):
"""
If the file in the filesystem which would satisfy a request cannot be
read, L{File.render} sets the HTTP response code to I{FORBIDDEN}.
"""
base = FilePath(self.mktemp())
base.setContent('')
# Make sure we can delete the file later.
self.addCleanup(base.chmod, 0700)
# Get rid of our own read permission.
base.chmod(0)
file = static.File(base.path)
request = DummyRequest([''])
d = self._render(file, request)
def cbRendered(ignored):
self.assertEqual(request.responseCode, 403)
d.addCallback(cbRendered)
return d
if platform.isWindows():
test_forbiddenResource.skip = "Cannot remove read permission on Windows"
def test_indexNames(self):
"""
If a request is made which encounters a L{File} before a final empty
segment, a file in the L{File} instance's C{indexNames} list which
exists in the path the L{File} was created with is served as the
response to the request.
"""
base = FilePath(self.mktemp())
base.makedirs()
base.child("foo.bar").setContent("baz")
file = static.File(base.path)
file.indexNames = ['foo.bar']
request = DummyRequest([''])
child = resource.getChildForRequest(file, request)
d = self._render(child, request)
def cbRendered(ignored):
self.assertEqual(''.join(request.written), 'baz')
self.assertEqual(request.outgoingHeaders['content-length'], '3')
d.addCallback(cbRendered)
return d
def test_staticFile(self):
"""
If a request is made which encounters a L{File} before a final segment
which names a file in the path the L{File} was created with, that file
is served as the response to the request.
"""
base = FilePath(self.mktemp())
base.makedirs()
base.child("foo.bar").setContent("baz")
file = static.File(base.path)
request = DummyRequest(['foo.bar'])
child = resource.getChildForRequest(file, request)
d = self._render(child, request)
def cbRendered(ignored):
self.assertEqual(''.join(request.written), 'baz')
self.assertEqual(request.outgoingHeaders['content-length'], '3')
d.addCallback(cbRendered)
return d
def test_staticFileDeletedGetChild(self):
"""
A L{static.File} created for a directory which does not exist should
return childNotFound from L{static.File.getChild}.
"""
staticFile = static.File(self.mktemp())
request = DummyRequest(['foo.bar'])
child = staticFile.getChild("foo.bar", request)
self.assertEquals(child, staticFile.childNotFound)
def test_staticFileDeletedRender(self):
"""
A L{static.File} created for a file which does not exist should render
its C{childNotFound} page.
"""
staticFile = static.File(self.mktemp())
request = DummyRequest(['foo.bar'])
request2 = DummyRequest(['foo.bar'])
d = self._render(staticFile, request)
d2 = self._render(staticFile.childNotFound, request2)
def cbRendered2(ignored):
def cbRendered(ignored):
self.assertEquals(''.join(request.written),
''.join(request2.written))
d.addCallback(cbRendered)
return d
d2.addCallback(cbRendered2)
return d2
def test_headRequest(self):
"""
L{static.File.render} returns an empty response body for I{HEAD}
requests.
"""
path = FilePath(self.mktemp())
path.setContent("foo")
file = static.File(path.path)
request = DummyRequest([''])
request.method = 'HEAD'
d = _render(file, request)
def cbRendered(ignored):
self.assertEqual("".join(request.written), "")
d.addCallback(cbRendered)
return d
def test_processors(self):
"""
If a request is made which encounters a L{File} before a final segment
which names a file with an extension which is in the L{File}'s
C{processors} mapping, the processor associated with that extension is
used to serve the response to the request.
"""
base = FilePath(self.mktemp())
base.makedirs()
base.child("foo.bar").setContent(
"from twisted.web.static import Data\n"
"resource = Data('dynamic world','text/plain')\n")
file = static.File(base.path)
file.processors = {'.bar': script.ResourceScript}
request = DummyRequest(["foo.bar"])
child = resource.getChildForRequest(file, request)
d = self._render(child, request)
def cbRendered(ignored):
self.assertEqual(''.join(request.written), 'dynamic world')
self.assertEqual(request.outgoingHeaders['content-length'], '13')
d.addCallback(cbRendered)
return d
def test_ignoreExt(self):
"""
The list of ignored extensions can be set by passing a value to
L{File.__init__} or by calling L{File.ignoreExt} later.
"""
file = static.File(".")
self.assertEqual(file.ignoredExts, [])
file.ignoreExt(".foo")
file.ignoreExt(".bar")
self.assertEqual(file.ignoredExts, [".foo", ".bar"])
file = static.File(".", ignoredExts=(".bar", ".baz"))
self.assertEqual(file.ignoredExts, [".bar", ".baz"])
def test_ignoredExtensionsIgnored(self):
"""
A request for the I{base} child of a L{File} succeeds with a resource
for the I{base<extension>} file in the path the L{File} was created
with if such a file exists and the L{File} has been configured to
ignore the I{<extension>} extension.
"""
base = FilePath(self.mktemp())
base.makedirs()
base.child('foo.bar').setContent('baz')
base.child('foo.quux').setContent('foobar')
file = static.File(base.path, ignoredExts=(".bar",))
request = DummyRequest(["foo"])
child = resource.getChildForRequest(file, request)
d = self._render(child, request)
def cbRendered(ignored):
self.assertEqual(''.join(request.written), 'baz')
d.addCallback(cbRendered)
return d
class StaticMakeProducerTests(TestCase):
"""
Tests for L{File.makeProducer}.
"""
def makeResourceWithContent(self, content, type=None, encoding=None):
"""
Make a L{static.File} resource that has C{content} for its content.
@param content: The bytes to use as the contents of the resource.
@param type: Optional value for the content type of the resource.
@param encoding: Optional value for the content encoding of the resource.
"""
fileName = self.mktemp()
fileObject = open(fileName, 'w')
fileObject.write(content)
fileObject.close()
resource = static.File(fileName)
resource.encoding = encoding
resource.type = type
return resource
def contentHeaders(self, request):
"""
Extract the content-* headers from the L{DummyRequest} C{request}.
This returns the subset of C{request.outgoingHeaders} of headers that
start with 'content-'.
"""
contentHeaders = {}
for k, v in request.outgoingHeaders.iteritems():
if k.startswith('content-'):
contentHeaders[k] = v
return contentHeaders
def test_noRangeHeaderGivesNoRangeStaticProducer(self):
"""
makeProducer when no Range header is set returns an instance of
NoRangeStaticProducer.
"""
resource = self.makeResourceWithContent('')
request = DummyRequest([])
producer = resource.makeProducer(request, resource.openForReading())
self.assertIsInstance(producer, static.NoRangeStaticProducer)
def test_noRangeHeaderSets200OK(self):
"""
makeProducer when no Range header is set sets the responseCode on the
request to 'OK'.
"""
resource = self.makeResourceWithContent('')
request = DummyRequest([])
resource.makeProducer(request, resource.openForReading())
self.assertEqual(http.OK, request.responseCode)
def test_noRangeHeaderSetsContentHeaders(self):
"""
makeProducer when no Range header is set sets the Content-* headers
for the response.
"""
length = 123
contentType = "text/plain"
contentEncoding = 'gzip'
resource = self.makeResourceWithContent(
'a'*length, type=contentType, encoding=contentEncoding)
request = DummyRequest([])
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
{'content-type': contentType, 'content-length': str(length),
'content-encoding': contentEncoding},
self.contentHeaders(request))
def test_singleRangeGivesSingleRangeStaticProducer(self):
"""
makeProducer when the Range header requests a single byte range
returns an instance of SingleRangeStaticProducer.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=1-3'
resource = self.makeResourceWithContent('abcdef')
producer = resource.makeProducer(request, resource.openForReading())
self.assertIsInstance(producer, static.SingleRangeStaticProducer)
def test_singleRangeSets206PartialContent(self):
"""
makeProducer when the Range header requests a single, satisfiable byte
range sets the response code on the request to 'Partial Content'.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=1-3'
resource = self.makeResourceWithContent('abcdef')
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
http.PARTIAL_CONTENT, request.responseCode)
def test_singleRangeSetsContentHeaders(self):
"""
makeProducer when the Range header requests a single, satisfiable byte
range sets the Content-* headers appropriately.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=1-3'
contentType = "text/plain"
contentEncoding = 'gzip'
resource = self.makeResourceWithContent('abcdef', type=contentType, encoding=contentEncoding)
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
{'content-type': contentType, 'content-encoding': contentEncoding,
'content-range': 'bytes 1-3/6', 'content-length': '3'},
self.contentHeaders(request))
def test_singleUnsatisfiableRangeReturnsSingleRangeStaticProducer(self):
"""
makeProducer still returns an instance of L{SingleRangeStaticProducer}
when the Range header requests a single unsatisfiable byte range.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=4-10'
resource = self.makeResourceWithContent('abc')
producer = resource.makeProducer(request, resource.openForReading())
self.assertIsInstance(producer, static.SingleRangeStaticProducer)
def test_singleUnsatisfiableRangeSets416RequestedRangeNotSatisfiable(self):
"""
makeProducer sets the response code of the request to of 'Requested
Range Not Satisfiable' when the Range header requests a single
unsatisfiable byte range.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=4-10'
resource = self.makeResourceWithContent('abc')
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
http.REQUESTED_RANGE_NOT_SATISFIABLE, request.responseCode)
def test_singleUnsatisfiableRangeSetsContentHeaders(self):
"""
makeProducer when the Range header requests a single, unsatisfiable
byte range sets the Content-* headers appropriately.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=4-10'
contentType = "text/plain"
resource = self.makeResourceWithContent('abc', type=contentType)
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
{'content-type': 'text/plain', 'content-length': '0',
'content-range': 'bytes */3'},
self.contentHeaders(request))
def test_singlePartiallyOverlappingRangeSetsContentHeaders(self):
"""
makeProducer when the Range header requests a single byte range that
partly overlaps the resource sets the Content-* headers appropriately.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=2-10'
contentType = "text/plain"
resource = self.makeResourceWithContent('abc', type=contentType)
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
{'content-type': 'text/plain', 'content-length': '1',
'content-range': 'bytes 2-2/3'},
self.contentHeaders(request))
def test_multipleRangeGivesMultipleRangeStaticProducer(self):
"""
makeProducer when the Range header requests multiple byte ranges
returns an instance of MultipleRangeStaticProducer.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=1-3,5-6'
resource = self.makeResourceWithContent('abcdef')
producer = resource.makeProducer(request, resource.openForReading())
self.assertIsInstance(producer, static.MultipleRangeStaticProducer)
def test_multipleRangeSets206PartialContent(self):
"""
makeProducer when the Range header requests multiple satisfiable
byte ranges sets the response code on the request to 'Partial
Content'.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=1-3,5-6'
resource = self.makeResourceWithContent('abcdef')
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
http.PARTIAL_CONTENT, request.responseCode)
def test_multipleRangeSetsContentHeaders(self):
"""
makeProducer when the Range header requests multiple satisfiable byte
ranges sets the Content-* headers appropriately.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=1-3,5-6'
resource = self.makeResourceWithContent(
'abcdefghijkl', encoding='gzip')
producer = resource.makeProducer(request, resource.openForReading())
contentHeaders = self.contentHeaders(request)
# The only content-* headers set are content-type and content-length.
self.assertEqual(
set(['content-length', 'content-type']),
set(contentHeaders.keys()))
# The content-length depends on the boundary used in the response.
expectedLength = 5
for boundary, offset, size in producer.rangeInfo:
expectedLength += len(boundary)
self.assertEqual(expectedLength, contentHeaders['content-length'])
# Content-type should be set to a value indicating a multipart
# response and the boundary used to separate the parts.
self.assertIn('content-type', contentHeaders)
contentType = contentHeaders['content-type']
self.assertNotIdentical(
None, re.match(
r'multipart/byteranges; boundary="[^"]*"\Z', contentType))
# Content-encoding is not set in the response to a multiple range
# response, which is a bit wussy but works well enough with the way
# static.File does content-encodings...
self.assertNotIn('content-encoding', contentHeaders)
def test_multipleUnsatisfiableRangesReturnsMultipleRangeStaticProducer(self):
"""
makeProducer still returns an instance of L{MultipleRangeStaticProducer}
when the Range header requests multiple ranges, none of which are
satisfiable.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=10-12,15-20'
resource = self.makeResourceWithContent('abc')
producer = resource.makeProducer(request, resource.openForReading())
self.assertIsInstance(producer, static.MultipleRangeStaticProducer)
def test_multipleUnsatisfiableRangesSets416RequestedRangeNotSatisfiable(self):
"""
makeProducer sets the response code of the request to 'Requested
Range Not Satisfiable' when the Range header requests multiple ranges,
none of which are satisfiable.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=10-12,15-20'
resource = self.makeResourceWithContent('abc')
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
http.REQUESTED_RANGE_NOT_SATISFIABLE, request.responseCode)
def test_multipleUnsatisfiableRangeSetsContentHeaders(self):
"""
makeProducer when the Range header requests multiple ranges, none of
which are satisfiable, sets the Content-* headers appropriately.
"""
request = DummyRequest([])
contentType = "text/plain"
request.headers['range'] = 'bytes=10-12,15-20'
resource = self.makeResourceWithContent('abc', type=contentType)
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
{'content-length': '0', 'content-range': 'bytes */3'},
self.contentHeaders(request))
def test_oneSatisfiableRangeIsEnough(self):
"""
makeProducer when the Range header requests multiple ranges, at least
one of which matches, sets the response code to 'Partial Content'.
"""
request = DummyRequest([])
request.headers['range'] = 'bytes=1-3,100-200'
resource = self.makeResourceWithContent('abcdef')
resource.makeProducer(request, resource.openForReading())
self.assertEqual(
http.PARTIAL_CONTENT, request.responseCode)
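# For orientation, a hedged sketch of the exchange the tests above cover
# (header values are illustrative, not generated by this suite):
#
#   GET /file HTTP/1.1
#   Range: bytes=1-3
#
#   HTTP/1.1 206 Partial Content
#   Content-Range: bytes 1-3/6
#   Content-Length: 3
#
# An unsatisfiable range instead yields 416 with "Content-Range: bytes */6".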
class StaticProducerTests(TestCase):
"""
Tests for the abstract L{StaticProducer}.
"""
def test_stopProducingClosesFile(self):
"""
L{StaticProducer.stopProducing} closes the file object the producer is
producing data from.
"""
fileObject = StringIO.StringIO()
producer = static.StaticProducer(None, fileObject)
producer.stopProducing()
self.assertTrue(fileObject.closed)
def test_stopProducingSetsRequestToNone(self):
"""
L{StaticProducer.stopProducing} sets the request instance variable to
None, which indicates to subclasses' resumeProducing methods that no
more data should be produced.
"""
fileObject = StringIO.StringIO()
producer = static.StaticProducer(DummyRequest([]), fileObject)
producer.stopProducing()
self.assertIdentical(None, producer.request)
class NoRangeStaticProducerTests(TestCase):
"""
Tests for L{NoRangeStaticProducer}.
"""
def test_implementsIPullProducer(self):
"""
L{NoRangeStaticProducer} implements L{IPullProducer}.
"""
verifyObject(
interfaces.IPullProducer,
static.NoRangeStaticProducer(None, None))
def test_resumeProducingProducesContent(self):
"""
L{NoRangeStaticProducer.resumeProducing} writes content from the
resource to the request.
"""
request = DummyRequest([])
content = 'abcdef'
producer = static.NoRangeStaticProducer(
request, StringIO.StringIO(content))
# start calls registerProducer on the DummyRequest, which pulls all
# output from the producer and so we just need this one call.
producer.start()
self.assertEqual(content, ''.join(request.written))
def test_resumeProducingBuffersOutput(self):
"""
L{NoRangeStaticProducer.start} writes at most
C{abstract.FileDescriptor.bufferSize} bytes of content from the
resource to the request at once.
"""
request = DummyRequest([])
bufferSize = abstract.FileDescriptor.bufferSize
content = 'a' * (2*bufferSize + 1)
producer = static.NoRangeStaticProducer(
request, StringIO.StringIO(content))
# start calls registerProducer on the DummyRequest, which pulls all
# output from the producer and so we just need this one call.
producer.start()
expected = [
content[0:bufferSize],
content[bufferSize:2*bufferSize],
content[2*bufferSize:]
]
self.assertEqual(expected, request.written)
def test_finishCalledWhenDone(self):
"""
L{NoRangeStaticProducer.resumeProducing} calls finish() on the request
after it is done producing content.
"""
request = DummyRequest([])
finishDeferred = request.notifyFinish()
callbackList = []
finishDeferred.addCallback(callbackList.append)
producer = static.NoRangeStaticProducer(
request, StringIO.StringIO('abcdef'))
# start calls registerProducer on the DummyRequest, which pulls all
# output from the producer and so we just need this one call.
producer.start()
self.assertEqual([None], callbackList)
class SingleRangeStaticProducerTests(TestCase):
"""
Tests for L{SingleRangeStaticProducer}.
"""
def test_implementsIPullProducer(self):
"""
L{SingleRangeStaticProducer} implements L{IPullProducer}.
"""
verifyObject(
interfaces.IPullProducer,
static.SingleRangeStaticProducer(None, None, None, None))
def test_resumeProducingProducesContent(self):
"""
L{SingleRangeStaticProducer.resumeProducing} writes the given amount
of content, starting at the given offset, from the resource to the
request.
"""
request = DummyRequest([])
content = 'abcdef'
producer = static.SingleRangeStaticProducer(
request, StringIO.StringIO(content), 1, 3)
# DummyRequest.registerProducer pulls all output from the producer, so
# we just need to call start.
producer.start()
self.assertEqual(content[1:4], ''.join(request.written))
def test_resumeProducingBuffersOutput(self):
"""
L{SingleRangeStaticProducer.start} writes at most
C{abstract.FileDescriptor.bufferSize} bytes of content from the
resource to the request at once.
"""
request = DummyRequest([])
bufferSize = abstract.FileDescriptor.bufferSize
content = 'abc' * bufferSize
producer = static.SingleRangeStaticProducer(
request, StringIO.StringIO(content), 1, bufferSize+10)
# DummyRequest.registerProducer pulls all output from the producer, so
# we just need to call start.
producer.start()
expected = [
content[1:bufferSize+1],
content[bufferSize+1:bufferSize+11],
]
self.assertEqual(expected, request.written)
def test_finishCalledWhenDone(self):
"""
L{SingleRangeStaticProducer.resumeProducing} calls finish() on the
request after it is done producing content.
"""
request = DummyRequest([])
finishDeferred = request.notifyFinish()
callbackList = []
finishDeferred.addCallback(callbackList.append)
producer = static.SingleRangeStaticProducer(
request, StringIO.StringIO('abcdef'), 1, 1)
# start calls registerProducer on the DummyRequest, which pulls all
# output from the producer and so we just need this one call.
producer.start()
self.assertEqual([None], callbackList)
class MultipleRangeStaticProducerTests(TestCase):
"""
Tests for L{MultipleRangeStaticProducer}.
"""
def test_implementsIPullProducer(self):
"""
L{MultipleRangeStaticProducer} implements L{IPullProducer}.
"""
verifyObject(
interfaces.IPullProducer,
static.MultipleRangeStaticProducer(None, None, None))
def test_resumeProducingProducesContent(self):
"""
L{MultipleRangeStaticProducer.resumeProducing} writes the requested
chunks of content from the resource to the request, with the supplied
boundaries in between each chunk.
"""
request = DummyRequest([])
content = 'abcdef'
producer = static.MultipleRangeStaticProducer(
request, StringIO.StringIO(content), [('1', 1, 3), ('2', 5, 1)])
# DummyRequest.registerProducer pulls all output from the producer, so
# we just need to call start.
producer.start()
self.assertEqual('1bcd2f', ''.join(request.written))
def test_resumeProducingBuffersOutput(self):
"""
L{MultipleRangeStaticProducer.start} writes about
C{abstract.FileDescriptor.bufferSize} bytes of content from the
resource to the request at once.
To be specific about the 'about' above: it can write slightly more,
for example in the case where the first boundary plus the first chunk
is less than C{bufferSize} but the first boundary plus the first chunk
plus the second boundary is more, but this is unimportant as in
practice the boundaries are fairly small. On the other side, it is
important for performance to bundle up several small chunks into one
call to request.write.
"""
request = DummyRequest([])
content = '0123456789' * 2
producer = static.MultipleRangeStaticProducer(
request, StringIO.StringIO(content),
[('a', 0, 2), ('b', 5, 10), ('c', 0, 0)])
producer.bufferSize = 10
# DummyRequest.registerProducer pulls all output from the producer, so
# we just need to call start.
producer.start()
expected = [
'a' + content[0:2] + 'b' + content[5:11],
content[11:15] + 'c',
]
self.assertEqual(expected, request.written)
def test_finishCalledWhenDone(self):
"""
L{MultipleRangeStaticProducer.resumeProducing} calls finish() on the
request after it is done producing content.
"""
request = DummyRequest([])
finishDeferred = request.notifyFinish()
callbackList = []
finishDeferred.addCallback(callbackList.append)
producer = static.MultipleRangeStaticProducer(
request, StringIO.StringIO('abcdef'), [('', 1, 2)])
# start calls registerProducer on the DummyRequest, which pulls all
# output from the producer and so we just need this one call.
producer.start()
self.assertEqual([None], callbackList)
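# For orientation, a hedged sketch of a multipart/byteranges body such as
# MultipleRangeStaticProducer emits. In the tests above the "boundary"
# strings are bare markers like '1' or 'a'; in real responses from
# static.File each boundary also carries the per-part headers, roughly:
#
#   --BOUNDARY
#   Content-Type: text/plain
#   Content-Range: bytes 1-3/6
#
#   bcd
#   --BOUNDARY--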
class RangeTests(TestCase):
"""
Tests for I{Range-Header} support in L{twisted.web.static.File}.
@type file: L{file}
@ivar file: Temporary (binary) file containing the content to be served.
@type resource: L{static.File}
@ivar resource: A leaf web resource using C{file} as content.
@type request: L{DummyRequest}
@ivar request: A fake request, requesting C{resource}.
@type catcher: L{list}
@ivar catcher: List which gathers all log information.
"""
def setUp(self):
"""
Create a temporary file with a fixed payload of 64 bytes. Create a
resource for that file and create a request which will be for that
resource. Each test can set a different range header to test different
aspects of the implementation.
"""
path = FilePath(self.mktemp())
# This is just a jumble of random stuff. It's supposed to be a good
# set of data for this test, particularly in order to avoid
# accidentally seeing the right result by having a byte sequence
# repeated at different locations or by having byte values which are
# somehow correlated with their position in the string.
self.payload = ('\xf8u\xf3E\x8c7\xce\x00\x9e\xb6a0y0S\xf0\xef\xac\xb7'
'\xbe\xb5\x17M\x1e\x136k{\x1e\xbe\x0c\x07\x07\t\xd0'
'\xbckY\xf5I\x0b\xb8\x88oZ\x1d\x85b\x1a\xcdk\xf2\x1d'
'&\xfd%\xdd\x82q/A\x10Y\x8b')
path.setContent(self.payload)
self.file = path.open()
self.resource = static.File(self.file.name)
self.resource.isLeaf = 1
self.request = DummyRequest([''])
self.request.uri = self.file.name
self.catcher = []
log.addObserver(self.catcher.append)
def tearDown(self):
"""
Clean up the resource file and the log observer.
"""
self.file.close()
log.removeObserver(self.catcher.append)
def _assertLogged(self, expected):
"""
Asserts that a given log message occurred with an expected message.
"""
logItem = self.catcher.pop()
self.assertEquals(logItem["message"][0], expected)
self.assertEqual(
self.catcher, [], "An additional log occurred: %r" % (logItem,))
def test_invalidRanges(self):
"""
L{File._parseRangeHeader} raises L{ValueError} when passed
syntactically invalid byte ranges.
"""
f = self.resource._parseRangeHeader
# there's no =
self.assertRaises(ValueError, f, 'bytes')
# unknown isn't a valid Bytes-Unit
self.assertRaises(ValueError, f, 'unknown=1-2')
# there's no - in =stuff
self.assertRaises(ValueError, f, 'bytes=3')
# both start and end are empty
self.assertRaises(ValueError, f, 'bytes=-')
# start isn't an integer
self.assertRaises(ValueError, f, 'bytes=foo-')
# end isn't an integer
self.assertRaises(ValueError, f, 'bytes=-foo')
# end isn't equal to or greater than start
self.assertRaises(ValueError, f, 'bytes=5-4')
def test_rangeMissingStop(self):
"""
A single bytes range without an explicit stop position is parsed into a
two-tuple giving the start position and C{None}.
"""
self.assertEqual(
self.resource._parseRangeHeader('bytes=0-'), [(0, None)])
def test_rangeMissingStart(self):
"""
A single bytes range without an explicit start position is parsed into
a two-tuple of C{None} and the end position.
"""
self.assertEqual(
self.resource._parseRangeHeader('bytes=-3'), [(None, 3)])
def test_range(self):
"""
A single bytes range with explicit start and stop positions is parsed
into a two-tuple of those positions.
"""
self.assertEqual(
self.resource._parseRangeHeader('bytes=2-5'), [(2, 5)])
def test_rangeWithSpace(self):
"""
A single bytes range with whitespace in allowed places is parsed in
the same way as it would be without the whitespace.
"""
self.assertEqual(
self.resource._parseRangeHeader(' bytes=1-2 '), [(1, 2)])
self.assertEqual(
self.resource._parseRangeHeader('bytes =1-2 '), [(1, 2)])
self.assertEqual(
self.resource._parseRangeHeader('bytes= 1-2'), [(1, 2)])
self.assertEqual(
self.resource._parseRangeHeader('bytes=1 -2'), [(1, 2)])
self.assertEqual(
self.resource._parseRangeHeader('bytes=1- 2'), [(1, 2)])
self.assertEqual(
self.resource._parseRangeHeader('bytes=1-2 '), [(1, 2)])
def test_nullRangeElements(self):
"""
If there are multiple byte ranges but only one is non-null, the
non-null range is parsed and its start and stop returned.
"""
self.assertEqual(
self.resource._parseRangeHeader('bytes=1-2,\r\n, ,\t'), [(1, 2)])
def test_multipleRanges(self):
"""
If multiple byte ranges are specified their starts and stops are
returned.
"""
self.assertEqual(
self.resource._parseRangeHeader('bytes=1-2,3-4'),
[(1, 2), (3, 4)])
def test_bodyLength(self):
"""
A correct response to a range request is as long as the length of the
requested range.
"""
self.request.headers['range'] = 'bytes=0-43'
self.resource.render(self.request)
self.assertEquals(len(''.join(self.request.written)), 44)
def test_invalidRangeRequest(self):
"""
An incorrect range request (RFC 2616 defines a correct range request as
a Bytes-Unit followed by a '=' character followed by a specific range.
Only 'bytes' is defined) results in the range header value being logged
and a normal 200 response being sent.
"""
self.request.headers['range'] = range = 'foobar=0-43'
self.resource.render(self.request)
expected = "Ignoring malformed Range header %r" % (range,)
self._assertLogged(expected)
self.assertEquals(''.join(self.request.written), self.payload)
self.assertEquals(self.request.responseCode, http.OK)
self.assertEquals(
self.request.outgoingHeaders['content-length'],
str(len(self.payload)))
def parseMultipartBody(self, body, boundary):
"""
Parse C{body} as a multipart MIME response separated by C{boundary}.
Note that this will fail the calling test on certain syntactic
problems.
"""
sep = "\r\n--" + boundary
parts = ''.join(body).split(sep)
self.assertEquals('', parts[0])
self.assertEquals('--\r\n', parts[-1])
parsed_parts = []
for part in parts[1:-1]:
before, header1, header2, blank, partBody = part.split('\r\n', 4)
headers = header1 + '\n' + header2
self.assertEqual('', before)
self.assertEqual('', blank)
partContentTypeValue = re.search(
'^content-type: (.*)$', headers, re.I|re.M).group(1)
start, end, size = re.search(
'^content-range: bytes ([0-9]+)-([0-9]+)/([0-9]+)$',
headers, re.I|re.M).groups()
parsed_parts.append(
{'contentType': partContentTypeValue,
'contentRange': (start, end, size),
'body': partBody})
return parsed_parts
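# A hedged sketch of the layout this helper expects, assuming a boundary
# of "BOUND" (all values illustrative):
#
#   \r\n--BOUND\r\ncontent-type: text/plain\r\n
#   content-range: bytes 0-2/64\r\n\r\n<part body>\r\n--BOUND--\r\n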
def test_multipleRangeRequest(self):
"""
The response to a request for multiple byte ranges is a MIME-ish
multipart response.
"""
startEnds = [(0, 2), (20, 30), (40, 50)]
rangeHeaderValue = ','.join(["%s-%s"%(s,e) for (s, e) in startEnds])
self.request.headers['range'] = 'bytes=' + rangeHeaderValue
self.resource.render(self.request)
self.assertEquals(self.request.responseCode, http.PARTIAL_CONTENT)
boundary = re.match(
'^multipart/byteranges; boundary="(.*)"$',
self.request.outgoingHeaders['content-type']).group(1)
parts = self.parseMultipartBody(''.join(self.request.written), boundary)
self.assertEquals(len(startEnds), len(parts))
for part, (s, e) in zip(parts, startEnds):
self.assertEqual(self.resource.type, part['contentType'])
start, end, size = part['contentRange']
self.assertEqual(int(start), s)
self.assertEqual(int(end), e)
self.assertEqual(int(size), self.resource.getFileSize())
self.assertEqual(self.payload[s:e+1], part['body'])
def test_multipleRangeRequestWithRangeOverlappingEnd(self):
"""
The response to a request for multiple byte ranges is a MIME-ish
multipart response, even when one of the ranges falls off the end of
the resource.
"""
startEnds = [(0, 2), (40, len(self.payload) + 10)]
rangeHeaderValue = ','.join(["%s-%s"%(s,e) for (s, e) in startEnds])
self.request.headers['range'] = 'bytes=' + rangeHeaderValue
self.resource.render(self.request)
self.assertEquals(self.request.responseCode, http.PARTIAL_CONTENT)
boundary = re.match(
'^multipart/byteranges; boundary="(.*)"$',
self.request.outgoingHeaders['content-type']).group(1)
parts = self.parseMultipartBody(''.join(self.request.written), boundary)
self.assertEquals(len(startEnds), len(parts))
for part, (s, e) in zip(parts, startEnds):
self.assertEqual(self.resource.type, part['contentType'])
start, end, size = part['contentRange']
self.assertEqual(int(start), s)
self.assertEqual(int(end), min(e, self.resource.getFileSize()-1))
self.assertEqual(int(size), self.resource.getFileSize())
self.assertEqual(self.payload[s:e+1], part['body'])
def test_implicitEnd(self):
"""
If the end byte position is omitted, then the range is treated as
extending to the last byte of the resource.
"""
self.request.headers['range'] = 'bytes=23-'
self.resource.render(self.request)
self.assertEquals(''.join(self.request.written), self.payload[23:])
self.assertEquals(len(''.join(self.request.written)), 41)
self.assertEquals(self.request.responseCode, http.PARTIAL_CONTENT)
self.assertEquals(
self.request.outgoingHeaders['content-range'], 'bytes 23-63/64')
self.assertEquals(self.request.outgoingHeaders['content-length'], '41')
def test_implicitStart(self):
"""
If the start byte position is omitted but the end byte position is
supplied, then the range is treated as requesting the last -N bytes of
the resource, where N is the end byte position.
"""
self.request.headers['range'] = 'bytes=-17'
self.resource.render(self.request)
self.assertEquals(''.join(self.request.written), self.payload[-17:])
self.assertEquals(len(''.join(self.request.written)), 17)
self.assertEquals(self.request.responseCode, http.PARTIAL_CONTENT)
self.assertEquals(
self.request.outgoingHeaders['content-range'], 'bytes 47-63/64')
self.assertEquals(self.request.outgoingHeaders['content-length'], '17')
def test_explicitRange(self):
"""
A correct response to a bytes range header request from A to B starts
with the A'th byte and ends with (including) the B'th byte. The first
byte of a page is numbered with 0.
"""
self.request.headers['range'] = 'bytes=3-43'
self.resource.render(self.request)
written = ''.join(self.request.written)
self.assertEquals(written, self.payload[3:44])
self.assertEquals(self.request.responseCode, http.PARTIAL_CONTENT)
self.assertEquals(
self.request.outgoingHeaders['content-range'], 'bytes 3-43/64')
self.assertEquals(
str(len(written)), self.request.outgoingHeaders['content-length'])
def test_explicitRangeOverlappingEnd(self):
"""
A correct response to a bytes range header request from A to B when B
is past the end of the resource starts with the A'th byte and ends
with the last byte of the resource. The first byte of a page is
numbered with 0.
"""
self.request.headers['range'] = 'bytes=40-100'
self.resource.render(self.request)
written = ''.join(self.request.written)
self.assertEquals(written, self.payload[40:])
self.assertEquals(self.request.responseCode, http.PARTIAL_CONTENT)
self.assertEquals(
self.request.outgoingHeaders['content-range'], 'bytes 40-63/64')
self.assertEquals(
str(len(written)), self.request.outgoingHeaders['content-length'])
def test_statusCodeRequestedRangeNotSatisfiable(self):
"""
If a range is syntactically invalid due to the start being greater than
the end, the range header is ignored (the request is responded to as if
it were not present).
"""
self.request.headers['range'] = 'bytes=20-13'
self.resource.render(self.request)
self.assertEquals(self.request.responseCode, http.OK)
self.assertEquals(''.join(self.request.written), self.payload)
self.assertEquals(
self.request.outgoingHeaders['content-length'],
str(len(self.payload)))
def test_invalidStartBytePos(self):
"""
If a range is unsatisfiable due to the start not being less than the
length of the resource, the response is 416 (Requested range not
satisfiable) and no data is written to the response body (RFC 2616,
section 14.35.1).
"""
self.request.headers['range'] = 'bytes=67-108'
self.resource.render(self.request)
self.assertEquals(
self.request.responseCode, http.REQUESTED_RANGE_NOT_SATISFIABLE)
self.assertEquals(''.join(self.request.written), '')
self.assertEquals(self.request.outgoingHeaders['content-length'], '0')
# Sections 10.4.17 and 14.16
self.assertEquals(
self.request.outgoingHeaders['content-range'],
'bytes */%d' % (len(self.payload),))
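# A minimal sketch of the parsing behaviour the tests above describe,
# written against the same 'bytes=first-last[,first-last...]' syntax; it is
# not the implementation under test (_parseRangeHeader in
# twisted.web.static is authoritative).
def _sketchParseRangeHeader(value):
    """Parse a Range header value into a list of (start, end) tuples."""
    unit, sep, rangeSet = value.partition('=')
    if not sep or unit.strip() != 'bytes':
        raise ValueError('Invalid Bytes-Unit: %r' % (value,))
    parsed = []
    for element in rangeSet.split(','):
        element = element.strip()
        if not element:
            continue  # null range elements are ignored
        first, dash, last = element.partition('-')
        if not dash:
            raise ValueError('Invalid Byte-Range: %r' % (element,))
        # int() tolerates surrounding whitespace, matching test_rangeWithSpace
        start = int(first) if first.strip() else None
        end = int(last) if last.strip() else None
        if start is None and end is None:
            raise ValueError('Invalid Byte-Range: %r' % (element,))
        if start is not None and end is not None and end < start:
            raise ValueError('Invalid Byte-Range: %r' % (element,))
        parsed.append((start, end))
    return parsed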
class DirectoryListerTest(TestCase):
"""
Tests for L{static.DirectoryLister}.
"""
def _request(self, uri):
request = DummyRequest([''])
request.uri = uri
return request
def test_renderHeader(self):
"""
L{static.DirectoryLister} prints the request uri as header of the
rendered content.
"""
path = FilePath(self.mktemp())
path.makedirs()
lister = static.DirectoryLister(path.path)
data = lister.render(self._request('foo'))
self.assertIn("<h1>Directory listing for foo</h1>", data)
self.assertIn("<title>Directory listing for foo</title>", data)
def test_renderUnquoteHeader(self):
"""
L{static.DirectoryLister} unquotes the request URI before printing it.
"""
path = FilePath(self.mktemp())
path.makedirs()
lister = static.DirectoryLister(path.path)
data = lister.render(self._request('foo%20bar'))
self.assertIn("<h1>Directory listing for foo bar</h1>", data)
self.assertIn("<title>Directory listing for foo bar</title>", data)
def test_escapeHeader(self):
"""
L{static.DirectoryLister} escapes "&", "<" and ">" after unquoting the
request URI.
"""
path = FilePath(self.mktemp())
path.makedirs()
lister = static.DirectoryLister(path.path)
data = lister.render(self._request('foo%26bar'))
self.assertIn("<h1>Directory listing for foo&bar</h1>", data)
self.assertIn("<title>Directory listing for foo&bar</title>", data)
def test_renderFiles(self):
"""
L{static.DirectoryLister} is able to list all the files inside a
directory.
"""
path = FilePath(self.mktemp())
path.makedirs()
path.child('file1').setContent("content1")
path.child('file2').setContent("content2" * 1000)
lister = static.DirectoryLister(path.path)
data = lister.render(self._request('foo'))
body = """<tr class="odd">
<td><a href="file1">file1</a></td>
<td>8B</td>
<td>[text/html]</td>
<td></td>
</tr>
<tr class="even">
<td><a href="file2">file2</a></td>
<td>7K</td>
<td>[text/html]</td>
<td></td>
</tr>"""
self.assertIn(body, data)
def test_renderDirectories(self):
"""
L{static.DirectoryLister} is able to list all the directories inside
a directory.
"""
path = FilePath(self.mktemp())
path.makedirs()
path.child('dir1').makedirs()
path.child('dir2 & 3').makedirs()
lister = static.DirectoryLister(path.path)
data = lister.render(self._request('foo'))
body = """<tr class="odd">
<td><a href="dir1/">dir1/</a></td>
<td></td>
<td>[Directory]</td>
<td></td>
</tr>
<tr class="even">
<td><a href="dir2%20%26%203/">dir2 & 3/</a></td>
<td></td>
<td>[Directory]</td>
<td></td>
</tr>"""
self.assertIn(body, data)
def test_renderFiltered(self):
"""
L{static.DirectoryLister} takes an optional C{dirs} argument that
filters the list of directories and files printed.
"""
path = FilePath(self.mktemp())
path.makedirs()
path.child('dir1').makedirs()
path.child('dir2').makedirs()
path.child('dir3').makedirs()
lister = static.DirectoryLister(path.path, dirs=["dir1", "dir3"])
data = lister.render(self._request('foo'))
body = """<tr class="odd">
<td><a href="dir1/">dir1/</a></td>
<td></td>
<td>[Directory]</td>
<td></td>
</tr>
<tr class="even">
<td><a href="dir3/">dir3/</a></td>
<td></td>
<td>[Directory]</td>
<td></td>
</tr>"""
self.assertIn(body, data)
def test_oddAndEven(self):
"""
L{static.DirectoryLister} alternates between the "odd" and "even" CSS
classes for the rows of the table.
"""
lister = static.DirectoryLister(None)
elements = [{"href": "", "text": "", "size": "", "type": "",
"encoding": ""} for i in xrange(5)]
content = lister._buildTableContent(elements)
self.assertEquals(len(content), 5)
self.assertTrue(content[0].startswith('<tr class="odd">'))
self.assertTrue(content[1].startswith('<tr class="even">'))
self.assertTrue(content[2].startswith('<tr class="odd">'))
self.assertTrue(content[3].startswith('<tr class="even">'))
self.assertTrue(content[4].startswith('<tr class="odd">'))
def test_mimeTypeAndEncodings(self):
"""
L{static.DirectoryLister} is able to detect mimetype and encoding of
listed files.
"""
path = FilePath(self.mktemp())
path.makedirs()
path.child('file1.txt').setContent("file1")
path.child('file2.py').setContent("python")
path.child('file3.conf.gz').setContent("conf compressed")
path.child('file4.diff.bz2').setContent("diff compressed")
directory = os.listdir(path.path)
directory.sort()
contentTypes = {
".txt": "text/plain",
".py": "text/python",
".conf": "text/configuration",
".diff": "text/diff"
}
lister = static.DirectoryLister(path.path, contentTypes=contentTypes)
dirs, files = lister._getFilesAndDirectories(directory)
self.assertEquals(dirs, [])
self.assertEquals(files, [
{'encoding': '',
'href': 'file1.txt',
'size': '5B',
'text': 'file1.txt',
'type': '[text/plain]'},
{'encoding': '',
'href': 'file2.py',
'size': '6B',
'text': 'file2.py',
'type': '[text/python]'},
{'encoding': '[gzip]',
'href': 'file3.conf.gz',
'size': '15B',
'text': 'file3.conf.gz',
'type': '[text/configuration]'},
{'encoding': '[bzip2]',
'href': 'file4.diff.bz2',
'size': '15B',
'text': 'file4.diff.bz2',
'type': '[text/diff]'}])
def test_brokenSymlink(self):
"""
If a file in the listing is a broken symlink, it is not returned by
L{static.DirectoryLister._getFilesAndDirectories}.
"""
path = FilePath(self.mktemp())
path.makedirs()
file1 = path.child('file1')
file1.setContent("file1")
file1.linkTo(path.child("file2"))
file1.remove()
lister = static.DirectoryLister(path.path)
directory = os.listdir(path.path)
directory.sort()
dirs, files = lister._getFilesAndDirectories(directory)
self.assertEquals(dirs, [])
self.assertEquals(files, [])
if getattr(os, "symlink", None) is None:
test_brokenSymlink.skip = "No symlink support"
def test_childrenNotFound(self):
"""
Any child resource of L{static.DirectoryLister} renders an HTTP
I{NOT FOUND} response code.
"""
path = FilePath(self.mktemp())
path.makedirs()
lister = static.DirectoryLister(path.path)
request = self._request('')
child = resource.getChildForRequest(lister, request)
result = _render(child, request)
def cbRendered(ignored):
self.assertEquals(request.responseCode, http.NOT_FOUND)
result.addCallback(cbRendered)
return result
def test_repr(self):
"""
L{static.DirectoryLister.__repr__} gives the path of the lister.
"""
path = FilePath(self.mktemp())
lister = static.DirectoryLister(path.path)
self.assertEquals(repr(lister),
"<DirectoryLister of %r>" % (path.path,))
self.assertEquals(str(lister),
"<DirectoryLister of %r>" % (path.path,))
def test_formatFileSize(self):
"""
L{static.formatFileSize} formats a number of bytes into a more
readable string.
"""
self.assertEquals(static.formatFileSize(0), "0B")
self.assertEquals(static.formatFileSize(123), "123B")
self.assertEquals(static.formatFileSize(4567), "4K")
self.assertEquals(static.formatFileSize(8900000), "8M")
self.assertEquals(static.formatFileSize(1234000000), "1G")
self.assertEquals(static.formatFileSize(1234567890000), "1149G")
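# A hedged sketch of the rounding rule the assertions above exercise
# (static.formatFileSize is the authoritative implementation); note the
# integer division, which is what turns 4567 bytes into "4K":
def _sketchFormatFileSize(size):
    if size < 1024:
        return '%iB' % (size,)
    elif size < (1024 ** 2):
        return '%iK' % (size // 1024,)
    elif size < (1024 ** 3):
        return '%iM' % (size // (1024 ** 2),)
    return '%iG' % (size // (1024 ** 3),)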
class TestFileTransferDeprecated(TestCase):
"""
L{static.FileTransfer} is deprecated.
"""
def test_deprecation(self):
"""
Instantiation of L{FileTransfer} produces a deprecation warning.
"""
static.FileTransfer(StringIO.StringIO(), 0, DummyRequest([]))
warnings = self.flushWarnings([self.test_deprecation])
self.assertEqual(len(warnings), 1)
self.assertEqual(warnings[0]['category'], DeprecationWarning)
self.assertEqual(
warnings[0]['message'],
'FileTransfer is deprecated since Twisted 9.0. '
'Use a subclass of StaticProducer instead.')
| apache-2.0 |
phil888/Design3 | Livrable 4/Projet/source/vision/LocalisationRobot.py | 2 | 4048 | from __future__ import division
import cv2
import numpy as np
from Code.Projet.source.vision.Localisation import Localisation
from math import *
class LocalisationRobot(Localisation):
def __init__(self):
Localisation.__init__(self)
self.minimumPerimetreRobot = 300
self.maximumPerimetreRobot = 750
self.basSV_Mauve = 65
self.basSV_Orange = 160
self.hauteurTable = 180
self.hauteurRobot = 35
self.centreCameraX = 800
self.centreCameraY = 600
self.angleDemiCamera = 22.5
self.mauve = np.uint8([[[255, 0, 165]]])
self.hsv_mauve = cv2.cvtColor(self.mauve, cv2.COLOR_BGR2HSV)
self.hsv_orange = 7
self.bas_mauve = np.array([self.hsv_mauve.item(0) - self.deltaHue, self.basSV_Mauve, self.basSV_Mauve])
self.bas_orange = np.array([self.hsv_orange - self.deltaHue, self.basSV_iles, self.basSV_Orange])
self.haut_mauve = np.array([self.hsv_mauve.item(0) + self.deltaHue, self.hautSV_iles, self.hautSV_iles])
self.haut_orange = np.array([self.hsv_orange + self.deltaHue, self.hautSV_iles, self.hautSV_iles])
def trouverRectangle(self, image, montrerImage, basCouleur, hautCouleur, ratio):
centreX = 0
centreY = 0
pourcentageDeContour = 0.03
imageHSV = cv2.cvtColor(image, cv2.COLOR_BGR2HSV)
imageCouleur = cv2.inRange(imageHSV, basCouleur, hautCouleur)
image = cv2.bitwise_and(image, image, mask=imageCouleur)
imageGrise = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
if montrerImage:
    cv2.imshow("photo", image)
    cv2.waitKey(0)
contours, hierarchie = cv2.findContours(imageGrise, cv2.RETR_TREE, cv2.CHAIN_APPROX_NONE)
for contour in contours:
epsilon = pourcentageDeContour * cv2.arcLength(contour, True)
perimetre = cv2.approxPolyDP(contour,epsilon, True)
if self.minimumPerimetreRobot < cv2.arcLength(perimetre, True) < self.maximumPerimetreRobot:
centreX, centreY = self.trouverCentre(contour)
cv2.circle(image, (centreX, centreY), 5, (255, 255, 0), 5)
if montrerImage:
    cv2.imshow("photo", image)
    cv2.waitKey(0)
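# Parallax correction (a hedged reading of the math below): the camera
# sees the top of the robot, so its apparent centre in the image is
# offset from the true floor position by tan(angle) * hauteurRobot,
# where `angle` grows linearly from 0 at the image centre to
# angleDemiCamera at the edge; dividing by `ratio` converts that
# real-world offset back into pixels before subtracting it.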
angle = (centreX - self.centreCameraX) / self.centreCameraX * self.angleDemiCamera
baseRobot = tan(radians(angle)) * self.hauteurRobot / ratio
positionXReel = centreX - baseRobot
angle = (centreY - self.centreCameraY) / self.centreCameraY * self.angleDemiCamera
baseRobot = tan(radians(angle)) * self.hauteurRobot / ratio
positionYReel = centreY - baseRobot
return positionXReel,positionYReel
def trouverRobot(self, image, ratio, montrerImage = False):
centreXOrange, centreYOrange = self.trouverRectangle(image,montrerImage,self.bas_orange,self.haut_orange,ratio)
centreXMauve, centreYMauve = self.trouverRectangle(image,montrerImage,self.bas_mauve,self.haut_mauve,ratio)
demiRobotEnPixel = 78
if centreXOrange == 0 and centreYOrange == 0:
centreYOrange = centreYMauve
if centreXMauve < self.centreCameraX:
centreXOrange = centreXMauve - demiRobotEnPixel
else:
centreXOrange = centreXMauve + demiRobotEnPixel
if centreXMauve == 0 and centreYMauve == 0:
centreYMauve = centreYOrange
if centreXOrange < self.centreCameraX:
centreXMauve = centreXOrange - demiRobotEnPixel
else:
centreXMauve = centreXOrange + demiRobotEnPixel
hauteur = centreYOrange - centreYMauve
base = centreXMauve - centreXOrange
orientation = degrees(atan2(hauteur, base)) + 90
if orientation < 0:
orientation = 360 + orientation
X = (centreXMauve + centreXOrange)/2
Y = (centreYMauve + centreYOrange)/2
return (X, Y), orientation | gpl-3.0 |
loretoparisi/nupic | nupic/support/fshelpers.py | 40 | 1547 | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
# This script contains file-system helper functions
import os
def makeDirectoryFromAbsolutePath(absDirPath):
""" Makes directory for the given directory path with default permissions.
If the directory already exists, it is treated as success.
absDirPath: absolute path of the directory to create.
Returns: absDirPath arg
Exceptions: OSError if directory creation fails
"""
assert os.path.isabs(absDirPath)
try:
os.makedirs(absDirPath)
except OSError, e:
if e.errno != os.errno.EEXIST:
raise
return absDirPath
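# Illustrative usage (the path is an assumption for the example):
#
#   cacheDir = makeDirectoryFromAbsolutePath('/tmp/myapp/cache')
#   # a second call with the same path succeeds silently
#   makeDirectoryFromAbsolutePath('/tmp/myapp/cache')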
| agpl-3.0 |
wreckJ/intellij-community | python/lib/Lib/asyncore.py | 70 | 16725 | # -*- Mode: Python -*-
# Id: asyncore.py,v 2.51 2000/09/07 22:29:26 rushing Exp
# Author: Sam Rushing <rushing@nightmare.com>
# ======================================================================
# Copyright 1996 by Sam Rushing
#
# All Rights Reserved
#
# Permission to use, copy, modify, and distribute this software and
# its documentation for any purpose and without fee is hereby
# granted, provided that the above copyright notice appear in all
# copies and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of Sam
# Rushing not be used in advertising or publicity pertaining to
# distribution of the software without specific, written prior
# permission.
#
# SAM RUSHING DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE,
# INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN
# NO EVENT SHALL SAM RUSHING BE LIABLE FOR ANY SPECIAL, INDIRECT OR
# CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS
# OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
# NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN
# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
# ======================================================================
"""Basic infrastructure for asynchronous socket service clients and servers.
There are only two ways to have a program on a single processor do "more
than one thing at a time". Multi-threaded programming is the simplest and
most popular way to do it, but there is another very different technique,
that lets you have nearly all the advantages of multi-threading, without
actually using multiple threads. it's really only practical if your program
is largely I/O bound. If your program is CPU bound, then pre-emptive
scheduled threads are probably what you really need. Network servers are
rarely CPU-bound, however.
If your operating system supports the select() system call in its I/O
library (and nearly all do), then you can use it to juggle multiple
communication channels at once; doing other work while your I/O is taking
place in the "background." Although this strategy can seem strange and
complex, especially at first, it is in many ways easier to understand and
control than multi-threaded programming. The module documented here solves
many of the difficult problems for you, making the task of building
sophisticated high-performance network servers and clients a snap.
"""
import select
import socket
import sys
import time
import os
from errno import EALREADY, EINPROGRESS, EWOULDBLOCK, ECONNRESET, \
ENOTCONN, ESHUTDOWN, EINTR, EISCONN, errorcode
try:
socket_map
except NameError:
socket_map = {}
class ExitNow(Exception):
pass
def read(obj):
try:
obj.handle_read_event()
except ExitNow:
raise
except:
obj.handle_error()
def write(obj):
try:
obj.handle_write_event()
except ExitNow:
raise
except:
obj.handle_error()
def _exception (obj):
try:
obj.handle_expt_event()
except ExitNow:
raise
except:
obj.handle_error()
def readwrite(obj, flags):
try:
if flags & (select.POLLIN | select.POLLPRI):
obj.handle_read_event()
if flags & select.POLLOUT:
obj.handle_write_event()
if flags & (select.POLLERR | select.POLLHUP | select.POLLNVAL):
obj.handle_expt_event()
except ExitNow:
raise
except:
obj.handle_error()
def poll(timeout=0.0, map=None):
if map is None:
map = socket_map
if map:
r = []; w = []; e = []
for fd, obj in map.items():
is_r = obj.readable()
is_w = obj.writable()
if is_r:
r.append(fd)
if is_w:
w.append(fd)
if is_r or is_w:
e.append(fd)
if [] == r == w == e:
time.sleep(timeout)
else:
try:
r, w, e = select.select(r, w, e, timeout)
except select.error, err:
if err[0] != EINTR:
raise
else:
return
for fd in r:
obj = map.get(fd)
if obj is None:
continue
read(obj)
for fd in w:
obj = map.get(fd)
if obj is None:
continue
write(obj)
for fd in e:
obj = map.get(fd)
if obj is None:
continue
_exception(obj)
def poll2(timeout=0.0, map=None):
# Use the poll() support added to the select module in Python 2.0
if map is None:
map = socket_map
if timeout is not None:
# timeout is in milliseconds
timeout = int(timeout*1000)
pollster = select.poll()
if map:
for fd, obj in map.items():
flags = 0
if obj.readable():
flags |= select.POLLIN | select.POLLPRI
if obj.writable():
flags |= select.POLLOUT
if flags:
# Only check for exceptions if object was either readable
# or writable.
flags |= select.POLLERR | select.POLLHUP | select.POLLNVAL
pollster.register(fd, flags)
try:
r = pollster.poll(timeout)
except select.error, err:
if err[0] != EINTR:
raise
r = []
for fd, flags in r:
obj = map.get(fd)
if obj is None:
continue
readwrite(obj, flags)
poll3 = poll2 # Alias for backward compatibility
def loop(timeout=30.0, use_poll=True, map=None, count=None):
if map is None:
map = socket_map
if use_poll and hasattr(select, 'poll'):
poll_fun = poll2
else:
poll_fun = poll
if count is None:
while map:
poll_fun(timeout, map)
else:
while map and count > 0:
poll_fun(timeout, map)
count = count - 1
class dispatcher:
debug = False
connected = False
accepting = False
closing = False
addr = None
def __init__(self, sock=None, map=None):
if map is None:
self._map = socket_map
else:
self._map = map
if sock:
self.set_socket(sock, map)
# I think it should inherit this anyway
self.socket.setblocking(0)
self.connected = True
# XXX Does the constructor require that the socket passed
# be connected?
try:
self.addr = sock.getpeername()
except socket.error:
# The addr isn't crucial
pass
else:
self.socket = None
def __repr__(self):
status = [self.__class__.__module__+"."+self.__class__.__name__]
if self.accepting and self.addr:
status.append('listening')
elif self.connected:
status.append('connected')
if self.addr is not None:
try:
status.append('%s:%d' % self.addr)
except TypeError:
status.append(repr(self.addr))
return '<%s at %#x>' % (' '.join(status), id(self))
def add_channel(self, map=None):
#self.log_info('adding channel %s' % self)
if map is None:
map = self._map
map[self._fileno] = self
def del_channel(self, map=None):
fd = self._fileno
if map is None:
map = self._map
if map.has_key(fd):
#self.log_info('closing channel %d:%s' % (fd, self))
del map[fd]
self._fileno = None
def create_socket(self, family, type):
self.family_and_type = family, type
self.socket = socket.socket(family, type)
self.socket.setblocking(0)
self._fileno = self.socket
self.add_channel()
def set_socket(self, sock, map=None):
self.socket = sock
## self.__dict__['socket'] = sock
self._fileno = sock
self.add_channel(map)
def set_reuse_addr(self):
# try to re-use a server port if possible
try:
self.socket.setsockopt(
socket.SOL_SOCKET, socket.SO_REUSEADDR,
self.socket.getsockopt(socket.SOL_SOCKET,
socket.SO_REUSEADDR) | 1
)
except socket.error:
pass
# ==================================================
# predicates for select()
# these are used as filters for the lists of sockets
# to pass to select().
# ==================================================
def readable(self):
return True
def writable(self):
return True
# ==================================================
# socket object methods.
# ==================================================
def listen(self, num):
self.accepting = True
if os.name == 'nt' and num > 5:
num = 1
return self.socket.listen(num)
def bind(self, addr):
self.addr = addr
return self.socket.bind(addr)
def connect(self, address):
self.connected = False
err = self.socket.connect_ex(address)
# XXX Should interpret Winsock return values
if err in (EINPROGRESS, EALREADY, EWOULDBLOCK):
return
if err in (0, EISCONN):
self.addr = address
self.connected = True
self.handle_connect()
else:
raise socket.error, (err, errorcode[err])
def accept(self):
# XXX can return either an address pair or None
try:
conn, addr = self.socket.accept()
return conn, addr
except socket.error, why:
if why[0] == EWOULDBLOCK:
pass
else:
raise
def send(self, data):
try:
result = self.socket.send(data)
return result
except socket.error, why:
if why[0] == EWOULDBLOCK:
return 0
else:
raise
return 0
def recv(self, buffer_size):
try:
data = self.socket.recv(buffer_size)
if not data:
# a closed connection is indicated by signaling
# a read condition, and having recv() return 0.
self.handle_close()
return ''
else:
return data
except socket.error, why:
# winsock sometimes throws ENOTCONN
if why[0] in [ECONNRESET, ENOTCONN, ESHUTDOWN]:
self.handle_close()
return ''
else:
raise
def close(self):
self.del_channel()
self.socket.close()
# cheap inheritance, used to pass all other attribute
# references to the underlying socket object.
def __getattr__(self, attr):
return getattr(self.socket, attr)
# log and log_info may be overridden to provide more sophisticated
# logging and warning methods. In general, log is for 'hit' logging
# and 'log_info' is for informational, warning and error logging.
def log(self, message):
sys.stderr.write('log: %s\n' % str(message))
def log_info(self, message, type='info'):
if __debug__ or type != 'info':
print '%s: %s' % (type, message)
def handle_read_event(self):
if self.accepting:
# for an accepting socket, getting a read implies
# that we are connected
if not self.connected:
self.connected = True
self.handle_accept()
elif not self.connected:
self.handle_connect()
self.connected = True
self.handle_read()
else:
self.handle_read()
def handle_write_event(self):
# getting a write implies that we are connected
if not self.connected:
self.handle_connect()
self.connected = True
self.handle_write()
def handle_expt_event(self):
self.handle_expt()
def handle_error(self):
nil, t, v, tbinfo = compact_traceback()
# sometimes a user repr method will crash.
try:
self_repr = repr(self)
except:
self_repr = '<__repr__(self) failed for object at %0x>' % id(self)
self.log_info(
'uncaptured python exception, closing channel %s (%s:%s %s)' % (
self_repr,
t,
v,
tbinfo
),
'error'
)
self.close()
def handle_expt(self):
self.log_info('unhandled exception', 'warning')
def handle_read(self):
self.log_info('unhandled read event', 'warning')
def handle_write(self):
self.log_info('unhandled write event', 'warning')
def handle_connect(self):
self.log_info('unhandled connect event', 'warning')
def handle_accept(self):
self.log_info('unhandled accept event', 'warning')
def handle_close(self):
self.log_info('unhandled close event', 'warning')
self.close()
# ---------------------------------------------------------------------------
# adds simple buffered output capability, useful for simple clients.
# [for more sophisticated usage use asynchat.async_chat]
# ---------------------------------------------------------------------------
class dispatcher_with_send(dispatcher):
def __init__(self, sock=None, map=None):
dispatcher.__init__(self, sock, map)
self.out_buffer = ''
def initiate_send(self):
num_sent = 0
num_sent = dispatcher.send(self, self.out_buffer[:512])
self.out_buffer = self.out_buffer[num_sent:]
def handle_write(self):
self.initiate_send()
def writable(self):
return (not self.connected) or len(self.out_buffer)
def send(self, data):
if self.debug:
self.log_info('sending %s' % repr(data))
self.out_buffer = self.out_buffer + data
self.initiate_send()
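# A minimal sketch of the usual subclassing pattern (host, port and payload
# are illustrative; nothing below runs at import time):
#
#   class echo_client(dispatcher_with_send):
#       def __init__(self, host, port):
#           dispatcher_with_send.__init__(self)
#           self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
#           self.connect((host, port))
#           self.send('hello\r\n')
#       def handle_read(self):
#           print self.recv(512)
#
#   echo_client('localhost', 8007)
#   loop()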
# ---------------------------------------------------------------------------
# used for debugging.
# ---------------------------------------------------------------------------
def compact_traceback():
t, v, tb = sys.exc_info()
tbinfo = []
assert tb # Must have a traceback
while tb:
tbinfo.append((
tb.tb_frame.f_code.co_filename,
tb.tb_frame.f_code.co_name,
str(tb.tb_lineno)
))
tb = tb.tb_next
# just to be safe
del tb
file, function, line = tbinfo[-1]
info = ' '.join(['[%s|%s|%s]' % x for x in tbinfo])
return (file, function, line), t, v, info
def close_all(map=None):
if map is None:
map = socket_map
for x in map.values():
x.socket.close()
map.clear()
# Asynchronous File I/O:
#
# After a little research (reading man pages on various unixen, and
# digging through the linux kernel), I've determined that select()
# isn't meant for doing asynchronous file i/o.
# Heartening, though - reading linux/mm/filemap.c shows that linux
# supports asynchronous read-ahead. So _MOST_ of the time, the data
# will be sitting in memory for us already when we go to read it.
#
# What other OS's (besides NT) support async file i/o? [VMS?]
#
# Regardless, this is useful for pipes, and stdin/stdout...
if os.name == 'posix':
import fcntl
class file_wrapper:
# here we override just enough to make a file
# look like a socket for the purposes of asyncore.
def __init__(self, fd):
self.fd = fd
def recv(self, *args):
return os.read(self.fd, *args)
def send(self, *args):
return os.write(self.fd, *args)
read = recv
write = send
def close(self):
os.close(self.fd)
def fileno(self):
return self.fd
class file_dispatcher(dispatcher):
def __init__(self, fd, map=None):
dispatcher.__init__(self, None, map)
self.connected = True
self.set_file(fd)
# set it to non-blocking mode
flags = fcntl.fcntl(fd, fcntl.F_GETFL, 0)
flags = flags | os.O_NONBLOCK
fcntl.fcntl(fd, fcntl.F_SETFL, flags)
def set_file(self, fd):
self._fileno = fd
self.socket = file_wrapper(fd)
self.add_channel()
| apache-2.0 |
thesuperzapper/tensorflow | tensorflow/contrib/layers/python/layers/target_column.py | 125 | 18698 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TargetColumn abstract a single head in the model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.contrib.framework import deprecated
from tensorflow.contrib.losses.python.losses import loss_ops
from tensorflow.contrib.metrics.python.ops import metric_ops
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn
@deprecated(
"2016-11-12", "This file will be removed after the deprecation date."
"Please switch to "
"third_party/tensorflow/contrib/learn/python/learn/estimators/head.py")
def regression_target(label_name=None,
weight_column_name=None,
label_dimension=1):
"""Creates a _TargetColumn for linear regression.
Args:
label_name: String, name of the key in label dict. Can be null if label
is a tensor (single headed models).
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
label_dimension: dimension of the target for multilabels.
Returns:
An instance of _TargetColumn
"""
return _RegressionTargetColumn(
loss_fn=_mean_squared_loss,
label_name=label_name,
weight_column_name=weight_column_name,
label_dimension=label_dimension)
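# Illustrative usage (column names are assumptions for the example):
#
#   target = regression_target(weight_column_name='example_weight')
#   training_loss = target.training_loss(logits, labels, features)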
# TODO(zakaria): Add logistic_regression_target
@deprecated(
"2016-11-12", "This file will be removed after the deprecation date."
"Please switch to "
"third_party/tensorflow/contrib/learn/python/learn/estimators/head.py")
def multi_class_target(n_classes, label_name=None, weight_column_name=None):
"""Creates a _TargetColumn for multi class single label classification.
The target column uses softmax cross entropy loss.
Args:
n_classes: Integer, number of classes, must be >= 2
label_name: String, name of the key in label dict. Can be null if label
is a tensor (single headed models).
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
Returns:
An instance of _MultiClassTargetColumn.
Raises:
ValueError: if n_classes is < 2
"""
if n_classes < 2:
raise ValueError("n_classes must be > 1 for classification.")
if n_classes == 2:
loss_fn = _log_loss_with_two_classes
else:
loss_fn = _softmax_cross_entropy_loss
return _MultiClassTargetColumn(
loss_fn=loss_fn,
n_classes=n_classes,
label_name=label_name,
weight_column_name=weight_column_name)
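# Illustrative usage (the label name is an assumption for the example):
#
#   target = multi_class_target(n_classes=3, label_name='class_id')
#   # With n_classes == 2, a two-class log loss is used instead of softmax.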
@deprecated(
"2016-11-12", "This file will be removed after the deprecation date."
"Please switch to "
"third_party/tensorflow/contrib/learn/python/learn/estimators/head.py")
def binary_svm_target(label_name=None, weight_column_name=None):
"""Creates a _TargetColumn for binary classification with SVMs.
The target column uses binary hinge loss.
Args:
label_name: String, name of the key in label dict. Can be null if label
is a tensor (single headed models).
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
Returns:
An instance of _TargetColumn.
"""
return _BinarySvmTargetColumn(
label_name=label_name, weight_column_name=weight_column_name)
@deprecated(
"2016-11-12", "This file will be removed after the deprecation date."
"Please switch to "
"third_party/tensorflow/contrib/learn/python/learn/estimators/head.py")
class ProblemType(object):
UNSPECIFIED = 0
CLASSIFICATION = 1
LINEAR_REGRESSION = 2
LOGISTIC_REGRESSION = 3
class _TargetColumn(object):
"""_TargetColumn is the abstraction for a single head in a model.
Args:
loss_fn: a function that returns the loss tensor.
num_label_columns: Integer, number of label columns.
label_name: String, name of the key in label dict. Can be null if label
is a tensor (single headed models).
weight_column_name: A string defining feature column name representing
weights. It is used to down weight or boost examples during training. It
will be multiplied by the loss of the example.
Raises:
ValueError: if loss_fn or n_classes are missing.
"""
def __init__(self, loss_fn, num_label_columns, label_name, weight_column_name,
problem_type):
if not loss_fn:
raise ValueError("loss_fn must be provided")
if num_label_columns is None: # n_classes can be 0
raise ValueError("num_label_columns must be provided")
self._loss_fn = loss_fn
self._num_label_columns = num_label_columns
self._label_name = label_name
self._weight_column_name = weight_column_name
self._problem_type = problem_type
def logits_to_predictions(self, logits, proba=False):
# Abstract; subclasses must implement.
raise NotImplementedError()
def get_eval_ops(self, features, logits, labels, metrics=None):
"""Returns eval op."""
raise NotImplementedError
@property
def label_name(self):
return self._label_name
@property
def weight_column_name(self):
return self._weight_column_name
@property
def num_label_columns(self):
return self._num_label_columns
def get_weight_tensor(self, features):
if not self._weight_column_name:
return None
else:
return array_ops.reshape(
math_ops.to_float(features[self._weight_column_name]), shape=(-1,))
@property
def problem_type(self):
return self._problem_type
def _weighted_loss(self, loss, weight_tensor):
"""Returns cumulative weighted loss."""
unweighted_loss = array_ops.reshape(loss, shape=(-1,))
weighted_loss = math_ops.multiply(unweighted_loss,
array_ops.reshape(
weight_tensor, shape=(-1,)))
return weighted_loss
def training_loss(self, logits, target, features, name="training_loss"):
"""Returns training loss tensor for this head.
Training loss is different from the loss reported on the tensorboard as we
should respect the example weights when computing the gradient.
L = sum_{i} w_{i} * l_{i} / B
where B is the number of examples in the batch, and l_{i} and w_{i} are
the per-example loss and weight.
Args:
logits: logits, a float tensor.
target: either a tensor for labels or in multihead case, a dict of string
to target tensor.
features: features dict.
name: Op name.
Returns:
Loss tensor.
"""
target = target[self.name] if isinstance(target, dict) else target
loss_unweighted = self._loss_fn(logits, target)
weight_tensor = self.get_weight_tensor(features)
if weight_tensor is None:
return math_ops.reduce_mean(loss_unweighted, name=name)
loss_weighted = self._weighted_loss(loss_unweighted, weight_tensor)
return math_ops.reduce_mean(loss_weighted, name=name)
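# Worked example with illustrative numbers: for per-example losses
# l = [1.0, 3.0], weights w = [1.0, 0.5] and batch size B = 2,
# training_loss = (1.0*1.0 + 0.5*3.0) / 2 = 1.25, whereas loss() below
# divides by sum(w) = 1.5 instead, giving ~1.67.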
def loss(self, logits, target, features):
"""Returns loss tensor for this head.
The loss returned is the weighted average.
L = sum_{i} w_{i} * l_{i} / sum_{i} w_{i}
Args:
logits: logits, a float tensor.
target: either a tensor for labels or in multihead case, a dict of string
to target tensor.
features: features dict.
Returns:
Loss tensor.
"""
target = target[self.name] if isinstance(target, dict) else target
loss_unweighted = self._loss_fn(logits, target)
weight_tensor = self.get_weight_tensor(features)
if weight_tensor is None:
return math_ops.reduce_mean(loss_unweighted, name="loss")
loss_weighted = self._weighted_loss(loss_unweighted, weight_tensor)
return math_ops.div(math_ops.reduce_sum(loss_weighted),
math_ops.to_float(math_ops.reduce_sum(weight_tensor)),
name="loss")
class _RegressionTargetColumn(_TargetColumn):
"""_TargetColumn for regression."""
def __init__(self, loss_fn, label_name, weight_column_name, label_dimension):
super(_RegressionTargetColumn, self).__init__(
loss_fn=loss_fn,
num_label_columns=label_dimension,
label_name=label_name,
weight_column_name=weight_column_name,
problem_type=ProblemType.LINEAR_REGRESSION)
def logits_to_predictions(self, logits, proba=False):
if self.num_label_columns == 1:
return array_ops.squeeze(logits, squeeze_dims=[1])
return logits
def get_eval_ops(self, features, logits, labels, metrics=None):
loss = self.loss(logits, labels, features)
result = {"loss": metric_ops.streaming_mean(loss)}
if metrics:
predictions = self.logits_to_predictions(logits, proba=False)
result.update(
_run_metrics(predictions, labels, metrics,
self.get_weight_tensor(features)))
return result
class _MultiClassTargetColumn(_TargetColumn):
"""_TargetColumn for classification."""
# TODO(zakaria): support multilabel.
def __init__(self, loss_fn, n_classes, label_name, weight_column_name):
if n_classes < 2:
raise ValueError("n_classes must be >= 2")
super(_MultiClassTargetColumn, self).__init__(
loss_fn=loss_fn,
num_label_columns=1 if n_classes == 2 else n_classes,
label_name=label_name,
weight_column_name=weight_column_name,
problem_type=ProblemType.CLASSIFICATION)
def logits_to_predictions(self, logits, proba=False):
if self.num_label_columns == 1:
logits = array_ops.concat([array_ops.zeros_like(logits), logits], 1)
if proba:
return nn.softmax(logits)
else:
return math_ops.argmax(logits, 1)
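# Note (illustrative): in the binary case the single logit z is padded with a
# zero column, so softmax([0, z]) = [1 - sigmoid(z), sigmoid(z)] and
# argmax([0, z]) picks class 1 exactly when z > 0, i.e. sigmoid(z) > 0.5.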
def _default_eval_metrics(self):
if self._num_label_columns == 1:
return get_default_binary_metrics_for_eval(thresholds=[.5])
return {}
def get_eval_ops(self, features, logits, labels, metrics=None):
loss = self.loss(logits, labels, features)
result = {"loss": metric_ops.streaming_mean(loss)}
# Adds default metrics.
if metrics is None:
# TODO(b/29366811): This currently results in both an "accuracy" and an
# "accuracy/threshold_0.500000_mean" metric for binary classification.
metrics = {("accuracy", "classes"): metric_ops.streaming_accuracy}
predictions = math_ops.sigmoid(logits)
labels_float = math_ops.to_float(labels)
default_metrics = self._default_eval_metrics()
for metric_name, metric_op in default_metrics.items():
result[metric_name] = metric_op(predictions, labels_float)
class_metrics = {}
proba_metrics = {}
for name, metric_op in six.iteritems(metrics):
if isinstance(name, tuple):
if len(name) != 2:
raise ValueError("Ignoring metric {}. It returned a tuple with "
"len {}, expected 2.".format(name, len(name)))
else:
if name[1] not in ["classes", "probabilities"]:
raise ValueError("Ignoring metric {}. The 2nd element of its "
"name should be either 'classes' or "
"'probabilities'.".format(name))
elif name[1] == "classes":
class_metrics[name[0]] = metric_op
else:
proba_metrics[name[0]] = metric_op
elif isinstance(name, str):
class_metrics[name] = metric_op
else:
raise ValueError("Ignoring metric {}. Its name is not in the correct "
"form.".format(name))
if class_metrics:
class_predictions = self.logits_to_predictions(logits, proba=False)
result.update(
_run_metrics(class_predictions, labels, class_metrics,
self.get_weight_tensor(features)))
if proba_metrics:
predictions = self.logits_to_predictions(logits, proba=True)
result.update(
_run_metrics(predictions, labels, proba_metrics,
self.get_weight_tensor(features)))
return result
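# Usage sketch (illustrative; the variable names below are made up): `metrics`
# may be keyed by plain strings (applied to class predictions) or by
# (name, "classes"/"probabilities") tuples selecting the prediction tensor:
#   metrics = {
#       ("accuracy", "classes"): metric_ops.streaming_accuracy,
#       ("auc", "probabilities"): metric_ops.streaming_auc,
#   }
#   eval_ops = target_column.get_eval_ops(features, logits, labels, metrics)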
class _BinarySvmTargetColumn(_MultiClassTargetColumn):
"""_TargetColumn for binary classification using SVMs."""
def __init__(self, label_name, weight_column_name):
def loss_fn(logits, target):
check_shape_op = control_flow_ops.Assert(
math_ops.less_equal(array_ops.rank(target), 2),
["target's shape should be either [batch_size, 1] or [batch_size]"])
with ops.control_dependencies([check_shape_op]):
target = array_ops.reshape(
target, shape=[array_ops.shape(target)[0], 1])
return loss_ops.hinge_loss(logits, target)
super(_BinarySvmTargetColumn, self).__init__(
loss_fn=loss_fn,
n_classes=2,
label_name=label_name,
weight_column_name=weight_column_name)
def logits_to_predictions(self, logits, proba=False):
if proba:
raise ValueError(
"logits to probabilities is not supported for _BinarySvmTargetColumn")
logits = array_ops.concat([array_ops.zeros_like(logits), logits], 1)
return math_ops.argmax(logits, 1)
# TODO(zakaria): use contrib losses.
def _mean_squared_loss(logits, target):
# To prevent broadcasting inside "-".
if len(target.get_shape()) == 1:
target = array_ops.expand_dims(target, dim=[1])
logits.get_shape().assert_is_compatible_with(target.get_shape())
return math_ops.square(logits - math_ops.to_float(target))
def _log_loss_with_two_classes(logits, target):
# sigmoid_cross_entropy_with_logits requires [batch_size, 1] target.
if len(target.get_shape()) == 1:
target = array_ops.expand_dims(target, dim=[1])
loss_vec = nn.sigmoid_cross_entropy_with_logits(
labels=math_ops.to_float(target), logits=logits)
return loss_vec
def _softmax_cross_entropy_loss(logits, target):
# Check that we got an integer tensor for classification.
if not target.dtype.is_integer:
raise ValueError("Target's dtype should be integer. "
"Instead got %s." % target.dtype)
# sparse_softmax_cross_entropy_with_logits requires [batch_size] target.
if len(target.get_shape()) == 2:
target = array_ops.squeeze(target, squeeze_dims=[1])
loss_vec = nn.sparse_softmax_cross_entropy_with_logits(
labels=target, logits=logits)
return loss_vec
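# Shape conventions for the three loss helpers above (illustrative summary):
#   _mean_squared_loss:          logits [B, 1], target [B] -> expanded to [B, 1]
#   _log_loss_with_two_classes:  logits [B, 1], target [B] -> expanded to [B, 1]
#   _softmax_cross_entropy_loss: logits [B, C], target [B, 1] -> squeezed to [B]
# Each returns an unreduced per-example loss; reduction happens in
# _TargetColumn.loss / _TargetColumn.training_loss.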
def _run_metrics(predictions, labels, metrics, weights):
result = {}
labels = math_ops.cast(labels, predictions.dtype)
for name, metric in six.iteritems(metrics or {}):
if weights is not None:
result[name] = metric(predictions, labels, weights=weights)
else:
result[name] = metric(predictions, labels)
return result
@deprecated(
"2016-11-12", "This file will be removed after the deprecation date. "
"Please switch to "
"third_party/tensorflow/contrib/learn/python/learn/estimators/head.py")
def get_default_binary_metrics_for_eval(thresholds):
"""Returns a dictionary of basic metrics for logistic regression.
Args:
thresholds: List of floating point thresholds to use for accuracy,
precision, and recall metrics. If None, defaults to [0.5].
Returns:
Dictionary mapping metrics string names to metrics functions.
"""
metrics = {}
metrics[_MetricKeys.PREDICTION_MEAN] = _predictions_streaming_mean
metrics[_MetricKeys.TARGET_MEAN] = _labels_streaming_mean
# Also include the streaming mean of the label as an accuracy baseline, as
# a reminder to users.
metrics[_MetricKeys.ACCURACY_BASELINE] = _labels_streaming_mean
metrics[_MetricKeys.AUC] = _streaming_auc
for threshold in thresholds:
metrics[_MetricKeys.ACCURACY_MEAN %
threshold] = _accuracy_at_threshold(threshold)
# Precision for positive examples.
metrics[_MetricKeys.PRECISION_MEAN % threshold] = _streaming_at_threshold(
metric_ops.streaming_precision_at_thresholds, threshold)
# Recall for positive examples.
metrics[_MetricKeys.RECALL_MEAN % threshold] = _streaming_at_threshold(
metric_ops.streaming_recall_at_thresholds, threshold)
return metrics
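# Example (illustrative) of the metric keys produced for thresholds=[0.5]:
#   "labels/prediction_mean", "labels/actual_target_mean",
#   "accuracy/baseline_target_mean", "auc",
#   "accuracy/threshold_0.500000_mean",
#   "precision/positive_threshold_0.500000_mean",
#   "recall/positive_threshold_0.500000_mean"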
def _float_weights_or_none(weights):
if weights is None:
return None
return math_ops.to_float(weights)
def _labels_streaming_mean(unused_predictions, labels, weights=None):
return metric_ops.streaming_mean(labels, weights=weights)
def _predictions_streaming_mean(predictions, unused_labels, weights=None):
return metric_ops.streaming_mean(predictions, weights=weights)
def _streaming_auc(predictions, labels, weights=None):
return metric_ops.streaming_auc(
predictions, labels, weights=_float_weights_or_none(weights))
def _accuracy_at_threshold(threshold):
def _accuracy_metric(predictions, labels, weights=None):
threshold_predictions = math_ops.to_float(
math_ops.greater_equal(predictions, threshold))
return metric_ops.streaming_accuracy(
predictions=threshold_predictions, labels=labels, weights=weights)
return _accuracy_metric
def _streaming_at_threshold(streaming_metrics_fn, threshold):
def _streaming_metrics(predictions, labels, weights=None):
precision_tensor, update_op = streaming_metrics_fn(
predictions,
labels=labels,
thresholds=[threshold],
weights=_float_weights_or_none(weights))
return array_ops.squeeze(precision_tensor), update_op
return _streaming_metrics
class _MetricKeys(object):
AUC = "auc"
PREDICTION_MEAN = "labels/prediction_mean"
TARGET_MEAN = "labels/actual_target_mean"
ACCURACY_BASELINE = "accuracy/baseline_target_mean"
ACCURACY_MEAN = "accuracy/threshold_%f_mean"
PRECISION_MEAN = "precision/positive_threshold_%f_mean"
RECALL_MEAN = "recall/positive_threshold_%f_mean"
| apache-2.0 |
projectatomic/commissaire | src/commissaire/containermgr/kubernetes/__init__.py | 3 | 10818 | # Copyright (C) 2016 Red Hat, Inc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
The kubernetes container manager package.
"""
import json
import requests
from urllib.parse import urljoin, urlparse
from commissaire.bus import ContainerManagerError
from commissaire.containermgr import ContainerManagerBase
from commissaire.util.config import ConfigurationError
class KubeContainerManager(ContainerManagerBase):
"""
Kubernetes container manager implementation.
"""
def __init__(self, config):
"""
Creates an instance of the Kubernetes Container Manager.
:param config: Configuration details
:type config: dict
"""
ContainerManagerBase.__init__(self, config)
self.__class__.check_config(config)
self.con = requests.Session()
token = config.get('token', None)
if token:
self.con.headers['Authorization'] = 'Bearer {}'.format(token)
self.logger.info('Using bearer token')
self.logger.debug('Bearer token: %s', token)
certificate_path = config.get('certificate_path')
certificate_key_path = config.get('certificate_key_path')
if certificate_path and certificate_key_path:
self.con.cert = (certificate_path, certificate_key_path)
self.logger.info(
'Using client side certificate. Certificate path: %s '
'Certificate Key Path: %s',
certificate_path, certificate_key_path)
# TODO: Verify TLS!!!
self.con.verify = False
self.base_uri = urljoin(config['server_url'], '/api/v1')
self.logger.info(
'Kubernetes Container Manager created: %s', self.base_uri)
self.logger.debug(
'Kubernetes Container Manager: %s', self.__dict__)
@classmethod
def check_config(cls, config):
"""
Examines the configuration parameters for a ContainerManager
and throws a ConfigurationError if any parameters are invalid.
:param cls: ContainerManager class.
:type cls: class
:param config: Configuration dictionary to check.
:type config: dict
:returns: True if configuration is valid
:rtype: bool
:raises: commissaire.util.config.ConfigurationError
"""
try:
url = urlparse(config['server_url'])
except KeyError:
raise ConfigurationError(
'server_url is a required configuration item')
if (bool(config.get('certificate_path')) ^
bool(config.get('certificate_key_path'))):
raise ConfigurationError(
'Both "certificate_path" and "certificate_key_path" '
'must be provided to use a client side certificate')
if config.get('certificate_path'):
if url.scheme != 'https':
raise ConfigurationError(
'Server URL scheme must be "https" when using client '
'side certificates (got "{}")'.format(url.scheme))
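# Example (illustrative; host and paths are made up) of a config dict that
# passes check_config when using a client side certificate:
#   {
#       'server_url': 'https://kube.example.com:6443',
#       'certificate_path': '/etc/commissaire/client.crt',
#       'certificate_key_path': '/etc/commissaire/client.key',
#   }
# The URL scheme must be "https" whenever a certificate path is supplied.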
def _fix_part(self, part):
"""
Ensures the URI part starts with a slash.
:param part: The URI part. EG: /nodes
:type part: str
:returns: The part in the proper format.
:rtype: str
"""
if not part.startswith('/'):
self.logger.debug(
'Part given without starting slash. Adding...')
part = '/{}'.format(part)
return part
def _get(self, part, *args, **kwargs):
"""
Get information from the Kubernetes apiserver.
:param part: The URI part. EG: /nodes
:type part: str
:param args: All other non-keyword arguments.
:type args: tuple
:param kwargs: All other keyword arguments.
:type kwargs: dict
:returns: requests.Response
"""
part = self._fix_part(part)
self.logger.debug('Executing GET for %s', part)
resp = self.con.get(
'{}{}'.format(self.base_uri, part), *args, **kwargs)
self.logger.debug(
'Response for %s. Status: %s', part, resp.status_code)
return resp
def _delete(self, part, *args, **kwargs):
"""
Delete data from the Kubernetes apiserver.
:param part: The URI part. EG: /nodes
:type part: str
:param args: All other non-keyword arguments.
:type args: tuple
:param kwargs: All other keyword arguments.
:type kwargs: dict
:returns: requests.Response
"""
part = self._fix_part(part)
self.logger.debug('Executing DELETE for %s.', part)
resp = self.con.delete(
'{}{}'.format(self.base_uri, part), *args, **kwargs)
self.logger.debug(
'Response for %s. Status: %s', part, resp.status_code)
return resp
def _put(self, part, payload, *args, **kwargs):
"""
Put data to the Kubernetes apiserver.
:param part: The URI part. EG: /nodes
:type part: str
:param payload: Data to send with the PUT.
:type payload: dict
:param args: All other non-keyword arguments.
:type args: tuple
:param kwargs: All other keyword arguments.
:type kwargs: dict
:returns: requests.Response
"""
part = self._fix_part(part)
payload_str = json.dumps(payload)
self.logger.debug(
'Executing PUT for %s. Payload=%s', part, payload_str)
resp = self.con.put(
'{}{}'.format(self.base_uri, part),
data=payload_str, *args, **kwargs)
self.logger.debug(
'Response for %s. Status: %s', part, resp.status_code)
return resp
def _post(self, part, payload, *args, **kwargs):
"""
Post data to the Kubernetes apiserver.
:param part: The URI part. EG: /nodes
:type part: str
:param payload: Data to send with the POST.
:type payload: dict
:param args: All other non-keyword arguments.
:type args: tuple
:param kwargs: All other keyword arguments.
:type kwargs: dict
:returns: requests.Response
"""
part = self._fix_part(part)
payload_str = json.dumps(payload)
self.logger.debug(
'Executing POST for %s. Payload=%s', part, payload_str)
resp = self.con.post(
'{}{}'.format(self.base_uri, part),
data=payload_str, *args, **kwargs)
self.logger.debug(
'Response for %s. Status: %s', part, resp.status_code)
return resp
def register_node(self, name):
"""
Registers a node to the Kubernetes Container Manager.
:param name: The name of the node.
:type name: str
:raises: commissaire.bus.ContainerManagerError
"""
part = '/nodes'
payload = {
"kind": "Node",
"apiVersion": "v1",
"metadata": {
"name": name,
}
}
resp = self._post(part, payload)
if resp.status_code != 201:
error_msg = (
'Non-created response when trying to register the node {}. '
'Status: "{}", Data: "{}"'.format(
name, resp.status_code, resp.text))
self.logger.error(error_msg)
raise ContainerManagerError(error_msg, resp.status_code)
def remove_node(self, name):
"""
Removes a node from the Kubernetes Container Manager.
:param name: The name of the node.
:type name: str
:raises: commissaire.bus.ContainerManagerError
"""
part = '/nodes/{}'.format(name)
resp = self._delete(part)
if resp.status_code != 200:
error_msg = (
'Unexpected response when trying to remove the node {}. '
'Status: {}, Data: {}'.format(
name, resp.status_code, resp.text))
self.logger.error(error_msg)
raise ContainerManagerError(error_msg, resp.status_code)
def remove_all_nodes(self):
"""
Removes all nodes from the Kubernetes Container Manager.
:raises: commissaire.bus.ContainerManagerError
"""
resp = self._delete('/nodes')
if resp.status_code != 200:
error_msg = (
'Unexpected response when trying to remove all nodes. '
'Status: {}, Data: {}'.format(
resp.status_code, resp.text))
self.logger.error(error_msg)
raise ContainerManagerError(error_msg, resp.status_code)
def node_registered(self, name):
"""
Checks whether a node is registered.
:param name: The name of the node.
:type name: str
:raises: commissaire.bus.ContainerManagerError
"""
part = '/nodes/{}'.format(name)
resp = self._get(part)
if resp.status_code != 200:
error_msg = 'Node {} is not registered. Status: {}'.format(
name, resp.status_code)
self.logger.error(error_msg)
raise ContainerManagerError(error_msg, resp.status_code)
def get_node_status(self, name, raw=False):
"""
Returns the node status.
:param name: The name of the node.
:type name: str
:param raw: If True, return only the node's 'status' subtree.
:type raw: bool
:returns: The response back from kubernetes.
:rtype: dict
:raises: commissaire.bus.ContainerManagerError
"""
part = '/nodes/{}'.format(name)
resp = self._get(part)
if resp.status_code != 200:
error_msg = (
'No status for {} returned. Status: {}'.format(
name, resp.status_code))
self.logger.error(error_msg)
raise ContainerManagerError(error_msg, resp.status_code)
data = resp.json()
if raw:
data = data['status']
return data
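# Minimal usage sketch (illustrative; configuration values are made up):
#   config = {'server_url': 'https://kube.example.com:6443', 'token': 'abc123'}
#   mgr = KubeContainerManager(config)
#   mgr.register_node('node-01.example.com')    # POST /api/v1/nodes
#   mgr.node_registered('node-01.example.com')  # raises ContainerManagerError
#                                               # if the node is not registered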
PluginClass = KubeContainerManager
| gpl-3.0 |
ujenmr/ansible | lib/ansible/modules/network/cloudengine/ce_info_center_global.py | 25 | 69958 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_info_center_global
version_added: "2.4"
short_description: Manages outputting logs on HUAWEI CloudEngine switches.
description:
- This module offers the ability to output logs to the log buffer, log file, console, terminal, or log host on HUAWEI CloudEngine switches.
author:
- Li Yanfeng (@QijunPan)
options:
info_center_enable:
description:
- Whether the info-center function is enabled. The value is of the Boolean type.
choices: ['true','false']
packet_priority:
description:
- Set the priority of the syslog packet.The value is an integer ranging from 0 to 7. The default value is 0.
suppress_enable:
description:
- Whether a device is enabled to suppress duplicate statistics. The value is of the Boolean type.
choices: [ 'false', 'true' ]
logfile_max_num:
description:
- Maximum number of log files of the same type. The default value is 200.
- The value range for log files is [3, 500], for security files is [1, 3], and for operation files is [1, 7].
logfile_max_size:
description:
- Maximum size (in MB) of a log file. The default value is 32.
- The value range for log files is [4, 8, 16, 32], for security files is [1, 4], and for operation files is [1, 4].
default: 32
choices: ['4', '8', '16', '32']
channel_id:
description:
- Number for channel. The value is an integer ranging from 0 to 9. The default value is 0.
channel_cfg_name:
description:
- Channel name.The value is a string of 1 to 30 case-sensitive characters. The default value is console.
default: console
channel_out_direct:
description:
- Direction of information output.
choices: ['console','monitor','trapbuffer','logbuffer','snmp','logfile']
filter_feature_name:
description:
- Feature name of the filtered log. The value is a string of 1 to 31 case-insensitive characters.
filter_log_name:
description:
- Name of the filtered log. The value is a string of 1 to 63 case-sensitive characters.
ip_type:
description:
- Log server address type, IPv4 or IPv6.
choices: ['ipv4','ipv6']
server_ip:
description:
- Log server address, IPv4 or IPv6 type. The value is a string of 0 to 255 characters.
The value can be an valid IPv4 or IPv6 address.
server_domain:
description:
- Server name. The value is a string of 1 to 255 case-sensitive characters.
is_default_vpn:
description:
- Use the default VPN or not.
type: bool
default: 'no'
vrf_name:
description:
- VPN name on a log server. The value is a string of 1 to 31 case-sensitive characters.
The default value is _public_.
level:
description:
- Level of logs saved on a log server.
choices: ['emergencies','alert','critical','error','warning','notification','informational','debugging']
server_port:
description:
- Number of a port sending logs.The value is an integer ranging from 1 to 65535.
For UDP, the default value is 514. For TCP, the default value is 601. For TSL, the default value is 6514.
facility:
description:
- Log record tool.
choices: ['local0','local1','local2','local3','local4','local5','local6','local7']
channel_name:
description:
- Channel name. The value is a string of 1 to 30 case-sensitive characters.
timestamp:
description:
- Log server timestamp. The value is of the enumerated type and case-sensitive.
choices: ['UTC', 'localtime']
transport_mode:
description:
- Transport mode. The value is of the enumerated type and case-sensitive.
choices: ['tcp','udp']
ssl_policy_name:
description:
- SSL policy name. The value is a string of 1 to 23 case-sensitive characters.
source_ip:
description:
- Log source ip address, IPv4 or IPv6 type. The value is a string of 0 to 255.
The value can be an valid IPv4 or IPv6 address.
state:
description:
- Specify desired state of the resource.
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
- name: info center global module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Config info-center enable
ce_info_center_global:
info_center_enable: true
state: present
provider: "{{ cli }}"
- name: Config statistic-suppress enable
ce_info_center_global:
suppress_enable: true
state: present
provider: "{{ cli }}"
- name: Config info-center syslog packet-priority 2
ce_info_center_global:
packet_priority: 2
state: present
provider: "{{ cli }}"
- name: Config info-center channel 1 name aaa
ce_info_center_global:
channel_id: 1
channel_cfg_name: aaa
state: present
provider: "{{ cli }}"
- name: Config info-center max-logfile-number 10
ce_info_center_global:
logfile_max_num: 10
state: present
provider: "{{ cli }}"
- name: Config info-center console channel 1
ce_info_center_global:
channel_out_direct: console
channel_id: 1
state: present
provider: "{{ cli }}"
- name: Config info-center filter-id bymodule-alias snmp snmp_iplock
ce_info_center_global:
filter_feature_name: SNMP
filter_log_name: SNMP_IPLOCK
state: present
provider: "{{ cli }}"
- name: Config info-center logfile size 16
ce_info_center_global:
logfile_max_size: 16
state: present
provider: "{{ cli }}"
- name: Config syslog loghost domain.
ce_info_center_global:
server_domain: aaa
vrf_name: aaa
channel_id: 1
transport_mode: tcp
facility: local4
server_port: 100
level: alert
timestamp: UTC
state: present
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"channel_id": "1", "facility": "local4", "is_default_vpn": True, "level": "alert", "server_domain": "aaa",
"server_port": "100", "state": "present", "timestamp": "localtime", "transport_mode": "tcp"}
existing:
description: k/v pairs of existing rollback
returned: always
type: dict
sample:
"server_domain_info": [
{
"chnlId": "1",
"chnlName": "monitor",
"facility": "local4",
"isBriefFmt": "false",
"isDefaultVpn": "false",
"level": "alert",
"serverDomain": "aaa",
"serverPort": "100",
"sourceIP": "0.0.0.0",
"sslPolicyName": "gmc",
"timestamp": "UTC",
"transportMode": "tcp",
"vrfName": "aaa"
}
]
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample:
"server_domain_info": [
{
"chnlId": "1",
"chnlName": "monitor",
"facility": "local4",
"isBriefFmt": "false",
"isDefaultVpn": "true",
"level": "alert",
"serverDomain": "aaa",
"serverPort": "100",
"sourceIP": "0.0.0.0",
"sslPolicyName": null,
"timestamp": "localtime",
"transportMode": "tcp",
"vrfName": "_public_"
},
{
"chnlId": "1",
"chnlName": "monitor",
"facility": "local4",
"isBriefFmt": "false",
"isDefaultVpn": "false",
"level": "alert",
"serverDomain": "aaa",
"serverPort": "100",
"sourceIP": "0.0.0.0",
"sslPolicyName": "gmc",
"timestamp": "UTC",
"transportMode": "tcp",
"vrfName": "aaa"
}
]
updates:
description: command sent to the device
returned: always
type: list
sample: ["info-center loghost domain aaa level alert port 100 facility local4 channel 1 localtime transport tcp"]
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
'''
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import ce_argument_spec, get_nc_config, set_nc_config, check_ip_addr
CE_NC_GET_CENTER_GLOBAL_INFO_HEADER = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<globalParam>
"""
CE_NC_GET_CENTER_GLOBAL_INFO_TAIL = """
</globalParam>
</syslog>
</filter>
"""
CE_NC_MERGE_CENTER_GLOBAL_INFO_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<globalParam operation="merge">
"""
CE_NC_MERGE_CENTER_GLOBAL_INFO_TAIL = """
</globalParam>
</syslog>
</config>
"""
CE_NC_GET_LOG_FILE_INFO_HEADER = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icLogFileInfos>
<icLogFileInfo>
"""
CE_NC_GET_LOG_FILE_INFO_TAIL = """
</icLogFileInfo>
</icLogFileInfos>
</syslog>
</filter>
"""
CE_NC_MERGE_LOG_FILE_INFO_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icLogFileInfos>
<icLogFileInfo operation="merge">
"""
CE_NC_MERGE_LOG_FILE_INFO_TAIL = """
</icLogFileInfo>
</icLogFileInfos>
</syslog>
</config>
"""
CE_NC_GET_CHANNEL_INFO = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icChannels>
<icChannel>
<icChnlId>%s</icChnlId>
<icChnlCfgName></icChnlCfgName>
</icChannel>
</icChannels>
</syslog>
</filter>
"""
CE_NC_MERGE_CHANNEL_INFO_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icChannels>
<icChannel operation="merge">
"""
CE_NC_MERGE_CHANNEL_INFO_TAIL = """
</icChannel>
</icChannels>
</syslog>
</config>
"""
CE_NC_GET_CHANNEL_DIRECT_INFO = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icDirChannels>
<icDirChannel>
<icOutDirect>%s</icOutDirect>
<icCfgChnlId></icCfgChnlId>
</icDirChannel>
</icDirChannels>
</syslog>
</filter>
"""
CE_NC_MERGE_CHANNEL_DIRECT_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icDirChannels>
<icDirChannel operation="merge">
"""
CE_NC_MERGE_CHANNEL_DIRECT_TAIL = """
</icDirChannel>
</icDirChannels>
</syslog>
</config>
"""
CE_NC_GET_FILTER_INFO = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icFilters>
<icFilter>
<icFeatureName></icFeatureName>
<icFilterLogName></icFilterLogName>
</icFilter>
</icFilters>
</syslog>
</filter>
"""
CE_NC_CREATE_CHANNEL_FILTER_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icFilters>
<icFilter operation="create">
"""
CE_NC_CREATE_CHANNEL_FILTER_TAIL = """
</icFilter>
</icFilters>
</syslog>
</config>
"""
CE_NC_DELETE_CHANNEL_FILTER_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<icFilters>
<icFilter operation="delete">
"""
CE_NC_DELETE_CHANNEL_FILTER_TAIL = """
</icFilter>
</icFilters>
</syslog>
</config>
"""
CE_NC_GET_SERVER_IP_INFO_HEADER = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<syslogServers>
<syslogServer>
<ipType>%s</ipType>
<serverIp>%s</serverIp>
<vrfName>%s</vrfName>
<isDefaultVpn>%s</isDefaultVpn>
"""
CE_NC_GET_SERVER_IP_INFO_TAIL = """
</syslogServer>
</syslogServers>
</syslog>
</filter>
"""
CE_NC_MERGE_SERVER_IP_INFO_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<syslogServers>
<syslogServer operation="merge">
<ipType>%s</ipType>
<serverIp>%s</serverIp>
<vrfName>%s</vrfName>
<isDefaultVpn>%s</isDefaultVpn>
"""
CE_NC_MERGE_SERVER_IP_INFO_TAIL = """
</syslogServer>
</syslogServers>
</syslog>
</config>
"""
CE_NC_DELETE_SERVER_IP_INFO_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<syslogServers>
<syslogServer operation="delete">
<ipType>%s</ipType>
<serverIp>%s</serverIp>
<vrfName>%s</vrfName>
<isDefaultVpn>%s</isDefaultVpn>
"""
CE_NC_DELETE_SERVER_IP_INFO_TAIL = """
</syslogServer>
</syslogServers>
</syslog>
</config>
"""
CE_NC_GET_SERVER_DNS_INFO_HEADER = """
<filter type="subtree">
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<syslogDNSs>
<syslogDNS>
"""
CE_NC_GET_SERVER_DNS_INFO_TAIL = """
</syslogDNS>
</syslogDNSs>
</syslog>
</filter>
"""
CE_NC_MERGE_SERVER_DNS_INFO_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<syslogDNSs>
<syslogDNS operation="merge">
<serverDomain>%s</serverDomain>
<vrfName>%s</vrfName>
<isDefaultVpn>%s</isDefaultVpn>
"""
CE_NC_MERGE_SERVER_DNS_INFO_TAIL = """
</syslogDNS>
</syslogDNSs>
</syslog>
</config>
"""
CE_NC_DELETE_SERVER_DNS_INFO_HEADER = """
<config>
<syslog xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<syslogDNSs>
<syslogDNS operation="delete">
<serverDomain>%s</serverDomain>
<vrfName>%s</vrfName>
<isDefaultVpn>%s</isDefaultVpn>
"""
CE_NC_DELETE_SERVER_DNS_INFO_TAIL = """
</syslogDNS>
</syslogDNSs>
</syslog>
</config>
"""
def get_out_direct_default(out_direct):
"""get default out direct"""
outdict = {"console": "1", "monitor": "2", "trapbuffer": "3",
"logbuffer": "4", "snmp": "5", "logfile": "6"}
channel_id_default = outdict.get(out_direct)
return channel_id_default
def get_channel_name_default(channel_id):
"""get default out direct"""
channel_dict = {"0": "console", "1": "monitor", "2": "loghost", "3": "trapbuffer", "4": "logbuffer",
"5": "snmpagent", "6": "channel6", "7": "channel7", "8": "channel8", "9": "channel9"}
channel_name_default = channel_dict.get(channel_id)
return channel_name_default
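# For example (illustrative): get_out_direct_default('logbuffer') returns '4',
# and get_channel_name_default('4') returns 'logbuffer'.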
class InfoCenterGlobal(object):
"""
Manages info center global configuration.
"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.init_module()
# module input info
self.info_center_enable = self.module.params['info_center_enable'] or None
self.packet_priority = self.module.params['packet_priority'] or None
self.suppress_enable = self.module.params['suppress_enable'] or None
self.logfile_max_num = self.module.params['logfile_max_num'] or None
self.logfile_max_size = self.module.params['logfile_max_size'] or None
self.channel_id = self.module.params['channel_id'] or None
self.channel_cfg_name = self.module.params['channel_cfg_name'] or None
self.channel_out_direct = self.module.params['channel_out_direct'] or None
self.filter_feature_name = self.module.params['filter_feature_name'] or None
self.filter_log_name = self.module.params['filter_log_name'] or None
self.ip_type = self.module.params['ip_type'] or None
self.server_ip = self.module.params['server_ip'] or None
self.server_domain = self.module.params['server_domain'] or None
self.is_default_vpn = self.module.params['is_default_vpn'] or None
self.vrf_name = self.module.params['vrf_name'] or None
self.level = self.module.params['level'] or None
self.server_port = self.module.params['server_port'] or None
self.facility = self.module.params['facility'] or None
self.channel_name = self.module.params['channel_name'] or None
self.timestamp = self.module.params['timestamp'] or None
self.transport_mode = self.module.params['transport_mode'] or None
self.ssl_policy_name = self.module.params['ssl_policy_name'] or None
self.source_ip = self.module.params['source_ip'] or None
self.state = self.module.params['state'] or None
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.existing = dict()
self.proposed = dict()
self.end_state = dict()
# syslog info
self.cur_global_info = None
self.cur_logfile_info = None
self.channel_info = None
self.channel_direct_info = None
self.filter_info = None
self.server_ip_info = None
self.server_domain_info = None
def init_module(self):
""" init module """
self.module = AnsibleModule(
argument_spec=self.spec, supports_check_mode=True)
def check_response(self, con_obj, xml_name):
"""Check if response message is already succeed."""
xml_str = con_obj.xml
if "<ok/>" not in xml_str:
self.module.fail_json(msg='Error: %s failed.' % xml_name)
def get_channel_dict(self):
""" get channel attributes dict."""
channel_info = dict()
# get channel info
conf_str = CE_NC_GET_CHANNEL_INFO % self.channel_id
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return channel_info
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
channel_info["channelInfos"] = list()
channels = root.findall("data/syslog/icChannels/icChannel")
if channels:
for channel in channels:
channel_dict = dict()
for ele in channel:
if ele.tag in ["icChnlId", "icChnlCfgName"]:
channel_dict[ele.tag] = ele.text
channel_info["channelInfos"].append(channel_dict)
return channel_info
def is_exist_channel_id_name(self, channel_id, channel_name):
"""if channel id exist"""
if not self.channel_info:
return False
for id2name in self.channel_info["channelInfos"]:
if id2name["icChnlId"] == channel_id and id2name["icChnlCfgName"] == channel_name:
return True
return False
def config_merge_syslog_channel(self, channel_id, channel_name):
"""config channel id"""
if not self.is_exist_channel_id_name(channel_id, channel_name):
conf_str = CE_NC_MERGE_CHANNEL_INFO_HEADER
if channel_id:
conf_str += "<icChnlId>%s</icChnlId>" % channel_id
if channel_name:
conf_str += "<icChnlCfgName>%s</icChnlCfgName>" % channel_name
conf_str += CE_NC_MERGE_CHANNEL_INFO_TAIL
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(
msg='Error: Merge syslog channel id failed.')
self.updates_cmd.append(
"info-center channel %s name %s" % (channel_id, channel_name))
self.changed = True
def delete_merge_syslog_channel(self, channel_id, channel_name):
"""delete channel id"""
change_flag = False
if channel_name:
for id2name in self.channel_info["channelInfos"]:
channel_default_name = get_channel_name_default(
id2name["icChnlId"])
if id2name["icChnlId"] == channel_id and id2name["icChnlCfgName"] == channel_name:
channel_name = channel_default_name
change_flag = True
if not channel_name:
for id2name in self.channel_info["channelInfos"]:
channel_default_name = get_channel_name_default(
id2name["icChnlId"])
if id2name["icChnlId"] == channel_id and id2name["icChnlCfgName"] != channel_default_name:
channel_name = channel_default_name
change_flag = True
if change_flag:
conf_str = CE_NC_MERGE_CHANNEL_INFO_HEADER
if channel_id:
conf_str += "<icChnlId>%s</icChnlId>" % channel_id
if channel_name:
conf_str += "<icChnlCfgName>%s</icChnlCfgName>" % channel_name
conf_str += CE_NC_MERGE_CHANNEL_INFO_TAIL
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(
msg='Error: Merge syslog channel id failed.')
self.updates_cmd.append("undo info-center channel %s" % channel_id)
self.changed = True
def get_channel_direct_dict(self):
""" get channel direct attributes dict."""
channel_direct_info = dict()
# get channel direct info
conf_str = CE_NC_GET_CHANNEL_DIRECT_INFO % self.channel_out_direct
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return channel_direct_info
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
channel_direct_info["channelDirectInfos"] = list()
dir_channels = root.findall("data/syslog/icDirChannels/icDirChannel")
if dir_channels:
for ic_dir_channel in dir_channels:
channel_direct_dict = dict()
for ele in ic_dir_channel:
if ele.tag in ["icOutDirect", "icCfgChnlId"]:
channel_direct_dict[ele.tag] = ele.text
channel_direct_info["channelDirectInfos"].append(
channel_direct_dict)
return channel_direct_info
def is_exist_out_direct(self, out_direct, channel_id):
"""if channel out direct exist"""
if not self.channel_direct_info:
return False
for id2name in self.channel_direct_info["channelDirectInfos"]:
if id2name["icOutDirect"] == out_direct and id2name["icCfgChnlId"] == channel_id:
return True
return False
def config_merge_out_direct(self, out_direct, channel_id):
"""config out direct"""
if not self.is_exist_out_direct(out_direct, channel_id):
conf_str = CE_NC_MERGE_CHANNEL_DIRECT_HEADER
if out_direct:
conf_str += "<icOutDirect>%s</icOutDirect>" % out_direct
if channel_id:
conf_str += "<icCfgChnlId>%s</icCfgChnlId>" % channel_id
conf_str += CE_NC_MERGE_CHANNEL_DIRECT_TAIL
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(
msg='Error: Merge syslog channel out direct failed.')
self.updates_cmd.append(
"info-center %s channel %s" % (out_direct, channel_id))
self.changed = True
def delete_merge_out_direct(self, out_direct, channel_id):
"""delete out direct"""
change_flag = False
channel_id_default = get_out_direct_default(out_direct)
if channel_id:
for id2name in self.channel_direct_info["channelDirectInfos"]:
if id2name["icOutDirect"] == out_direct and id2name["icCfgChnlId"] == channel_id:
if channel_id != channel_id_default:
channel_id = channel_id_default
change_flag = True
if not channel_id:
for id2name in self.channel_direct_info["channelDirectInfos"]:
if id2name["icOutDirect"] == out_direct and id2name["icCfgChnlId"] != channel_id_default:
channel_id = channel_id_default
change_flag = True
if change_flag:
conf_str = CE_NC_MERGE_CHANNEL_DIRECT_HEADER
if out_direct:
conf_str += "<icOutDirect>%s</icOutDirect>" % out_direct
if channel_id:
conf_str += "<icCfgChnlId>%s</icCfgChnlId>" % channel_id
conf_str += CE_NC_MERGE_CHANNEL_DIRECT_TAIL
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(
msg='Error: Merge syslog channel out direct failed.')
self.updates_cmd.append("undo info-center logfile channel")
self.changed = True
def get_filter_dict(self):
""" get syslog filter attributes dict."""
filter_info = dict()
# get filter info
conf_str = CE_NC_GET_FILTER_INFO
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return filter_info
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
filter_info["filterInfos"] = list()
ic_filters = root.findall("data/syslog/icFilters/icFilter")
if ic_filters:
for ic_filter in ic_filters:
filter_dict = dict()
for ele in ic_filter:
if ele.tag in ["icFeatureName", "icFilterLogName"]:
filter_dict[ele.tag] = ele.text
filter_info["filterInfos"].append(filter_dict)
return filter_info
def is_exist_filter(self, filter_feature_name, filter_log_name):
"""if filter info exist"""
if not self.filter_info:
return False
for id2name in self.filter_info["filterInfos"]:
if id2name["icFeatureName"] == filter_feature_name and id2name["icFilterLogName"] == filter_log_name:
return True
return False
def config_merge_filter(self, filter_feature_name, filter_log_name):
"""config filter"""
if not self.is_exist_filter(filter_feature_name, filter_log_name):
conf_str = CE_NC_CREATE_CHANNEL_FILTER_HEADER
conf_str += "<icFilterFlag>true</icFilterFlag>"
if filter_feature_name:
conf_str += "<icFeatureName>%s</icFeatureName>" % filter_feature_name
if filter_log_name:
conf_str += "<icFilterLogName>%s</icFilterLogName>" % filter_log_name
conf_str += CE_NC_CREATE_CHANNEL_FILTER_TAIL
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Merge syslog filter failed.')
self.updates_cmd.append("info-center filter-id bymodule-alias %s %s"
% (filter_feature_name, filter_log_name))
self.changed = True
def delete_merge_filter(self, filter_feature_name, filter_log_name):
"""delete filter"""
change_flag = False
if self.is_exist_filter(filter_feature_name, filter_log_name):
for id2name in self.filter_info["filterInfos"]:
if id2name["icFeatureName"] == filter_feature_name and id2name["icFilterLogName"] == filter_log_name:
change_flag = True
if change_flag:
conf_str = CE_NC_DELETE_CHANNEL_FILTER_HEADER
conf_str += "<icFilterFlag>true</icFilterFlag>"
if filter_feature_name:
conf_str += "<icFeatureName>%s</icFeatureName>" % filter_feature_name
if filter_log_name:
conf_str += "<icFilterLogName>%s</icFilterLogName>" % filter_log_name
conf_str += CE_NC_DELETE_CHANNEL_FILTER_TAIL
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(
msg='Error: Merge syslog channel out direct failed.')
self.updates_cmd.append("undo info-center filter-id bymodule-alias %s %s"
% (filter_feature_name, filter_log_name))
self.changed = True
def get_server_ip_dict(self):
""" get server ip attributes dict."""
server_ip_info = dict()
# get server ip info
is_default_vpn = "false"
if not self.is_default_vpn:
self.is_default_vpn = False
if self.is_default_vpn is True:
is_default_vpn = "true"
if not self.vrf_name:
self.vrf_name = "_public_"
conf_str = CE_NC_GET_SERVER_IP_INFO_HEADER % (
self.ip_type, self.server_ip, self.vrf_name, is_default_vpn)
conf_str += CE_NC_GET_SERVER_IP_INFO_TAIL
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return server_ip_info
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
server_ip_info["serverIpInfos"] = list()
syslog_servers = root.findall("data/syslog/syslogServers/syslogServer")
if syslog_servers:
for syslog_server in syslog_servers:
server_dict = dict()
for ele in syslog_server:
if ele.tag in ["ipType", "serverIp", "vrfName", "level", "serverPort", "facility", "chnlId",
"chnlName", "timestamp", "transportMode", "sslPolicyName", "isDefaultVpn",
"sourceIP", "isBriefFmt"]:
server_dict[ele.tag] = ele.text
server_ip_info["serverIpInfos"].append(server_dict)
return server_ip_info
def config_merge_loghost(self):
"""config loghost ip or dns"""
conf_str = ""
is_default_vpn = "false"
if self.is_default_vpn is True:
is_default_vpn = "true"
if self.ip_type:
conf_str = CE_NC_MERGE_SERVER_IP_INFO_HEADER % (self.ip_type, self.server_ip, self.vrf_name,
is_default_vpn)
elif self.server_domain:
conf_str = CE_NC_MERGE_SERVER_DNS_INFO_HEADER % (
self.server_domain, self.vrf_name, is_default_vpn)
if self.level:
conf_str += "<level>%s</level>" % self.level
if self.server_port:
conf_str += "<serverPort>%s</serverPort>" % self.server_port
if self.facility:
conf_str += "<facility>%s</facility>" % self.facility
if self.channel_id:
conf_str += "<chnlId>%s</chnlId>" % self.channel_id
if self.channel_name:
conf_str += "<chnlName>%s</chnlName>" % self.channel_name
if self.timestamp:
conf_str += "<timestamp>%s</timestamp>" % self.timestamp
if self.transport_mode:
conf_str += "<transportMode>%s</transportMode>" % self.transport_mode
if self.ssl_policy_name:
conf_str += "<sslPolicyName>%s</sslPolicyName>" % self.ssl_policy_name
if self.source_ip:
conf_str += "<sourceIP>%s</sourceIP>" % self.source_ip
if self.ip_type:
conf_str += CE_NC_MERGE_SERVER_IP_INFO_TAIL
elif self.server_domain:
conf_str += CE_NC_MERGE_SERVER_DNS_INFO_TAIL
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Merge server loghost failed.')
cmd = "info-center loghost"
if self.ip_type == "ipv4" and self.server_ip:
cmd += " %s" % self.server_ip
if self.ip_type == "ipv6" and self.server_ip:
cmd += " ipv6 %s" % self.server_ip
if self.server_domain:
cmd += " domain %s" % self.server_domain
if self.vrf_name:
if self.vrf_name != "_public_":
cmd += " vpn-instance %s" % self.vrf_name
if self.level:
cmd += " level %s" % self.level
if self.server_port:
cmd += " port %s" % self.server_port
if self.facility:
cmd += " facility %s" % self.facility
if self.channel_id:
cmd += " channel %s" % self.channel_id
if self.channel_name:
cmd += " channel %s" % self.channel_name
if self.timestamp:
cmd += " %s" % self.timestamp
if self.transport_mode:
cmd += " transport %s" % self.transport_mode
if self.source_ip:
cmd += " source-ip %s" % self.source_ip
if self.ssl_policy_name:
cmd += " ssl-policy %s" % self.ssl_policy_name
self.updates_cmd.append(cmd)
self.changed = True
def delete_merge_loghost(self):
"""delete loghost ip or dns"""
conf_str = ""
is_default_vpn = "false"
if self.is_default_vpn is True:
is_default_vpn = "true"
if self.ip_type:
conf_str = CE_NC_DELETE_SERVER_IP_INFO_HEADER % (self.ip_type, self.server_ip, self.vrf_name,
is_default_vpn)
elif self.server_domain:
conf_str = CE_NC_DELETE_SERVER_DNS_INFO_HEADER % (
self.server_domain, self.vrf_name, is_default_vpn)
if self.level:
conf_str += "<level>%s</level>" % self.level
if self.server_port:
conf_str += "<serverPort>%s</serverPort>" % self.server_port
if self.facility:
conf_str += "<facility>%s</facility>" % self.facility
if self.channel_id:
conf_str += "<chnlId>%s</chnlId>" % self.channel_id
if self.channel_name:
conf_str += "<chnlName>%s</chnlName>" % self.channel_name
if self.timestamp:
conf_str += "<timestamp>%s</timestamp>" % self.timestamp
if self.transport_mode:
conf_str += "<transportMode>%s</transportMode>" % self.transport_mode
if self.ssl_policy_name:
conf_str += "<sslPolicyName>%s</sslPolicyName>" % self.ssl_policy_name
if self.source_ip:
conf_str += "<sourceIP>%s</sourceIP>" % self.source_ip
if self.ip_type:
conf_str += CE_NC_DELETE_SERVER_IP_INFO_TAIL
elif self.server_domain:
conf_str += CE_NC_DELETE_SERVER_DNS_INFO_TAIL
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Merge server loghost failed.')
cmd = "undo info-center loghost"
if self.ip_type == "ipv4" and self.server_ip:
cmd += " %s" % self.server_ip
if self.ip_type == "ipv6" and self.server_ip:
cmd += " ipv6 %s" % self.server_ip
if self.server_domain:
cmd += " domain %s" % self.server_domain
if self.vrf_name:
if self.vrf_name != "_public_":
cmd += " vpn-instance %s" % self.vrf_name
if self.level:
cmd += " level %s" % self.level
if self.server_port:
cmd += " port %s" % self.server_port
if self.facility:
cmd += " facility %s" % self.facility
if self.channel_id:
cmd += " channel %s" % self.channel_id
if self.channel_name:
cmd += " channel %s" % self.channel_name
if self.timestamp:
cmd += " %s" % self.timestamp
if self.transport_mode:
cmd += " transport %s" % self.transport_mode
if self.source_ip:
cmd += " source-ip %s" % self.source_ip
if self.ssl_policy_name:
cmd += " ssl-policy %s" % self.ssl_policy_name
self.updates_cmd.append(cmd)
self.changed = True
def get_server_domain_dict(self):
""" get server domain attributes dict"""
server_domain_info = dict()
# get server domain info
if not self.is_default_vpn:
self.is_default_vpn = False
if not self.vrf_name:
self.vrf_name = "_public_"
conf_str = CE_NC_GET_SERVER_DNS_INFO_HEADER
conf_str += CE_NC_GET_SERVER_DNS_INFO_TAIL
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return server_domain_info
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
server_domain_info["serverAddressInfos"] = list()
syslog_dnss = root.findall("data/syslog/syslogDNSs/syslogDNS")
if syslog_dnss:
for syslog_dns in syslog_dnss:
dns_dict = dict()
for ele in syslog_dns:
if ele.tag in ["serverDomain", "vrfName", "level", "serverPort", "facility", "chnlId",
"chnlName", "timestamp", "transportMode", "sslPolicyName", "isDefaultVpn",
"sourceIP", "isBriefFmt"]:
dns_dict[ele.tag] = ele.text
server_domain_info["serverAddressInfos"].append(dns_dict)
return server_domain_info
def check_need_loghost_cfg(self):
""" check need cfg"""
need_cfg = False
find_flag = False
if self.ip_type and self.server_ip:
if self.server_ip_info:
for tmp in self.server_ip_info["serverIpInfos"]:
find_flag = True
if self.ip_type and tmp.get("ipType") != self.ip_type:
find_flag = False
if self.server_ip and tmp.get("serverIp") != self.server_ip:
find_flag = False
if self.vrf_name and tmp.get("vrfName") != self.vrf_name:
find_flag = False
if self.level and tmp.get("level") != self.level:
find_flag = False
if self.server_port and tmp.get("serverPort") != self.server_port:
find_flag = False
if self.facility and tmp.get("facility") != self.facility:
find_flag = False
if self.channel_id and tmp.get("chnlId") != self.channel_id:
find_flag = False
if self.channel_name and tmp.get("chnlName") != self.channel_name:
find_flag = False
if self.timestamp and tmp.get("timestamp") != self.timestamp:
find_flag = False
if self.transport_mode and tmp.get("transportMode") != self.transport_mode:
find_flag = False
if self.ssl_policy_name and tmp.get("sslPolicyName") != self.ssl_policy_name:
find_flag = False
if self.source_ip and tmp.get("sourceIP") != self.source_ip:
find_flag = False
if find_flag:
break
elif self.server_domain:
if self.server_domain_info:
for tmp in self.server_domain_info["serverAddressInfos"]:
find_flag = True
if self.server_domain and tmp.get("serverDomain") != self.server_domain:
find_flag = False
if self.vrf_name and tmp.get("vrfName") != self.vrf_name:
find_flag = False
if self.level and tmp.get("level") != self.level:
find_flag = False
if self.server_port and tmp.get("serverPort") != self.server_port:
find_flag = False
if self.facility and tmp.get("facility") != self.facility:
find_flag = False
if self.channel_id and tmp.get("chnlId") != self.channel_id:
find_flag = False
if self.channel_name and tmp.get("chnlName") != self.channel_name:
find_flag = False
if self.timestamp and tmp.get("timestamp") != self.timestamp:
find_flag = False
if self.transport_mode and tmp.get("transportMode") != self.transport_mode:
find_flag = False
if self.ssl_policy_name and tmp.get("sslPolicyName") != self.ssl_policy_name:
find_flag = False
if self.source_ip and tmp.get("sourceIP") != self.source_ip:
find_flag = False
if find_flag:
break
else:
find_flag = False
if self.state == "present":
need_cfg = bool(not find_flag)
elif self.state == "absent":
need_cfg = bool(find_flag)
return need_cfg
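# Matching sketch (illustrative): an existing loghost entry suppresses
# reconfiguration only if it matches every supplied parameter. E.g. with
# state == "present", ip_type="ipv4", server_ip="10.1.1.1" and level="alert",
# need_cfg stays True unless a serverIpInfos entry carries exactly those
# values.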
def get_syslog_global(self):
"""get syslog global attributes"""
cur_global_info = dict()
conf_str = CE_NC_GET_CENTER_GLOBAL_INFO_HEADER
if self.info_center_enable:
conf_str += "<icEnable></icEnable>"
if self.packet_priority:
conf_str += "<packetPriority></packetPriority>"
if self.suppress_enable:
conf_str += "<suppressEnable></suppressEnable>"
conf_str += CE_NC_GET_CENTER_GLOBAL_INFO_TAIL
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return cur_global_info
else:
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
global_info = root.findall(
"data/syslog/globalParam")
if global_info:
for tmp in global_info:
for site in tmp:
if site.tag in ["icEnable", "packetPriority", "suppressEnable"]:
cur_global_info[site.tag] = site.text
return cur_global_info
def merge_syslog_global(self):
"""config global"""
conf_str = CE_NC_MERGE_CENTER_GLOBAL_INFO_HEADER
if self.info_center_enable:
conf_str += "<icEnable>%s</icEnable>" % self.info_center_enable
if self.packet_priority:
if self.state == "present":
packet_priority = self.packet_priority
else:
packet_priority = 0
conf_str += "<packetPriority>%s</packetPriority>" % packet_priority
if self.suppress_enable:
conf_str += "<suppressEnable>%s</suppressEnable>" % self.suppress_enable
conf_str += CE_NC_MERGE_CENTER_GLOBAL_INFO_TAIL
if self.info_center_enable == "true" and self.cur_global_info["icEnable"] != self.info_center_enable:
cmd = "info-center enable"
self.updates_cmd.append(cmd)
self.changed = True
if self.suppress_enable == "true" and self.cur_global_info["suppressEnable"] != self.suppress_enable:
cmd = "info-center statistic-suppress enable"
self.updates_cmd.append(cmd)
self.changed = True
if self.info_center_enable == "false" and self.cur_global_info["icEnable"] != self.info_center_enable:
cmd = "undo info-center enable"
self.updates_cmd.append(cmd)
self.changed = True
if self.suppress_enable == "false" and self.cur_global_info["suppressEnable"] != self.suppress_enable:
cmd = "undo info-center statistic-suppress enable"
self.updates_cmd.append(cmd)
self.changed = True
if self.state == "present":
if self.packet_priority:
if self.packet_priority != "0" and self.cur_global_info["packetPriority"] != self.packet_priority:
cmd = "info-center syslog packet-priority %s" % self.packet_priority
self.updates_cmd.append(cmd)
self.changed = True
if self.state == "absent":
if self.packet_priority:
if self.packet_priority != "0" and self.cur_global_info["packetPriority"] == self.packet_priority:
cmd = "undo info-center syslog packet-priority %s" % self.packet_priority
self.updates_cmd.append(cmd)
self.changed = True
if self.changed:
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(msg='Error: Merge syslog global failed.')
def get_syslog_logfile(self):
"""get syslog logfile"""
cur_logfile_info = dict()
conf_str = CE_NC_GET_LOG_FILE_INFO_HEADER
conf_str += "<logFileType>log</logFileType>"
if self.logfile_max_num:
conf_str += "<maxFileNum></maxFileNum>"
if self.logfile_max_size:
conf_str += "<maxFileSize></maxFileSize>"
conf_str += CE_NC_GET_LOG_FILE_INFO_TAIL
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return cur_logfile_info
else:
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
logfile_info = root.findall(
"data/syslog/icLogFileInfos/icLogFileInfo")
if logfile_info:
for tmp in logfile_info:
for site in tmp:
if site.tag in ["maxFileNum", "maxFileSize"]:
cur_logfile_info[site.tag] = site.text
return cur_logfile_info
def merge_syslog_logfile(self):
"""config logfile"""
logfile_max_num = "200"
conf_str = CE_NC_MERGE_LOG_FILE_INFO_HEADER
if self.logfile_max_num:
if self.state == "present":
if self.cur_logfile_info["maxFileNum"] != self.logfile_max_num:
logfile_max_num = self.logfile_max_num
else:
if self.logfile_max_num != "200" and self.cur_logfile_info["maxFileNum"] == self.logfile_max_num:
logfile_max_num = "200"
conf_str += "<maxFileNum>%s</maxFileNum>" % logfile_max_num
if self.logfile_max_size:
logfile_max_size = "32"
if self.state == "present":
if self.cur_logfile_info["maxFileSize"] != self.logfile_max_size:
logfile_max_size = self.logfile_max_size
else:
if self.logfile_max_size != "32" and self.cur_logfile_info["maxFileSize"] == self.logfile_max_size:
logfile_max_size = "32"
conf_str += "<maxFileSize>%s</maxFileSize>" % logfile_max_size
conf_str += "<logFileType>log</logFileType>"
conf_str += CE_NC_MERGE_LOG_FILE_INFO_TAIL
if self.state == "present":
if self.logfile_max_num:
if self.cur_logfile_info["maxFileNum"] != self.logfile_max_num:
cmd = "info-center max-logfile-number %s" % self.logfile_max_num
self.updates_cmd.append(cmd)
self.changed = True
if self.logfile_max_size:
if self.cur_logfile_info["maxFileSize"] != self.logfile_max_size:
cmd = "info-center logfile size %s" % self.logfile_max_size
self.updates_cmd.append(cmd)
self.changed = True
if self.state == "absent":
if self.logfile_max_num and self.logfile_max_num != "200":
if self.cur_logfile_info["maxFileNum"] == self.logfile_max_num:
cmd = "undo info-center max-logfile-number"
self.updates_cmd.append(cmd)
self.changed = True
if self.logfile_max_size and self.logfile_max_size != "32":
if self.cur_logfile_info["maxFileSize"] == self.logfile_max_size:
cmd = "undo info-center logfile size"
self.updates_cmd.append(cmd)
self.changed = True
if self.changed:
recv_xml = set_nc_config(self.module, conf_str)
if "<ok/>" not in recv_xml:
self.module.fail_json(
msg='Error: Merge syslog logfile failed.')
def check_params(self):
"""Check all input params"""
# packet_priority check
if self.packet_priority:
if not self.packet_priority.isdigit():
self.module.fail_json(
msg='Error: The parameter of packet priority is invalid.')
if int(self.packet_priority) > 7 or int(self.packet_priority) < 0:
self.module.fail_json(
msg='Error: The packet priority must be an integer between 0 and 7.')
# logfile_max_num check
if self.logfile_max_num:
if not self.logfile_max_num.isdigit():
self.module.fail_json(
msg='Error: The parameter of logfile_max_num is invalid.')
if int(self.logfile_max_num) > 500 or int(self.logfile_max_num) < 3:
self.module.fail_json(
msg='Error: The logfile_max_num must be an integer between 3 and 500.')
# channel_id check
if self.channel_id:
if not self.channel_id.isdigit():
self.module.fail_json(
msg='Error: The parameter of channel_id is invalid.')
if int(self.channel_id) > 9 or int(self.channel_id) < 0:
self.module.fail_json(
msg='Error: The channel_id must be an integer between 0 and 9.')
# channel_cfg_name check
if self.channel_cfg_name:
if len(self.channel_cfg_name) > 30 \
or len(self.channel_cfg_name.replace(' ', '')) < 1:
self.module.fail_json(
msg='Error: channel_cfg_name is not in the range from 1 to 30.')
# filter_feature_name check
if self.filter_feature_name:
if len(self.filter_feature_name) > 31 \
or len(self.filter_feature_name.replace(' ', '')) < 1:
self.module.fail_json(
msg='Error: filter_feature_name is not in the range from 1 to 31.')
# filter_log_name check
if self.filter_log_name:
if len(self.filter_log_name) > 63 \
or len(self.filter_log_name.replace(' ', '')) < 1:
self.module.fail_json(
msg='Error: filter_log_name is not in the range from 1 to 63.')
# server_ip check
if self.server_ip:
if not check_ip_addr(self.server_ip):
self.module.fail_json(
                    msg='Error: %s is not a valid IP address.' % self.server_ip)
# source_ip check
if self.source_ip:
if not check_ip_addr(self.source_ip):
self.module.fail_json(
                    msg='Error: %s is not a valid IP address.' % self.source_ip)
# server_domain check
if self.server_domain:
if len(self.server_domain) > 255 \
or len(self.server_domain.replace(' ', '')) < 1:
self.module.fail_json(
msg='Error: server_domain is not in the range from 1 to 255.')
# vrf_name check
if self.vrf_name:
if len(self.vrf_name) > 31 \
or len(self.vrf_name.replace(' ', '')) < 1:
self.module.fail_json(
msg='Error: vrf_name is not in the range from 1 to 31.')
# server_port check
if self.server_port:
if not self.server_port.isdigit():
self.module.fail_json(
msg='Error: The parameter of server_port is invalid.')
if int(self.server_port) > 65535 or int(self.server_port) < 1:
self.module.fail_json(
msg='Error: The server_port must be an integer between 1 and 65535.')
# channel_name check
if self.channel_name:
if len(self.channel_name) > 31 \
or len(self.channel_name.replace(' ', '')) < 1:
self.module.fail_json(
                    msg='Error: channel_name is not in the range from 1 to 31.')
# ssl_policy_name check
if self.ssl_policy_name:
if len(self.ssl_policy_name) > 23 \
or len(self.ssl_policy_name.replace(' ', '')) < 1:
self.module.fail_json(
msg='Error: ssl_policy_name is not in the range from 1 to 23.')
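    # For illustration (assumed input): packet_priority="8" passes the isdigit()
    # check above but fails the range check, so the module aborts via fail_json
    # before any configuration is pushed.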
def get_proposed(self):
"""get proposed info"""
if self.info_center_enable:
self.proposed["info_center_enable"] = self.info_center_enable
if self.packet_priority:
self.proposed["packet_priority"] = self.packet_priority
if self.suppress_enable:
self.proposed["suppress_enable"] = self.suppress_enable
if self.logfile_max_num:
self.proposed["logfile_max_num"] = self.logfile_max_num
if self.logfile_max_size:
self.proposed["logfile_max_size"] = self.logfile_max_size
if self.channel_id:
self.proposed["channel_id"] = self.channel_id
if self.channel_cfg_name:
self.proposed["channel_cfg_name"] = self.channel_cfg_name
if self.channel_out_direct:
self.proposed["channel_out_direct"] = self.channel_out_direct
if self.filter_feature_name:
self.proposed["filter_feature_name"] = self.filter_feature_name
if self.filter_log_name:
self.proposed["filter_log_name"] = self.filter_log_name
if self.ip_type:
self.proposed["ip_type"] = self.ip_type
if self.server_ip:
self.proposed["server_ip"] = self.server_ip
if self.server_domain:
self.proposed["server_domain"] = self.server_domain
if self.vrf_name:
self.proposed["vrf_name"] = self.vrf_name
if self.level:
self.proposed["level"] = self.level
if self.server_port:
self.proposed["server_port"] = self.server_port
if self.facility:
self.proposed["facility"] = self.facility
if self.channel_name:
self.proposed["channel_name"] = self.channel_name
if self.timestamp:
self.proposed["timestamp"] = self.timestamp
if self.ssl_policy_name:
self.proposed["ssl_policy_name"] = self.ssl_policy_name
if self.transport_mode:
self.proposed["transport_mode"] = self.transport_mode
if self.is_default_vpn:
self.proposed["is_default_vpn"] = self.is_default_vpn
if self.source_ip:
self.proposed["source_ip"] = self.source_ip
if self.state:
self.proposed["state"] = self.state
def get_existing(self):
"""get existing info"""
if self.info_center_enable:
self.existing["info_center_enable"] = self.cur_global_info[
"icEnable"]
if self.packet_priority:
self.existing["packet_priority"] = self.cur_global_info[
"packetPriority"]
if self.suppress_enable:
self.existing["suppress_enable"] = self.cur_global_info[
"suppressEnable"]
if self.logfile_max_num:
self.existing["logfile_max_num"] = self.cur_logfile_info[
"maxFileNum"]
if self.logfile_max_size:
self.existing["logfile_max_size"] = self.cur_logfile_info[
"maxFileSize"]
if self.channel_id and self.channel_cfg_name:
if self.channel_info:
self.existing["channel_id_info"] = self.channel_info[
"channelInfos"]
if self.channel_out_direct and self.channel_id:
if self.channel_direct_info:
self.existing["channel_out_direct_info"] = self.channel_direct_info[
"channelDirectInfos"]
if self.filter_feature_name and self.filter_log_name:
if self.filter_info:
self.existing["filter_id_info"] = self.filter_info[
"filterInfos"]
if self.ip_type:
if self.server_ip_info:
self.existing["server_ip_info"] = self.server_ip_info[
"serverIpInfos"]
if self.server_domain:
if self.server_domain_info:
self.existing["server_domain_info"] = self.server_domain_info[
"serverAddressInfos"]
def get_end_state(self):
"""get end state info"""
if self.info_center_enable or self.packet_priority or self.suppress_enable:
self.cur_global_info = self.get_syslog_global()
if self.logfile_max_num or self.logfile_max_size:
self.cur_logfile_info = self.get_syslog_logfile()
if self.channel_id and self.channel_cfg_name:
self.channel_info = self.get_channel_dict()
if self.channel_out_direct and self.channel_id:
self.channel_direct_info = self.get_channel_direct_dict()
if self.filter_feature_name and self.filter_log_name:
self.filter_info = self.get_filter_dict()
if self.ip_type:
self.server_ip_info = self.get_server_ip_dict()
if self.server_domain:
self.server_domain_info = self.get_server_domain_dict()
if self.info_center_enable:
self.end_state[
"info_center_enable"] = self.cur_global_info["icEnable"]
if self.packet_priority:
self.end_state["packet_priority"] = self.cur_global_info[
"packetPriority"]
if self.suppress_enable:
self.end_state["suppress_enable"] = self.cur_global_info[
"suppressEnable"]
if self.logfile_max_num:
self.end_state["logfile_max_num"] = self.cur_logfile_info[
"maxFileNum"]
if self.logfile_max_size:
self.end_state["logfile_max_size"] = self.cur_logfile_info[
"maxFileSize"]
if self.channel_id and self.channel_cfg_name:
if self.channel_info:
self.end_state["channel_id_info"] = self.channel_info[
"channelInfos"]
if self.channel_out_direct and self.channel_id:
if self.channel_direct_info:
self.end_state["channel_out_direct_info"] = self.channel_direct_info[
"channelDirectInfos"]
if self.filter_feature_name and self.filter_log_name:
if self.filter_info:
self.end_state["filter_id_info"] = self.filter_info[
"filterInfos"]
if self.ip_type:
if self.server_ip_info:
self.end_state["server_ip_info"] = self.server_ip_info[
"serverIpInfos"]
if self.server_domain:
if self.server_domain_info:
self.end_state["server_domain_info"] = self.server_domain_info[
"serverAddressInfos"]
def work(self):
"""worker"""
self.check_params()
if self.info_center_enable or self.packet_priority or self.suppress_enable:
self.cur_global_info = self.get_syslog_global()
if self.logfile_max_num or self.logfile_max_size:
self.cur_logfile_info = self.get_syslog_logfile()
if self.channel_id:
self.channel_info = self.get_channel_dict()
if self.channel_out_direct:
self.channel_direct_info = self.get_channel_direct_dict()
if self.filter_feature_name and self.filter_log_name:
self.filter_info = self.get_filter_dict()
if self.ip_type:
self.server_ip_info = self.get_server_ip_dict()
if self.server_domain:
self.server_domain_info = self.get_server_domain_dict()
self.get_existing()
self.get_proposed()
if self.info_center_enable or self.packet_priority or self.suppress_enable:
self.merge_syslog_global()
if self.logfile_max_num or self.logfile_max_size:
self.merge_syslog_logfile()
if self.server_ip:
if not self.ip_type:
self.module.fail_json(
                    msg='Error: ip_type and server_ip must exist at the same time.')
if self.ip_type:
if not self.server_ip:
self.module.fail_json(
                    msg='Error: ip_type and server_ip must exist at the same time.')
if self.ip_type or self.server_domain or self.channel_id or self.filter_feature_name:
if self.ip_type and self.server_domain:
self.module.fail_json(
                    msg='Error: ip_type and server_domain cannot exist at the same time.')
if self.channel_id and self.channel_name:
self.module.fail_json(
                    msg='Error: channel_id and channel_name cannot exist at the same time.')
if self.ssl_policy_name:
if self.transport_mode == "udp":
self.module.fail_json(
msg='Error: transport_mode: udp does not support ssl_policy.')
if not self.transport_mode:
self.module.fail_json(
                        msg='Error: transport_mode and ssl_policy_name must exist at the same time.')
if self.ip_type == "ipv6":
if self.vrf_name and self.vrf_name != "_public_":
self.module.fail_json(
                        msg='Error: ip_type ipv6 only supports the default vpn _public_.')
if self.is_default_vpn is True:
if self.vrf_name:
if self.vrf_name != "_public_":
self.module.fail_json(
msg='Error: vrf_name should be _public_ when is_default_vpn is True.')
else:
self.vrf_name = "_public_"
else:
if self.vrf_name == "_public_":
self.module.fail_json(
msg='Error: The default vpn value is _public_, but is_default_vpn is False.')
if self.state == "present":
# info-center channel channel-number name channel-name
if self.channel_id and self.channel_cfg_name:
self.config_merge_syslog_channel(
self.channel_id, self.channel_cfg_name)
# info-center { console | logfile | monitor | snmp | logbuffer
# | trapbuffer } channel channel-number
if self.channel_out_direct and self.channel_id:
self.config_merge_out_direct(
self.channel_out_direct, self.channel_id)
# info-center filter-id bymodule-alias modname alias
if self.filter_feature_name and self.filter_log_name:
self.config_merge_filter(
self.filter_feature_name, self.filter_log_name)
if self.ip_type and self.server_ip:
if not self.vrf_name:
self.vrf_name = "_public_"
if self.check_need_loghost_cfg():
self.config_merge_loghost()
if self.server_domain:
if not self.vrf_name:
self.vrf_name = "_public_"
if self.check_need_loghost_cfg():
self.config_merge_loghost()
elif self.state == "absent":
if self.channel_id:
self.delete_merge_syslog_channel(
self.channel_id, self.channel_cfg_name)
if self.channel_out_direct:
self.delete_merge_out_direct(
self.channel_out_direct, self.channel_id)
if self.filter_feature_name and self.filter_log_name:
self.delete_merge_filter(
self.filter_feature_name, self.filter_log_name)
if self.ip_type and self.server_ip:
if not self.vrf_name:
self.vrf_name = "_public_"
if self.check_need_loghost_cfg():
self.delete_merge_loghost()
if self.server_domain:
if not self.vrf_name:
self.vrf_name = "_public_"
if self.check_need_loghost_cfg():
self.delete_merge_loghost()
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
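    # Shape of the returned facts (values are illustrative, not from a device):
    #   {"changed": True, "proposed": {...}, "existing": {...},
    #    "end_state": {...}, "updates": ["info-center logfile size 16"]}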
def main():
"""Module main"""
argument_spec = dict(
info_center_enable=dict(choices=['true', 'false']),
packet_priority=dict(type='str'),
suppress_enable=dict(choices=['true', 'false']),
logfile_max_num=dict(type='str'),
logfile_max_size=dict(choices=['4', '8', '16', '32']),
channel_id=dict(type='str'),
channel_cfg_name=dict(type='str'),
channel_out_direct=dict(choices=['console', 'monitor',
'trapbuffer', 'logbuffer', 'snmp', 'logfile']),
filter_feature_name=dict(type='str'),
filter_log_name=dict(type='str'),
ip_type=dict(choices=['ipv4', 'ipv6']),
server_ip=dict(type='str'),
server_domain=dict(type='str'),
is_default_vpn=dict(default=False, type='bool'),
vrf_name=dict(type='str'),
level=dict(choices=['emergencies', 'alert', 'critical', 'error', 'warning', 'notification',
'informational', 'debugging']),
server_port=dict(type='str'),
facility=dict(choices=['local0', 'local1', 'local2',
'local3', 'local4', 'local5', 'local6', 'local7']),
channel_name=dict(type='str'),
timestamp=dict(choices=['UTC', 'localtime']),
transport_mode=dict(choices=['tcp', 'udp']),
ssl_policy_name=dict(type='str'),
source_ip=dict(type='str'),
state=dict(choices=['present', 'absent'], default='present')
)
argument_spec.update(ce_argument_spec)
module = InfoCenterGlobal(argument_spec)
module.work()
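# A hedged playbook sketch; the task name ce_info_center_global is assumed
# (it is not defined in this file) and the parameter values are illustrative:
#   - ce_info_center_global:
#       logfile_max_num: "300"
#       logfile_max_size: "16"
#       state: present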
if __name__ == '__main__':
main()
| gpl-3.0 |
ThirdProject/android_external_chromium_org | tools/bisect-builds.py | 23 | 33494 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Snapshot Build Bisect Tool
This script bisects a snapshot archive using binary search. It starts at
a bad revision (it will try to guess HEAD) and asks for a last known-good
revision. It will then binary search across this revision range by downloading,
unzipping, and opening Chromium for you. After testing the specific revision,
it will ask you whether it is good or bad before continuing the search.
"""
# The root URL for storage.
CHROMIUM_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-browser-snapshots'
WEBKIT_BASE_URL = 'http://commondatastorage.googleapis.com/chromium-webkit-snapshots'
# The root URL for official builds.
OFFICIAL_BASE_URL = 'http://master.chrome.corp.google.com/official_builds'
# Changelogs URL.
CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
'perf/dashboard/ui/changelog.html?' \
'url=/trunk/src&range=%d%%3A%d'
# Official Changelogs URL.
OFFICIAL_CHANGELOG_URL = 'http://omahaproxy.appspot.com/'\
'changelog?old_version=%s&new_version=%s'
# DEPS file URL.
DEPS_FILE = 'http://src.chromium.org/viewvc/chrome/trunk/src/DEPS?revision=%d'
# Blink Changelogs URL.
BLINK_CHANGELOG_URL = 'http://build.chromium.org/f/chromium/' \
'perf/dashboard/ui/changelog_blink.html?' \
'url=/trunk&range=%d%%3A%d'
DONE_MESSAGE_GOOD_MIN = 'You are probably looking for a change made after %s ' \
'(known good), but no later than %s (first known bad).'
DONE_MESSAGE_GOOD_MAX = 'You are probably looking for a change made after %s ' \
'(known bad), but no later than %s (first known good).'
###############################################################################
import json
import optparse
import os
import re
import shlex
import shutil
import subprocess
import sys
import tempfile
import threading
import urllib
from distutils.version import LooseVersion
from xml.etree import ElementTree
import zipfile
class PathContext(object):
"""A PathContext is used to carry the information used to construct URLs and
paths when dealing with the storage server and archives."""
def __init__(self, base_url, platform, good_revision, bad_revision,
is_official, is_aura, flash_path = None):
super(PathContext, self).__init__()
# Store off the input parameters.
self.base_url = base_url
self.platform = platform # What's passed in to the '-a/--archive' option.
self.good_revision = good_revision
self.bad_revision = bad_revision
self.is_official = is_official
self.is_aura = is_aura
self.flash_path = flash_path
# The name of the ZIP file in a revision directory on the server.
self.archive_name = None
# Set some internal members:
# _listing_platform_dir = Directory that holds revisions. Ends with a '/'.
# _archive_extract_dir = Uncompressed directory in the archive_name file.
# _binary_name = The name of the executable to run.
if self.platform in ('linux', 'linux64', 'linux-arm'):
self._binary_name = 'chrome'
elif self.platform == 'mac':
self.archive_name = 'chrome-mac.zip'
self._archive_extract_dir = 'chrome-mac'
elif self.platform == 'win':
self.archive_name = 'chrome-win32.zip'
self._archive_extract_dir = 'chrome-win32'
self._binary_name = 'chrome.exe'
else:
raise Exception('Invalid platform: %s' % self.platform)
if is_official:
if self.platform == 'linux':
self._listing_platform_dir = 'precise32bit/'
self.archive_name = 'chrome-precise32bit.zip'
self._archive_extract_dir = 'chrome-precise32bit'
elif self.platform == 'linux64':
self._listing_platform_dir = 'precise64bit/'
self.archive_name = 'chrome-precise64bit.zip'
self._archive_extract_dir = 'chrome-precise64bit'
elif self.platform == 'mac':
self._listing_platform_dir = 'mac/'
self._binary_name = 'Google Chrome.app/Contents/MacOS/Google Chrome'
elif self.platform == 'win':
if self.is_aura:
self._listing_platform_dir = 'win-aura/'
else:
self._listing_platform_dir = 'win/'
else:
if self.platform in ('linux', 'linux64', 'linux-arm'):
self.archive_name = 'chrome-linux.zip'
self._archive_extract_dir = 'chrome-linux'
if self.platform == 'linux':
self._listing_platform_dir = 'Linux/'
elif self.platform == 'linux64':
self._listing_platform_dir = 'Linux_x64/'
elif self.platform == 'linux-arm':
self._listing_platform_dir = 'Linux_ARM_Cross-Compile/'
elif self.platform == 'mac':
self._listing_platform_dir = 'Mac/'
self._binary_name = 'Chromium.app/Contents/MacOS/Chromium'
elif self.platform == 'win':
self._listing_platform_dir = 'Win/'
def GetListingURL(self, marker=None):
"""Returns the URL for a directory listing, with an optional marker."""
marker_param = ''
if marker:
marker_param = '&marker=' + str(marker)
return self.base_url + '/?delimiter=/&prefix=' + \
self._listing_platform_dir + marker_param
def GetDownloadURL(self, revision):
"""Gets the download URL for a build archive of a specific revision."""
if self.is_official:
return "%s/%s/%s%s" % (
OFFICIAL_BASE_URL, revision, self._listing_platform_dir,
self.archive_name)
else:
return "%s/%s%s/%s" % (self.base_url, self._listing_platform_dir,
revision, self.archive_name)
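  # For example (revision is illustrative), a non-official linux64 snapshot
  # resolves to:
  #   <base_url>/Linux_x64/100000/chrome-linux.zip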
def GetLastChangeURL(self):
"""Returns a URL to the LAST_CHANGE file."""
return self.base_url + '/' + self._listing_platform_dir + 'LAST_CHANGE'
def GetLaunchPath(self):
"""Returns a relative path (presumably from the archive extraction location)
that is used to run the executable."""
return os.path.join(self._archive_extract_dir, self._binary_name)
def IsAuraBuild(self, build):
"""Check the given build is Aura."""
return build.split('.')[3] == '1'
def IsASANBuild(self, build):
"""Check the given build is ASAN build."""
return build.split('.')[3] == '2'
def ParseDirectoryIndex(self):
"""Parses the Google Storage directory listing into a list of revision
numbers."""
def _FetchAndParse(url):
"""Fetches a URL and returns a 2-Tuple of ([revisions], next-marker). If
next-marker is not None, then the listing is a partial listing and another
fetch should be performed with next-marker being the marker= GET
parameter."""
handle = urllib.urlopen(url)
document = ElementTree.parse(handle)
# All nodes in the tree are namespaced. Get the root's tag name to extract
# the namespace. Etree does namespaces as |{namespace}tag|.
root_tag = document.getroot().tag
end_ns_pos = root_tag.find('}')
if end_ns_pos == -1:
raise Exception("Could not locate end namespace for directory index")
namespace = root_tag[:end_ns_pos + 1]
# Find the prefix (_listing_platform_dir) and whether or not the list is
# truncated.
prefix_len = len(document.find(namespace + 'Prefix').text)
next_marker = None
is_truncated = document.find(namespace + 'IsTruncated')
if is_truncated is not None and is_truncated.text.lower() == 'true':
next_marker = document.find(namespace + 'NextMarker').text
# Get a list of all the revisions.
all_prefixes = document.findall(namespace + 'CommonPrefixes/' +
namespace + 'Prefix')
# The <Prefix> nodes have content of the form of
# |_listing_platform_dir/revision/|. Strip off the platform dir and the
# trailing slash to just have a number.
revisions = []
for prefix in all_prefixes:
revnum = prefix.text[prefix_len:-1]
try:
revnum = int(revnum)
revisions.append(revnum)
except ValueError:
pass
return (revisions, next_marker)
# Fetch the first list of revisions.
(revisions, next_marker) = _FetchAndParse(self.GetListingURL())
# If the result list was truncated, refetch with the next marker. Do this
# until an entire directory listing is done.
while next_marker:
next_url = self.GetListingURL(next_marker)
(new_revisions, next_marker) = _FetchAndParse(next_url)
revisions.extend(new_revisions)
return revisions
def GetRevList(self):
"""Gets the list of revision numbers between self.good_revision and
self.bad_revision."""
# Download the revlist and filter for just the range between good and bad.
minrev = min(self.good_revision, self.bad_revision)
maxrev = max(self.good_revision, self.bad_revision)
revlist_all = map(int, self.ParseDirectoryIndex())
revlist = [x for x in revlist_all if x >= int(minrev) and x <= int(maxrev)]
revlist.sort()
# Set good and bad revisions to be legit revisions.
if revlist:
if self.good_revision < self.bad_revision:
self.good_revision = revlist[0]
self.bad_revision = revlist[-1]
else:
self.bad_revision = revlist[0]
self.good_revision = revlist[-1]
# Fix chromium rev so that the deps blink revision matches REVISIONS file.
if self.base_url == WEBKIT_BASE_URL:
revlist_all.sort()
self.good_revision = FixChromiumRevForBlink(revlist,
revlist_all,
self,
self.good_revision)
self.bad_revision = FixChromiumRevForBlink(revlist,
revlist_all,
self,
self.bad_revision)
return revlist
def GetOfficialBuildsList(self):
"""Gets the list of official build numbers between self.good_revision and
self.bad_revision."""
# Download the revlist and filter for just the range between good and bad.
minrev = min(self.good_revision, self.bad_revision)
maxrev = max(self.good_revision, self.bad_revision)
handle = urllib.urlopen(OFFICIAL_BASE_URL)
dirindex = handle.read()
handle.close()
build_numbers = re.findall(r'<a href="([0-9][0-9].*)/">', dirindex)
final_list = []
i = 0
parsed_build_numbers = [LooseVersion(x) for x in build_numbers]
for build_number in sorted(parsed_build_numbers):
path = OFFICIAL_BASE_URL + '/' + str(build_number) + '/' + \
self._listing_platform_dir + self.archive_name
i = i + 1
try:
connection = urllib.urlopen(path)
connection.close()
if build_number > maxrev:
break
if build_number >= minrev:
# If we are bisecting Aura, we want to include only builds which
# ends with ".1".
if self.is_aura:
if self.IsAuraBuild(str(build_number)):
final_list.append(str(build_number))
# If we are bisecting only official builds (without --aura),
          # we cannot include builds which end with '.1' or '.2', since
          # they have a different folder hierarchy inside.
elif (not self.IsAuraBuild(str(build_number)) and
not self.IsASANBuild(str(build_number))):
final_list.append(str(build_number))
      except IOError, e:
        # urllib.urlopen raises IOError on failure; the urllib module has no
        # HTTPError attribute (that lives in urllib2).
        pass
return final_list
def UnzipFilenameToDir(filename, directory):
"""Unzip |filename| to |directory|."""
cwd = os.getcwd()
if not os.path.isabs(filename):
filename = os.path.join(cwd, filename)
zf = zipfile.ZipFile(filename)
# Make base.
if not os.path.isdir(directory):
os.mkdir(directory)
os.chdir(directory)
# Extract files.
for info in zf.infolist():
name = info.filename
if name.endswith('/'): # dir
if not os.path.isdir(name):
os.makedirs(name)
else: # file
directory = os.path.dirname(name)
if not os.path.isdir(directory):
os.makedirs(directory)
out = open(name, 'wb')
out.write(zf.read(name))
out.close()
# Set permissions. Permission info in external_attr is shifted 16 bits.
os.chmod(name, info.external_attr >> 16L)
os.chdir(cwd)
def FetchRevision(context, rev, filename, quit_event=None, progress_event=None):
"""Downloads and unzips revision |rev|.
@param context A PathContext instance.
@param rev The Chromium revision number/tag to download.
@param filename The destination for the downloaded file.
@param quit_event A threading.Event which will be set by the master thread to
indicate that the download should be aborted.
@param progress_event A threading.Event which will be set by the master thread
to indicate that the progress of the download should be
displayed.
"""
def ReportHook(blocknum, blocksize, totalsize):
if quit_event and quit_event.isSet():
raise RuntimeError("Aborting download of revision %s" % str(rev))
if progress_event and progress_event.isSet():
size = blocknum * blocksize
if totalsize == -1: # Total size not known.
progress = "Received %d bytes" % size
else:
size = min(totalsize, size)
progress = "Received %d of %d bytes, %.2f%%" % (
size, totalsize, 100.0 * size / totalsize)
# Send a \r to let all progress messages use just one line of output.
sys.stdout.write("\r" + progress)
sys.stdout.flush()
download_url = context.GetDownloadURL(rev)
try:
urllib.urlretrieve(download_url, filename, ReportHook)
if progress_event and progress_event.isSet():
print
except RuntimeError, e:
pass
def RunRevision(context, revision, zipfile, profile, num_runs, command, args):
"""Given a zipped revision, unzip it and run the test."""
print "Trying revision %s..." % str(revision)
# Create a temp directory and unzip the revision into it.
cwd = os.getcwd()
tempdir = tempfile.mkdtemp(prefix='bisect_tmp')
UnzipFilenameToDir(zipfile, tempdir)
os.chdir(tempdir)
# Run the build as many times as specified.
testargs = ['--user-data-dir=%s' % profile] + args
# The sandbox must be run as root on Official Chrome, so bypass it.
if ((context.is_official or context.flash_path) and
context.platform.startswith('linux')):
testargs.append('--no-sandbox')
if context.flash_path:
testargs.append('--ppapi-flash-path=%s' % context.flash_path)
      # We have to pass a large enough Flash version, which currently need not
# be correct. Instead of requiring the user of the script to figure out and
# pass the correct version we just spoof it.
testargs.append('--ppapi-flash-version=99.9.999.999')
runcommand = []
for token in shlex.split(command):
if token == "%a":
runcommand.extend(testargs)
else:
runcommand.append( \
token.replace('%p', context.GetLaunchPath()) \
.replace('%s', ' '.join(testargs)))
for i in range(0, num_runs):
subproc = subprocess.Popen(runcommand,
bufsize=-1,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
(stdout, stderr) = subproc.communicate()
os.chdir(cwd)
try:
shutil.rmtree(tempdir, True)
except Exception, e:
pass
return (subproc.returncode, stdout, stderr)
def AskIsGoodBuild(rev, official_builds, status, stdout, stderr):
"""Ask the user whether build |rev| is good or bad."""
# Loop until we get a response that we can parse.
while True:
response = raw_input('Revision %s is ' \
'[(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: ' %
str(rev))
if response and response in ('g', 'b', 'r', 'u'):
return response
if response and response == 'q':
raise SystemExit()
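# A sample prompt/answer round (revision number is illustrative):
#   Revision 100100 is [(g)ood/(b)ad/(r)etry/(u)nknown/(q)uit]: g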
class DownloadJob(object):
"""DownloadJob represents a task to download a given Chromium revision."""
def __init__(self, context, name, rev, zipfile):
super(DownloadJob, self).__init__()
# Store off the input parameters.
self.context = context
self.name = name
self.rev = rev
self.zipfile = zipfile
self.quit_event = threading.Event()
self.progress_event = threading.Event()
def Start(self):
"""Starts the download."""
fetchargs = (self.context,
self.rev,
self.zipfile,
self.quit_event,
self.progress_event)
self.thread = threading.Thread(target=FetchRevision,
name=self.name,
args=fetchargs)
self.thread.start()
def Stop(self):
"""Stops the download which must have been started previously."""
self.quit_event.set()
self.thread.join()
os.unlink(self.zipfile)
def WaitFor(self):
"""Prints a message and waits for the download to complete. The download
must have been started previously."""
print "Downloading revision %s..." % str(self.rev)
self.progress_event.set() # Display progress of download.
self.thread.join()
def Bisect(base_url,
platform,
official_builds,
is_aura,
good_rev=0,
bad_rev=0,
num_runs=1,
command="%p %a",
try_args=(),
profile=None,
flash_path=None,
evaluate=AskIsGoodBuild):
"""Given known good and known bad revisions, run a binary search on all
archived revisions to determine the last known good revision.
@param platform Which build to download/run ('mac', 'win', 'linux64', etc.).
@param official_builds Specify build type (Chromium or Official build).
@param good_rev Number/tag of the known good revision.
@param bad_rev Number/tag of the known bad revision.
@param num_runs Number of times to run each build for asking good/bad.
@param try_args A tuple of arguments to pass to the test application.
@param profile The name of the user profile to run with.
@param evaluate A function which returns 'g' if the argument build is good,
'b' if it's bad or 'u' if unknown.
Threading is used to fetch Chromium revisions in the background, speeding up
the user's experience. For example, suppose the bounds of the search are
good_rev=0, bad_rev=100. The first revision to be checked is 50. Depending on
whether revision 50 is good or bad, the next revision to check will be either
25 or 75. So, while revision 50 is being checked, the script will download
revisions 25 and 75 in the background. Once the good/bad verdict on rev 50 is
known:
- If rev 50 is good, the download of rev 25 is cancelled, and the next test
is run on rev 75.
- If rev 50 is bad, the download of rev 75 is cancelled, and the next test
is run on rev 25.
"""
if not profile:
profile = 'profile'
context = PathContext(base_url, platform, good_rev, bad_rev,
official_builds, is_aura, flash_path)
cwd = os.getcwd()
print "Downloading list of known revisions..."
_GetDownloadPath = lambda rev: os.path.join(cwd,
'%s-%s' % (str(rev), context.archive_name))
if official_builds:
revlist = context.GetOfficialBuildsList()
else:
revlist = context.GetRevList()
# Get a list of revisions to bisect across.
if len(revlist) < 2: # Don't have enough builds to bisect.
msg = 'We don\'t have enough builds to bisect. revlist: %s' % revlist
raise RuntimeError(msg)
# Figure out our bookends and first pivot point; fetch the pivot revision.
minrev = 0
maxrev = len(revlist) - 1
pivot = maxrev / 2
rev = revlist[pivot]
zipfile = _GetDownloadPath(rev)
fetch = DownloadJob(context, 'initial_fetch', rev, zipfile)
fetch.Start()
fetch.WaitFor()
# Binary search time!
while fetch and fetch.zipfile and maxrev - minrev > 1:
if bad_rev < good_rev:
min_str, max_str = "bad", "good"
else:
min_str, max_str = "good", "bad"
print 'Bisecting range [%s (%s), %s (%s)].' % (revlist[minrev], min_str, \
revlist[maxrev], max_str)
# Pre-fetch next two possible pivots
# - down_pivot is the next revision to check if the current revision turns
# out to be bad.
# - up_pivot is the next revision to check if the current revision turns
# out to be good.
down_pivot = int((pivot - minrev) / 2) + minrev
down_fetch = None
if down_pivot != pivot and down_pivot != minrev:
down_rev = revlist[down_pivot]
down_fetch = DownloadJob(context, 'down_fetch', down_rev,
_GetDownloadPath(down_rev))
down_fetch.Start()
up_pivot = int((maxrev - pivot) / 2) + pivot
up_fetch = None
if up_pivot != pivot and up_pivot != maxrev:
up_rev = revlist[up_pivot]
up_fetch = DownloadJob(context, 'up_fetch', up_rev,
_GetDownloadPath(up_rev))
up_fetch.Start()
# Run test on the pivot revision.
status = None
stdout = None
stderr = None
try:
(status, stdout, stderr) = RunRevision(context,
rev,
fetch.zipfile,
profile,
num_runs,
command,
try_args)
except Exception, e:
print >> sys.stderr, e
# Call the evaluate function to see if the current revision is good or bad.
# On that basis, kill one of the background downloads and complete the
# other, as described in the comments above.
try:
answer = evaluate(rev, official_builds, status, stdout, stderr)
if answer == 'g' and good_rev < bad_rev or \
answer == 'b' and bad_rev < good_rev:
fetch.Stop()
minrev = pivot
if down_fetch:
down_fetch.Stop() # Kill the download of the older revision.
fetch = None
if up_fetch:
up_fetch.WaitFor()
pivot = up_pivot
fetch = up_fetch
elif answer == 'b' and good_rev < bad_rev or \
answer == 'g' and bad_rev < good_rev:
fetch.Stop()
maxrev = pivot
if up_fetch:
up_fetch.Stop() # Kill the download of the newer revision.
fetch = None
if down_fetch:
down_fetch.WaitFor()
pivot = down_pivot
fetch = down_fetch
elif answer == 'r':
pass # Retry requires no changes.
elif answer == 'u':
# Nuke the revision from the revlist and choose a new pivot.
fetch.Stop()
revlist.pop(pivot)
maxrev -= 1 # Assumes maxrev >= pivot.
if maxrev - minrev > 1:
# Alternate between using down_pivot or up_pivot for the new pivot
# point, without affecting the range. Do this instead of setting the
# pivot to the midpoint of the new range because adjacent revisions
# are likely affected by the same issue that caused the (u)nknown
# response.
if up_fetch and down_fetch:
fetch = [up_fetch, down_fetch][len(revlist) % 2]
elif up_fetch:
fetch = up_fetch
else:
fetch = down_fetch
fetch.WaitFor()
if fetch == up_fetch:
pivot = up_pivot - 1 # Subtracts 1 because revlist was resized.
else:
pivot = down_pivot
zipfile = fetch.zipfile
if down_fetch and fetch != down_fetch:
down_fetch.Stop()
if up_fetch and fetch != up_fetch:
up_fetch.Stop()
else:
assert False, "Unexpected return value from evaluate(): " + answer
except SystemExit:
print "Cleaning up..."
for f in [_GetDownloadPath(revlist[down_pivot]),
_GetDownloadPath(revlist[up_pivot])]:
try:
os.unlink(f)
except OSError:
pass
sys.exit(0)
rev = revlist[pivot]
return (revlist[minrev], revlist[maxrev])
def GetBlinkDEPSRevisionForChromiumRevision(rev):
"""Returns the blink revision that was in REVISIONS file at
chromium revision |rev|."""
# . doesn't match newlines without re.DOTALL, so this is safe.
blink_re = re.compile(r'webkit_revision\D*(\d+)')
url = urllib.urlopen(DEPS_FILE % rev)
m = blink_re.search(url.read())
url.close()
if m:
return int(m.group(1))
else:
raise Exception('Could not get Blink revision for Chromium rev %d'
% rev)
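# The regex above matches a DEPS entry such as (value is hypothetical):
#   "webkit_revision": "165883",
# capturing 165883 as the blink revision.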
def GetBlinkRevisionForChromiumRevision(self, rev):
"""Returns the blink revision that was in REVISIONS file at
chromium revision |rev|."""
file_url = "%s/%s%d/REVISIONS" % (self.base_url,
self._listing_platform_dir, rev)
url = urllib.urlopen(file_url)
data = json.loads(url.read())
url.close()
if 'webkit_revision' in data:
return data['webkit_revision']
else:
raise Exception('Could not get blink revision for cr rev %d' % rev)
def FixChromiumRevForBlink(revisions_final, revisions, self, rev):
"""Returns the chromium revision that has the correct blink revision
  for a blink bisect. The DEPS and REVISIONS files might not match, since
  blink snapshots point to tip-of-tree blink.
Note: The revisions_final variable might get modified to include
additional revisions."""
blink_deps_rev = GetBlinkDEPSRevisionForChromiumRevision(rev)
while (GetBlinkRevisionForChromiumRevision(self, rev) > blink_deps_rev):
idx = revisions.index(rev)
if idx > 0:
rev = revisions[idx-1]
if rev not in revisions_final:
revisions_final.insert(0, rev)
revisions_final.sort()
return rev
def GetChromiumRevision(url):
"""Returns the chromium revision read from given URL."""
try:
# Location of the latest build revision number
return int(urllib.urlopen(url).read())
except Exception, e:
print('Could not determine latest revision. This could be bad...')
return 999999999
def main():
usage = ('%prog [options] [-- chromium-options]\n'
'Perform binary search on the snapshot builds to find a minimal\n'
'range of revisions where a behavior change happened. The\n'
'behaviors are described as "good" and "bad".\n'
'It is NOT assumed that the behavior of the later revision is\n'
'the bad one.\n'
'\n'
'Revision numbers should use\n'
' Official versions (e.g. 1.0.1000.0) for official builds. (-o)\n'
' SVN revisions (e.g. 123456) for chromium builds, from trunk.\n'
' Use base_trunk_revision from http://omahaproxy.appspot.com/\n'
' for earlier revs.\n'
' Chrome\'s about: build number and omahaproxy branch_revision\n'
           ' are incorrect; they are from branches.\n'
'\n'
'Tip: add "-- --no-first-run" to bypass the first run prompts.')
parser = optparse.OptionParser(usage=usage)
# Strangely, the default help output doesn't include the choice list.
choices = ['mac', 'win', 'linux', 'linux64', 'linux-arm']
# linux-chromiumos lacks a continuous archive http://crbug.com/78158
parser.add_option('-a', '--archive',
choices = choices,
help = 'The buildbot archive to bisect [%s].' %
'|'.join(choices))
parser.add_option('-o', action="store_true", dest='official_builds',
help = 'Bisect across official ' +
'Chrome builds (internal only) instead of ' +
'Chromium archives.')
parser.add_option('-b', '--bad', type = 'str',
help = 'A bad revision to start bisection. ' +
'May be earlier or later than the good revision. ' +
'Default is HEAD.')
parser.add_option('-f', '--flash_path', type = 'str',
help = 'Absolute path to a recent Adobe Pepper Flash ' +
'binary to be used in this bisection (e.g. ' +
'on Windows C:\...\pepflashplayer.dll and on Linux ' +
'/opt/google/chrome/PepperFlash/libpepflashplayer.so).')
parser.add_option('-g', '--good', type = 'str',
help = 'A good revision to start bisection. ' +
'May be earlier or later than the bad revision. ' +
'Default is 0.')
parser.add_option('-p', '--profile', '--user-data-dir', type = 'str',
help = 'Profile to use; this will not reset every run. ' +
'Defaults to a clean profile.', default = 'profile')
parser.add_option('-t', '--times', type = 'int',
help = 'Number of times to run each build before asking ' +
'if it\'s good or bad. Temporary profiles are reused.',
default = 1)
parser.add_option('-c', '--command', type = 'str',
help = 'Command to execute. %p and %a refer to Chrome ' +
'executable and specified extra arguments respectively. ' +
'Use %s to specify all extra arguments as one string. ' +
'Defaults to "%p %a". Note that any extra paths ' +
'specified should be absolute.',
default = '%p %a')
parser.add_option('-l', '--blink', action='store_true',
help = 'Use Blink bisect instead of Chromium. ')
parser.add_option('--aura',
dest='aura',
action='store_true',
default=False,
help='Allow the script to bisect aura builds')
(opts, args) = parser.parse_args()
if opts.archive is None:
print 'Error: missing required parameter: --archive'
print
parser.print_help()
return 1
if opts.aura:
if opts.archive != 'win' or not opts.official_builds:
print 'Error: Aura is supported only on Windows platform '\
'and official builds.'
return 1
if opts.blink:
base_url = WEBKIT_BASE_URL
else:
base_url = CHROMIUM_BASE_URL
# Create the context. Initialize 0 for the revisions as they are set below.
context = PathContext(base_url, opts.archive, 0, 0,
opts.official_builds, opts.aura, None)
# Pick a starting point, try to get HEAD for this.
if opts.bad:
bad_rev = opts.bad
else:
bad_rev = '999.0.0.0'
if not opts.official_builds:
bad_rev = GetChromiumRevision(context.GetLastChangeURL())
# Find out when we were good.
if opts.good:
good_rev = opts.good
else:
good_rev = '0.0.0.0' if opts.official_builds else 0
if opts.flash_path:
flash_path = opts.flash_path
msg = 'Could not find Flash binary at %s' % flash_path
assert os.path.exists(flash_path), msg
if opts.official_builds:
good_rev = LooseVersion(good_rev)
bad_rev = LooseVersion(bad_rev)
else:
good_rev = int(good_rev)
bad_rev = int(bad_rev)
if opts.times < 1:
print('Number of times to run (%d) must be greater than or equal to 1.' %
opts.times)
parser.print_help()
return 1
(min_chromium_rev, max_chromium_rev) = Bisect(
base_url, opts.archive, opts.official_builds, opts.aura, good_rev,
bad_rev, opts.times, opts.command, args, opts.profile, opts.flash_path)
# Get corresponding blink revisions.
try:
min_blink_rev = GetBlinkRevisionForChromiumRevision(context,
min_chromium_rev)
max_blink_rev = GetBlinkRevisionForChromiumRevision(context,
max_chromium_rev)
except Exception, e:
# Silently ignore the failure.
min_blink_rev, max_blink_rev = 0, 0
if opts.blink:
# We're done. Let the user know the results in an official manner.
if good_rev > bad_rev:
print DONE_MESSAGE_GOOD_MAX % (str(min_blink_rev), str(max_blink_rev))
else:
print DONE_MESSAGE_GOOD_MIN % (str(min_blink_rev), str(max_blink_rev))
print 'BLINK CHANGELOG URL:'
print ' ' + BLINK_CHANGELOG_URL % (max_blink_rev, min_blink_rev)
else:
# We're done. Let the user know the results in an official manner.
if good_rev > bad_rev:
print DONE_MESSAGE_GOOD_MAX % (str(min_chromium_rev),
str(max_chromium_rev))
else:
print DONE_MESSAGE_GOOD_MIN % (str(min_chromium_rev),
str(max_chromium_rev))
if min_blink_rev != max_blink_rev:
print ("NOTE: There is a Blink roll in the range, "
"you might also want to do a Blink bisect.")
print 'CHANGELOG URL:'
if opts.official_builds:
print OFFICIAL_CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
else:
print ' ' + CHANGELOG_URL % (min_chromium_rev, max_chromium_rev)
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
vishnu-kumar/PeformanceFramework | rally/plugins/openstack/context/cleanup/context.py | 10 | 3205 | # Copyright 2014: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
from rally.common.i18n import _
from rally.common import log as logging
from rally.common import utils as rutils
from rally import consts
from rally import exceptions
from rally.plugins.openstack.context.cleanup import manager
from rally.task import context
LOG = logging.getLogger(__name__)
class NoSuchCleanupResources(exceptions.RallyException):
msg_fmt = _("Missing cleanup resource managers: %(message)s")
class CleanupMixin(object):
CONFIG_SCHEMA = {
"type": "array",
"$schema": consts.JSON_SCHEMA,
"items": {
"type": "string",
},
"additionalProperties": False
}
def setup(self):
pass
# NOTE(amaretskiy): Set order to run this just before UserCleanup
@context.configure(name="admin_cleanup", order=(sys.maxsize - 1), hidden=True)
class AdminCleanup(CleanupMixin, context.Context):
"""Context class for admin resources cleanup."""
@classmethod
def validate(cls, config, non_hidden=False):
super(AdminCleanup, cls).validate(config, non_hidden)
missing = set(config)
missing -= manager.list_resource_names(admin_required=True)
missing = ", ".join(missing)
if missing:
LOG.info(_("Couldn't find cleanup resource managers: %s")
% missing)
raise NoSuchCleanupResources(missing)
@rutils.log_task_wrapper(LOG.info, _("admin resources cleanup"))
def cleanup(self):
manager.cleanup(names=self.config,
admin_required=True,
admin=self.context["admin"],
users=self.context.get("users", []))
# NOTE(amaretskiy): Set maximum order to run this last
@context.configure(name="cleanup", order=sys.maxsize, hidden=True)
class UserCleanup(CleanupMixin, context.Context):
"""Context class for user resources cleanup."""
@classmethod
def validate(cls, config, non_hidden=False):
super(UserCleanup, cls).validate(config, non_hidden)
missing = set(config)
missing -= manager.list_resource_names(admin_required=False)
missing = ", ".join(missing)
if missing:
LOG.info(_("Couldn't find cleanup resource managers: %s")
% missing)
raise NoSuchCleanupResources(missing)
@rutils.log_task_wrapper(LOG.info, _("user resources cleanup"))
def cleanup(self):
manager.cleanup(names=self.config,
admin_required=False,
users=self.context.get("users", []))
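# A hedged config sketch: both contexts accept a JSON array of resource-manager
# names, e.g. {"admin_cleanup": ["keystone"]} or {"cleanup": ["nova"]}; the names
# here are illustrative -- valid ones come from manager.list_resource_names().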
| apache-2.0 |
YanTangZhai/tf | tensorflow/python/kernel_tests/string_to_number_op_test.py | 9 | 2900 | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for StringToNumber op from parsing_ops."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.python.platform
import tensorflow as tf
_ERROR_MESSAGE = "StringToNumberOp could not correctly convert string: "
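# Minimal usage sketch of the op under test (values are illustrative):
#   tf.string_to_number(tf.constant(["1.5", "-10"]), out_type=tf.float32)
#   # evaluates to [1.5, -10.0] in a session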
class StringToNumberOpTest(tf.test.TestCase):
def testToFloat(self):
with self.test_session():
input_string = tf.placeholder(tf.string)
output = tf.string_to_number(
input_string,
out_type=tf.float32)
result = output.eval(feed_dict={
input_string: ["0",
"3",
"-1",
"1.12",
"0xF",
" -10.5",
"3.40282e+38",
# The next two exceed maximum value for float, so we
# expect +/-INF to be returned instead.
"3.40283e+38",
"-3.40283e+38",
"NAN",
"INF"]
})
self.assertAllClose([0, 3, -1, 1.12, 0xF, -10.5, 3.40282e+38,
float("INF"), float("-INF"), float("NAN"),
float("INF")], result)
with self.assertRaisesOpError(_ERROR_MESSAGE + "10foobar"):
output.eval(feed_dict={input_string: ["10foobar"]})
def testToInt32(self):
with self.test_session():
input_string = tf.placeholder(tf.string)
output = tf.string_to_number(
input_string,
out_type=tf.int32)
result = output.eval(feed_dict={
input_string: ["0", "3", "-1", " -10", "-2147483648", "2147483647"]
})
self.assertAllEqual([0, 3, -1, -10, -2147483648, 2147483647], result)
with self.assertRaisesOpError(_ERROR_MESSAGE + "2.9"):
output.eval(feed_dict={input_string: ["2.9"]})
# The next two exceed maximum value of int32.
for in_string in ["-2147483649", "2147483648"]:
with self.assertRaisesOpError(_ERROR_MESSAGE + in_string):
output.eval(feed_dict={input_string: [in_string]})
if __name__ == "__main__":
tf.test.main()
| apache-2.0 |
Mhynlo/SickRage | lib/github/NamedUser.py | 71 | 22621 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2012 Steve English <steve.english@navetas.com> #
# Copyright 2012 Vincent Jacques <vincent@vincent-jacques.net> #
# Copyright 2012 Zearin <zearin@gonk.net> #
# Copyright 2013 AKFish <akfish@gmail.com> #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.PaginatedList
import github.Gist
import github.Repository
import github.NamedUser
import github.Plan
import github.Organization
import github.Event
class NamedUser(github.GithubObject.CompletableGithubObject):
"""
This class represents NamedUsers as returned for example by http://developer.github.com/v3/todo
"""
@property
def avatar_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._avatar_url)
return self._avatar_url.value
@property
def bio(self):
"""
:type: string
"""
self._completeIfNotSet(self._bio)
return self._bio.value
@property
def blog(self):
"""
:type: string
"""
self._completeIfNotSet(self._blog)
return self._blog.value
@property
def collaborators(self):
"""
:type: integer
"""
self._completeIfNotSet(self._collaborators)
return self._collaborators.value
@property
def company(self):
"""
:type: string
"""
self._completeIfNotSet(self._company)
return self._company.value
@property
def contributions(self):
"""
:type: integer
"""
self._completeIfNotSet(self._contributions)
return self._contributions.value
@property
def created_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._created_at)
return self._created_at.value
@property
def disk_usage(self):
"""
:type: integer
"""
self._completeIfNotSet(self._disk_usage)
return self._disk_usage.value
@property
def email(self):
"""
:type: string
"""
self._completeIfNotSet(self._email)
return self._email.value
@property
def events_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._events_url)
return self._events_url.value
@property
def followers(self):
"""
:type: integer
"""
self._completeIfNotSet(self._followers)
return self._followers.value
@property
def followers_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._followers_url)
return self._followers_url.value
@property
def following(self):
"""
:type: integer
"""
self._completeIfNotSet(self._following)
return self._following.value
@property
def following_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._following_url)
return self._following_url.value
@property
def gists_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._gists_url)
return self._gists_url.value
@property
def gravatar_id(self):
"""
:type: string
"""
self._completeIfNotSet(self._gravatar_id)
return self._gravatar_id.value
@property
def hireable(self):
"""
:type: bool
"""
self._completeIfNotSet(self._hireable)
return self._hireable.value
@property
def html_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._html_url)
return self._html_url.value
@property
def id(self):
"""
:type: integer
"""
self._completeIfNotSet(self._id)
return self._id.value
@property
def location(self):
"""
:type: string
"""
self._completeIfNotSet(self._location)
return self._location.value
@property
def login(self):
"""
:type: string
"""
self._completeIfNotSet(self._login)
return self._login.value
@property
def name(self):
"""
:type: string
"""
self._completeIfNotSet(self._name)
return self._name.value
@property
def organizations_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._organizations_url)
return self._organizations_url.value
@property
def owned_private_repos(self):
"""
:type: integer
"""
self._completeIfNotSet(self._owned_private_repos)
return self._owned_private_repos.value
@property
def plan(self):
"""
:type: :class:`github.Plan.Plan`
"""
self._completeIfNotSet(self._plan)
return self._plan.value
@property
def private_gists(self):
"""
:type: integer
"""
self._completeIfNotSet(self._private_gists)
return self._private_gists.value
@property
def public_gists(self):
"""
:type: integer
"""
self._completeIfNotSet(self._public_gists)
return self._public_gists.value
@property
def public_repos(self):
"""
:type: integer
"""
self._completeIfNotSet(self._public_repos)
return self._public_repos.value
@property
def received_events_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._received_events_url)
return self._received_events_url.value
@property
def repos_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._repos_url)
return self._repos_url.value
@property
def starred_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._starred_url)
return self._starred_url.value
@property
def subscriptions_url(self):
"""
:type: string
"""
self._completeIfNotSet(self._subscriptions_url)
return self._subscriptions_url.value
@property
def total_private_repos(self):
"""
:type: integer
"""
self._completeIfNotSet(self._total_private_repos)
return self._total_private_repos.value
@property
def type(self):
"""
:type: string
"""
self._completeIfNotSet(self._type)
return self._type.value
@property
def updated_at(self):
"""
:type: datetime.datetime
"""
self._completeIfNotSet(self._updated_at)
return self._updated_at.value
@property
def url(self):
"""
:type: string
"""
self._completeIfNotSet(self._url)
return self._url.value
def get_events(self):
"""
:calls: `GET /users/:user/events <http://developer.github.com/v3/activity/events>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
"""
return github.PaginatedList.PaginatedList(
github.Event.Event,
self._requester,
self.url + "/events",
None
)
def get_followers(self):
"""
:calls: `GET /users/:user/followers <http://developer.github.com/v3/users/followers>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
return github.PaginatedList.PaginatedList(
NamedUser,
self._requester,
self.url + "/followers",
None
)
def get_following(self):
"""
:calls: `GET /users/:user/following <http://developer.github.com/v3/users/followers>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.NamedUser.NamedUser`
"""
return github.PaginatedList.PaginatedList(
NamedUser,
self._requester,
self.url + "/following",
None
)
def get_gists(self):
"""
:calls: `GET /users/:user/gists <http://developer.github.com/v3/gists>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Gist.Gist`
"""
return github.PaginatedList.PaginatedList(
github.Gist.Gist,
self._requester,
self.url + "/gists",
None
)
def get_keys(self):
"""
:calls: `GET /users/:user/keys <http://developer.github.com/v3/users/keys>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.UserKey.UserKey`
"""
return github.PaginatedList.PaginatedList(
github.UserKey.UserKey,
self._requester,
self.url + "/keys",
None
)
def get_orgs(self):
"""
:calls: `GET /users/:user/orgs <http://developer.github.com/v3/orgs>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Organization.Organization`
"""
return github.PaginatedList.PaginatedList(
github.Organization.Organization,
self._requester,
self.url + "/orgs",
None
)
def get_public_events(self):
"""
:calls: `GET /users/:user/events/public <http://developer.github.com/v3/activity/events>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
"""
return github.PaginatedList.PaginatedList(
github.Event.Event,
self._requester,
self.url + "/events/public",
None
)
def get_public_received_events(self):
"""
:calls: `GET /users/:user/received_events/public <http://developer.github.com/v3/activity/events>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
"""
return github.PaginatedList.PaginatedList(
github.Event.Event,
self._requester,
self.url + "/received_events/public",
None
)
def get_received_events(self):
"""
:calls: `GET /users/:user/received_events <http://developer.github.com/v3/activity/events>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Event.Event`
"""
return github.PaginatedList.PaginatedList(
github.Event.Event,
self._requester,
self.url + "/received_events",
None
)
def get_repo(self, name):
"""
:calls: `GET /repos/:owner/:repo <http://developer.github.com/v3/repos>`_
:param name: string
:rtype: :class:`github.Repository.Repository`
"""
assert isinstance(name, (str, unicode)), name
headers, data = self._requester.requestJsonAndCheck(
"GET",
"/repos/" + self.login + "/" + name
)
return github.Repository.Repository(self._requester, headers, data, completed=True)
def get_repos(self, type=github.GithubObject.NotSet):
"""
:calls: `GET /users/:user/repos <http://developer.github.com/v3/repos>`_
:param type: string
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
assert type is github.GithubObject.NotSet or isinstance(type, (str, unicode)), type
url_parameters = dict()
if type is not github.GithubObject.NotSet:
url_parameters["type"] = type
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/repos",
url_parameters
)
def get_starred(self):
"""
:calls: `GET /users/:user/starred <http://developer.github.com/v3/activity/starring>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/starred",
None
)
def get_subscriptions(self):
"""
:calls: `GET /users/:user/subscriptions <http://developer.github.com/v3/activity/watching>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/subscriptions",
None
)
def get_watched(self):
"""
:calls: `GET /users/:user/watched <http://developer.github.com/v3/activity/starring>`_
:rtype: :class:`github.PaginatedList.PaginatedList` of :class:`github.Repository.Repository`
"""
return github.PaginatedList.PaginatedList(
github.Repository.Repository,
self._requester,
self.url + "/watched",
None
)
def has_in_following(self, following):
"""
:calls: `GET /users/:user/following/:target_user <http://developer.github.com/v3/users/followers/#check-if-one-user-follows-another>`_
:param following: :class:`github.NamedUser.NamedUser`
:rtype: bool
"""
assert isinstance(following, github.NamedUser.NamedUser), following
status, headers, data = self._requester.requestJson(
"GET",
self.url + "/following/" + following._identity
)
return status == 204
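    # Usage sketch (hypothetical logins; assumes a github.Github client g):
    #   octocat = g.get_user("octocat")
    #   defunkt = g.get_user("defunkt")
    #   octocat.has_in_following(defunkt)   # True only on a 204 response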
@property
def _identity(self):
return self.login
def _initAttributes(self):
self._avatar_url = github.GithubObject.NotSet
self._bio = github.GithubObject.NotSet
self._blog = github.GithubObject.NotSet
self._collaborators = github.GithubObject.NotSet
self._company = github.GithubObject.NotSet
self._contributions = github.GithubObject.NotSet
self._created_at = github.GithubObject.NotSet
self._disk_usage = github.GithubObject.NotSet
self._email = github.GithubObject.NotSet
self._events_url = github.GithubObject.NotSet
self._followers = github.GithubObject.NotSet
self._followers_url = github.GithubObject.NotSet
self._following = github.GithubObject.NotSet
self._following_url = github.GithubObject.NotSet
self._gists_url = github.GithubObject.NotSet
self._gravatar_id = github.GithubObject.NotSet
self._hireable = github.GithubObject.NotSet
self._html_url = github.GithubObject.NotSet
self._id = github.GithubObject.NotSet
self._location = github.GithubObject.NotSet
self._login = github.GithubObject.NotSet
self._name = github.GithubObject.NotSet
self._organizations_url = github.GithubObject.NotSet
self._owned_private_repos = github.GithubObject.NotSet
self._plan = github.GithubObject.NotSet
self._private_gists = github.GithubObject.NotSet
self._public_gists = github.GithubObject.NotSet
self._public_repos = github.GithubObject.NotSet
self._received_events_url = github.GithubObject.NotSet
self._repos_url = github.GithubObject.NotSet
self._starred_url = github.GithubObject.NotSet
self._subscriptions_url = github.GithubObject.NotSet
self._total_private_repos = github.GithubObject.NotSet
self._type = github.GithubObject.NotSet
self._updated_at = github.GithubObject.NotSet
self._url = github.GithubObject.NotSet
def _useAttributes(self, attributes):
if "avatar_url" in attributes: # pragma no branch
self._avatar_url = self._makeStringAttribute(attributes["avatar_url"])
if "bio" in attributes: # pragma no branch
self._bio = self._makeStringAttribute(attributes["bio"])
if "blog" in attributes: # pragma no branch
self._blog = self._makeStringAttribute(attributes["blog"])
if "collaborators" in attributes: # pragma no branch
self._collaborators = self._makeIntAttribute(attributes["collaborators"])
if "company" in attributes: # pragma no branch
self._company = self._makeStringAttribute(attributes["company"])
if "contributions" in attributes: # pragma no branch
self._contributions = self._makeIntAttribute(attributes["contributions"])
if "created_at" in attributes: # pragma no branch
self._created_at = self._makeDatetimeAttribute(attributes["created_at"])
if "disk_usage" in attributes: # pragma no branch
self._disk_usage = self._makeIntAttribute(attributes["disk_usage"])
if "email" in attributes: # pragma no branch
self._email = self._makeStringAttribute(attributes["email"])
if "events_url" in attributes: # pragma no branch
self._events_url = self._makeStringAttribute(attributes["events_url"])
if "followers" in attributes: # pragma no branch
self._followers = self._makeIntAttribute(attributes["followers"])
if "followers_url" in attributes: # pragma no branch
self._followers_url = self._makeStringAttribute(attributes["followers_url"])
if "following" in attributes: # pragma no branch
self._following = self._makeIntAttribute(attributes["following"])
if "following_url" in attributes: # pragma no branch
self._following_url = self._makeStringAttribute(attributes["following_url"])
if "gists_url" in attributes: # pragma no branch
self._gists_url = self._makeStringAttribute(attributes["gists_url"])
if "gravatar_id" in attributes: # pragma no branch
self._gravatar_id = self._makeStringAttribute(attributes["gravatar_id"])
if "hireable" in attributes: # pragma no branch
self._hireable = self._makeBoolAttribute(attributes["hireable"])
if "html_url" in attributes: # pragma no branch
self._html_url = self._makeStringAttribute(attributes["html_url"])
if "id" in attributes: # pragma no branch
self._id = self._makeIntAttribute(attributes["id"])
if "location" in attributes: # pragma no branch
self._location = self._makeStringAttribute(attributes["location"])
if "login" in attributes: # pragma no branch
self._login = self._makeStringAttribute(attributes["login"])
if "name" in attributes: # pragma no branch
self._name = self._makeStringAttribute(attributes["name"])
if "organizations_url" in attributes: # pragma no branch
self._organizations_url = self._makeStringAttribute(attributes["organizations_url"])
if "owned_private_repos" in attributes: # pragma no branch
self._owned_private_repos = self._makeIntAttribute(attributes["owned_private_repos"])
if "plan" in attributes: # pragma no branch
self._plan = self._makeClassAttribute(github.Plan.Plan, attributes["plan"])
if "private_gists" in attributes: # pragma no branch
self._private_gists = self._makeIntAttribute(attributes["private_gists"])
if "public_gists" in attributes: # pragma no branch
self._public_gists = self._makeIntAttribute(attributes["public_gists"])
if "public_repos" in attributes: # pragma no branch
self._public_repos = self._makeIntAttribute(attributes["public_repos"])
if "received_events_url" in attributes: # pragma no branch
self._received_events_url = self._makeStringAttribute(attributes["received_events_url"])
if "repos_url" in attributes: # pragma no branch
self._repos_url = self._makeStringAttribute(attributes["repos_url"])
if "starred_url" in attributes: # pragma no branch
self._starred_url = self._makeStringAttribute(attributes["starred_url"])
if "subscriptions_url" in attributes: # pragma no branch
self._subscriptions_url = self._makeStringAttribute(attributes["subscriptions_url"])
if "total_private_repos" in attributes: # pragma no branch
self._total_private_repos = self._makeIntAttribute(attributes["total_private_repos"])
if "type" in attributes: # pragma no branch
self._type = self._makeStringAttribute(attributes["type"])
if "updated_at" in attributes: # pragma no branch
self._updated_at = self._makeDatetimeAttribute(attributes["updated_at"])
if "url" in attributes: # pragma no branch
self._url = self._makeStringAttribute(attributes["url"])
| gpl-3.0 |
wilsaj/openaddresses | apply-us-data.py | 13 | 2968 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv, json
from glob import glob
from os.path import basename, join
with open(join('us-data', 'codes.txt')) as f:
rows = list(csv.DictReader(f, dialect='excel-tab'))
codes = dict([(row['Postal Code'].lower(), row['State']) for row in rows])
with open(join('us-data', 'states.txt')) as f:
rows = list(csv.DictReader(f, dialect='excel-tab'))
states = dict([(row['Name'], row['State FIPS']) for row in rows])
with open(join('us-data', 'counties.txt')) as f:
counties = dict()
for row in csv.DictReader(f, dialect='excel-tab'):
key = row['State FIPS'], row['Name']
value = row['County FIPS'], row['Name']
counties[key] = value
# some key variations
if row['Name'].endswith(' County'):
counties[(row['State FIPS'], row['Name'][:-7])] = value
if row['Name'].endswith(' Parish'):
counties[(row['State FIPS'], row['Name'][:-7])] = value
if row['Name'].endswith(' Municipality'):
counties[(row['State FIPS'], row['Name'][:-13])] = value
# more key variations
for ((s, c), value) in list(counties.items()):
if c.startswith('St. '):
counties[(s, 'Saint '+c[4:])] = value
for ((s, c), value) in list(counties.items()):
counties[(s, c.lower())] = value
counties[(s, c.replace('-', ' '))] = value
counties[(s, c.replace('-', ' ').lower())] = value
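# Illustration (hypothetical FIPS values): after the variation loops above,
# keys such as ('22', 'St. Tammany Parish'), ('22', 'St. Tammany'),
# ('22', 'Saint Tammany') and their lowercase forms all map to the same record.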
for path in glob('sources/us/**/*.json'):
try:
with open(path) as f:
data = f.read()
info = json.loads(data)
except:
        print path, 'is invalid json'
raise
if 'county' not in info.get('coverage', {}):
continue
if 'US Census' in info['coverage']:
continue
print path, '...'
prefix = '\n "coverage": {\n '
assert prefix + '"' in data
state_name = codes[info['coverage']['state']]
state_fips = states[state_name]
county = info['coverage']['county']
# if type(county) is list or basename(path)[6:-5] != county.lower().replace(' ', '-'):
# continue
if type(county) is list:
county_names = [counties[(state_fips, c)] for c in county]
print info['coverage'], state_fips, state_name, county_names
continue
try:
if u'ñ' in county:
county_fips, county_name = counties[(state_fips, county.replace(u'ñ', 'n'))]
else:
county_fips, county_name = counties[(state_fips, county)]
except Exception as inst:
print " error generating county"
continue
geoid = state_fips + county_fips
census_dict = dict(geoid=geoid, name=county_name, state=state_name)
census_json = json.dumps(census_dict, sort_keys=True)
new_data = data.replace(prefix, '{0}"US Census": {1},\n '.format(prefix, census_json))
with open(path, 'w') as file:
file.write(new_data)
| bsd-3-clause |
jbenden/ansible | lib/ansible/modules/cloud/docker/docker_image.py | 9 | 21475 | #!/usr/bin/python
#
# Copyright 2016 Red Hat | Ansible
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: docker_image
short_description: Manage docker images.
version_added: "1.5"
description:
- Build, load or pull an image, making the image available for creating containers. Also supports tagging an
image into a repository and archiving an image to a .tar file.
options:
archive_path:
description:
- Use with state C(present) to archive an image to a .tar file.
required: false
version_added: "2.1"
load_path:
description:
- Use with state C(present) to load an image from a .tar file.
required: false
version_added: "2.2"
dockerfile:
description:
- Use with state C(present) to provide an alternate name for the Dockerfile to use when building an image.
default: Dockerfile
required: false
version_added: "2.0"
force:
description:
      - Use with state C(absent) to un-tag and remove all images matching the specified name. Use with state
C(present) to build, load or pull an image when the image already exists.
default: false
required: false
version_added: "2.1"
type: bool
http_timeout:
description:
- Timeout for HTTP requests during the image build operation. Provide a positive integer value for the number of
seconds.
required: false
version_added: "2.1"
name:
description:
- "Image name. Name format will be one of: name, repository/name, registry_server:port/name.
When pushing or pulling an image the name can optionally include the tag by appending ':tag_name'."
required: true
path:
description:
      - Use with state C(present) to build an image. Will be the path to a directory containing the context and
Dockerfile for building an image.
aliases:
- build_path
required: false
pull:
description:
- When building an image downloads any updates to the FROM image in Dockerfile.
default: true
required: false
version_added: "2.1"
type: bool
push:
description:
- Push the image to the registry. Specify the registry as part of the I(name) or I(repository) parameter.
default: false
required: false
version_added: "2.2"
type: bool
rm:
description:
- Remove intermediate containers after build.
default: true
required: false
version_added: "2.1"
type: bool
nocache:
description:
- Do not use cache when building an image.
default: false
required: false
type: bool
repository:
description:
- Full path to a repository. Use with state C(present) to tag the image into the repository. Expects
format I(repository:tag). If no tag is provided, will use the value of the C(tag) parameter or I(latest).
required: false
version_added: "2.1"
state:
description:
- Make assertions about the state of an image.
- When C(absent) an image will be removed. Use the force option to un-tag and remove all images
matching the provided name.
- When C(present) check if an image exists using the provided name and tag. If the image is not found or the
force option is used, the image will either be pulled, built or loaded. By default the image will be pulled
from Docker Hub. To build the image, provide a path value set to a directory containing a context and
Dockerfile. To load an image, specify load_path to provide a path to an archive file. To tag an image to a
repository, provide a repository path. If the name contains a repository path, it will be pushed.
- "NOTE: C(build) is DEPRECATED and will be removed in release 2.3. Specifying C(build) will behave the
same as C(present)."
required: false
default: present
choices:
- absent
- present
- build
tag:
description:
- Used to select an image when pulling. Will be added to the image when pushing, tagging or building. Defaults to
I(latest).
- If C(name) parameter format is I(name:tag), then tag value from C(name) will take precedence.
default: latest
required: false
buildargs:
description:
- Provide a dictionary of C(key:value) build arguments that map to Dockerfile ARG directive.
- Docker expects the value to be a string. For convenience any non-string values will be converted to strings.
- Requires Docker API >= 1.21 and docker-py >= 1.7.0.
required: false
version_added: "2.2"
container_limits:
description:
- A dictionary of limits applied to each container created by the build process.
required: false
version_added: "2.1"
suboptions:
memory:
description:
- Set memory limit for build.
memswap:
description:
- Total memory (memory + swap), -1 to disable swap.
cpushares:
description:
- CPU shares (relative weight).
cpusetcpus:
description:
- CPUs in which to allow execution, e.g., "0-3", "0,1".
use_tls:
description:
- "DEPRECATED. Whether to use tls to connect to the docker server. Set to C(no) when TLS will not be used. Set to
C(encrypt) to use TLS. And set to C(verify) to use TLS and verify that the server's certificate is valid for the
server. NOTE: If you specify this option, it will set the value of the tls or tls_verify parameters."
choices:
- no
- encrypt
- verify
default: no
required: false
version_added: "2.0"
extends_documentation_fragment:
- docker
requirements:
- "python >= 2.6"
- "docker-py >= 1.7.0"
- "Docker API >= 1.20"
author:
- Pavel Antonov (@softzilla)
- Chris Houseknecht (@chouseknecht)
- James Tanner (@jctanner)
'''
EXAMPLES = '''
- name: pull an image
docker_image:
name: pacur/centos-7
- name: Tag and push to docker hub
docker_image:
name: pacur/centos-7
repository: dcoppenhagan/myimage
tag: 7.0
push: yes
- name: Tag and push to local registry
docker_image:
name: centos
repository: localhost:5000/centos
tag: 7
push: yes
- name: Remove image
docker_image:
state: absent
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
- name: Build an image and push it to a private repo
docker_image:
path: ./sinatra
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
push: yes
- name: Archive image
docker_image:
name: registry.ansible.com/chouseknecht/sinatra
tag: v1
archive_path: my_sinatra.tar
- name: Load image from archive and push to a private registry
docker_image:
name: localhost:5000/myimages/sinatra
tag: v1
push: yes
load_path: my_sinatra.tar
- name: Build image with buildargs
docker_image:
path: /path/to/build/dir
name: myimage
buildargs:
log_volume: /var/log/myapp
listen_port: 8080
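# A hypothetical variation (the limit values below are illustrative only)
- name: Build an image with container limits
  docker_image:
    path: /path/to/build/dir
    name: myimage
    container_limits:
      memory: 1073741824
      cpushares: 512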
'''
RETURN = '''
image:
description: Image inspection results for the affected image.
returned: success
type: dict
sample: {}
'''
import os
import re
from ansible.module_utils.docker_common import HAS_DOCKER_PY_2, AnsibleDockerClient, DockerBaseClass
from ansible.module_utils._text import to_native
try:
if HAS_DOCKER_PY_2:
from docker.auth import resolve_repository_name
else:
from docker.auth.auth import resolve_repository_name
from docker.utils.utils import parse_repository_tag
except ImportError:
# missing docker-py handled in docker_common
pass
class ImageManager(DockerBaseClass):
def __init__(self, client, results):
super(ImageManager, self).__init__()
self.client = client
self.results = results
parameters = self.client.module.params
self.check_mode = self.client.check_mode
self.archive_path = parameters.get('archive_path')
self.container_limits = parameters.get('container_limits')
self.dockerfile = parameters.get('dockerfile')
self.force = parameters.get('force')
self.load_path = parameters.get('load_path')
self.name = parameters.get('name')
self.nocache = parameters.get('nocache')
self.path = parameters.get('path')
self.pull = parameters.get('pull')
self.repository = parameters.get('repository')
self.rm = parameters.get('rm')
self.state = parameters.get('state')
self.tag = parameters.get('tag')
self.http_timeout = parameters.get('http_timeout')
self.push = parameters.get('push')
self.buildargs = parameters.get('buildargs')
# If name contains a tag, it takes precedence over tag parameter.
repo, repo_tag = parse_repository_tag(self.name)
if repo_tag:
self.name = repo
self.tag = repo_tag
if self.state in ['present', 'build']:
self.present()
elif self.state == 'absent':
self.absent()
def fail(self, msg):
self.client.fail(msg)
def present(self):
'''
Handles state = 'present', which includes building, loading or pulling an image,
depending on user provided parameters.
:returns None
'''
image = self.client.find_image(name=self.name, tag=self.tag)
if not image or self.force:
if self.path:
# Build the image
if not os.path.isdir(self.path):
self.fail("Requested build path %s could not be found or you do not have access." % self.path)
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.log("Building image %s" % image_name)
self.results['actions'].append("Built image %s from %s" % (image_name, self.path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.build_image()
elif self.load_path:
# Load the image from an archive
if not os.path.isfile(self.load_path):
self.fail("Error loading image %s. Specified path %s does not exist." % (self.name,
self.load_path))
image_name = self.name
if self.tag:
image_name = "%s:%s" % (self.name, self.tag)
self.results['actions'].append("Loaded image %s from %s" % (image_name, self.load_path))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.load_image()
else:
# pull the image
self.results['actions'].append('Pulled image %s:%s' % (self.name, self.tag))
self.results['changed'] = True
if not self.check_mode:
self.results['image'] = self.client.pull_image(self.name, tag=self.tag)
if self.archive_path:
self.archive_image(self.name, self.tag)
if self.push and not self.repository:
self.push_image(self.name, self.tag)
elif self.repository:
self.tag_image(self.name, self.tag, self.repository, force=self.force, push=self.push)
def absent(self):
'''
Handles state = 'absent', which removes an image.
:return None
'''
image = self.client.find_image(self.name, self.tag)
if image:
name = self.name
if self.tag:
name = "%s:%s" % (self.name, self.tag)
if not self.check_mode:
try:
self.client.remove_image(name, force=self.force)
except Exception as exc:
self.fail("Error removing image %s - %s" % (name, str(exc)))
self.results['changed'] = True
self.results['actions'].append("Removed image %s" % (name))
self.results['image']['state'] = 'Deleted'
def archive_image(self, name, tag):
'''
Archive an image to a .tar file. Called when archive_path is passed.
        :param name: Name of the image. Type: str
        :param tag: Image tag; an empty value defaults to "latest". Type: str
        :return: None
'''
if not tag:
tag = "latest"
image = self.client.find_image(name=name, tag=tag)
if not image:
self.log("archive image: image %s:%s not found" % (name, tag))
return
image_name = "%s:%s" % (name, tag)
self.results['actions'].append('Archived image %s to %s' % (image_name, self.archive_path))
self.results['changed'] = True
if not self.check_mode:
self.log("Getting archive of image %s" % image_name)
try:
image = self.client.get_image(image_name)
except Exception as exc:
self.fail("Error getting image %s - %s" % (image_name, str(exc)))
try:
with open(self.archive_path, 'w') as fd:
for chunk in image.stream(2048, decode_content=False):
fd.write(chunk)
except Exception as exc:
self.fail("Error writing image archive %s - %s" % (self.archive_path, str(exc)))
image = self.client.find_image(name=name, tag=tag)
if image:
self.results['image'] = image
def push_image(self, name, tag=None):
'''
If the name of the image contains a repository path, then push the image.
        :param name: Name of the image to push.
        :param tag: Use a specific tag.
:return: None
'''
repository = name
if not tag:
repository, tag = parse_repository_tag(name)
registry, repo_name = resolve_repository_name(repository)
self.log("push %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
if registry:
self.results['actions'].append("Pushed image %s to %s/%s:%s" % (self.name, registry, repo_name, tag))
self.results['changed'] = True
if not self.check_mode:
status = None
try:
for line in self.client.push(repository, tag=tag, stream=True, decode=True):
self.log(line, pretty_print=True)
if line.get('errorDetail'):
raise Exception(line['errorDetail']['message'])
status = line.get('status')
except Exception as exc:
if re.search('unauthorized', str(exc)):
if re.search('authentication required', str(exc)):
self.fail("Error pushing image %s/%s:%s - %s. Try logging into %s first." %
(registry, repo_name, tag, str(exc), registry))
else:
self.fail("Error pushing image %s/%s:%s - %s. Does the repository exist?" %
(registry, repo_name, tag, str(exc)))
self.fail("Error pushing image %s: %s" % (repository, str(exc)))
self.results['image'] = self.client.find_image(name=repository, tag=tag)
if not self.results['image']:
self.results['image'] = dict()
self.results['image']['push_status'] = status
def tag_image(self, name, tag, repository, force=False, push=False):
'''
Tag an image into a repository.
:param name: name of the image. required.
:param tag: image tag.
:param repository: path to the repository. required.
:param force: bool. force tagging, even it image already exists with the repository path.
:param push: bool. push the image once it's tagged.
:return: None
'''
repo, repo_tag = parse_repository_tag(repository)
if not repo_tag:
repo_tag = "latest"
if tag:
repo_tag = tag
image = self.client.find_image(name=repo, tag=repo_tag)
found = 'found' if image else 'not found'
self.log("image %s was %s" % (repo, found))
if not image or force:
self.log("tagging %s:%s to %s:%s" % (name, tag, repo, repo_tag))
self.results['changed'] = True
self.results['actions'].append("Tagged image %s:%s to %s:%s" % (name, tag, repo, repo_tag))
if not self.check_mode:
try:
                    # Finding the image does not always work, especially when running a localhost registry. In those
# cases, if we don't set force=True, it errors.
image_name = name
if tag and not re.search(tag, name):
image_name = "%s:%s" % (name, tag)
tag_status = self.client.tag(image_name, repo, tag=repo_tag, force=True)
if not tag_status:
raise Exception("Tag operation failed.")
except Exception as exc:
self.fail("Error: failed to tag image - %s" % str(exc))
self.results['image'] = self.client.find_image(name=repo, tag=repo_tag)
if push:
self.push_image(repo, repo_tag)
def build_image(self):
'''
Build an image
:return: image dict
'''
params = dict(
path=self.path,
tag=self.name,
rm=self.rm,
nocache=self.nocache,
stream=True,
timeout=self.http_timeout,
pull=self.pull,
forcerm=self.rm,
dockerfile=self.dockerfile,
decode=True
)
build_output = []
if self.tag:
params['tag'] = "%s:%s" % (self.name, self.tag)
if self.container_limits:
params['container_limits'] = self.container_limits
if self.buildargs:
for key, value in self.buildargs.items():
self.buildargs[key] = to_native(value)
params['buildargs'] = self.buildargs
for line in self.client.build(**params):
# line = json.loads(line)
self.log(line, pretty_print=True)
if "stream" in line:
build_output.append(line["stream"])
if line.get('error'):
if line.get('errorDetail'):
errorDetail = line.get('errorDetail')
self.fail(
"Error building %s - code: %s, message: %s, logs: %s" % (
self.name,
errorDetail.get('code'),
errorDetail.get('message'),
build_output))
else:
self.fail("Error building %s - message: %s, logs: %s" % (
self.name, line.get('error'), build_output))
return self.client.find_image(name=self.name, tag=self.tag)
def load_image(self):
'''
Load an image from a .tar archive
:return: image dict
'''
try:
self.log("Opening image %s" % self.load_path)
image_tar = open(self.load_path, 'r')
except Exception as exc:
self.fail("Error opening image %s - %s" % (self.load_path, str(exc)))
try:
self.log("Loading image from %s" % self.load_path)
self.client.load_image(image_tar)
except Exception as exc:
self.fail("Error loading image %s - %s" % (self.name, str(exc)))
try:
image_tar.close()
except Exception as exc:
self.fail("Error closing image %s - %s" % (self.name, str(exc)))
return self.client.find_image(self.name, self.tag)
def main():
argument_spec = dict(
archive_path=dict(type='path'),
container_limits=dict(type='dict'),
dockerfile=dict(type='str'),
force=dict(type='bool', default=False),
http_timeout=dict(type='int'),
load_path=dict(type='path'),
name=dict(type='str', required=True),
nocache=dict(type='bool', default=False),
path=dict(type='path', aliases=['build_path']),
pull=dict(type='bool', default=True),
push=dict(type='bool', default=False),
repository=dict(type='str'),
rm=dict(type='bool', default=True),
state=dict(type='str', choices=['absent', 'present', 'build'], default='present'),
tag=dict(type='str', default='latest'),
use_tls=dict(type='str', default='no', choices=['no', 'encrypt', 'verify']),
buildargs=dict(type='dict', default=None),
)
client = AnsibleDockerClient(
argument_spec=argument_spec,
supports_check_mode=True,
)
results = dict(
changed=False,
actions=[],
image={}
)
ImageManager(client, results)
client.module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
jbenden/ansible | lib/ansible/modules/network/cloudengine/ce_netstream_export.py | 45 | 19359 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = '''
---
module: ce_netstream_export
version_added: "2.4"
short_description: Manages netstream export on HUAWEI CloudEngine switches.
description:
- Configure NetStream flow statistics exporting and versions for exported packets on HUAWEI CloudEngine switches.
author: Zhijin Zhou (@CloudEngine-Ansible)
notes:
options:
type:
description:
- Specifies NetStream feature.
required: true
choices: ['ip', 'vxlan']
default: null
source_ip:
description:
- Specifies source address which can be IPv6 or IPv4 of the exported NetStream packet.
required: false
default: null
host_ip:
description:
- Specifies destination address which can be IPv6 or IPv4 of the exported NetStream packet.
required: false
default: null
host_port:
description:
- Specifies the destination UDP port number of the exported packets.
The value is an integer that ranges from 1 to 65535.
required: false
default: null
host_vpn:
description:
- Specifies the VPN instance of the exported packets carrying flow statistics.
Ensure the VPN instance has been created on the device.
required: false
default: null
version:
description:
- Sets the version of exported packets.
required: false
choices: ['5', '9']
default: null
as_option:
description:
- Specifies the AS number recorded in the statistics as the original or the peer AS number.
required: false
choices: ['origin', 'peer']
default: null
bgp_nexthop:
description:
- Configures the statistics to carry BGP next hop information. Currently, only V9 supports the exported
packets carrying BGP next hop information.
required: false
choices: ['enable','disable']
default: 'disable'
state:
description:
- Manage the state of the resource.
required: false
choices: ['present','absent']
default: present
'''
EXAMPLES = '''
- name: netstream export module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Configures the source address for the exported packets carrying IPv4 flow statistics.
ce_netstream_export:
type: ip
source_ip: 192.8.2.2
provider: "{{ cli }}"
- name: Configures the source IP address for the exported packets carrying VXLAN flexible flow statistics.
ce_netstream_export:
type: vxlan
source_ip: 192.8.2.3
provider: "{{ cli }}"
- name: Configures the destination IP address and destination UDP port number for the exported packets carrying IPv4 flow statistics.
ce_netstream_export:
type: ip
host_ip: 192.8.2.4
host_port: 25
host_vpn: test
provider: "{{ cli }}"
- name: Configures the destination IP address and destination UDP port number for the exported packets carrying VXLAN flexible flow statistics.
ce_netstream_export:
type: vxlan
host_ip: 192.8.2.5
host_port: 26
host_vpn: test
provider: "{{ cli }}"
- name: Configures the version number of the exported packets carrying IPv4 flow statistics.
ce_netstream_export:
type: ip
version: 9
as_option: origin
bgp_nexthop: enable
provider: "{{ cli }}"
- name: Configures the version for the exported packets carrying VXLAN flexible flow statistics.
ce_netstream_export:
type: vxlan
version: 9
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {
"as_option": "origin",
"bgp_nexthop": "enable",
"host_ip": "192.8.5.6",
"host_port": "26",
"host_vpn": "test",
"source_ip": "192.8.2.5",
"state": "present",
"type": "ip",
"version": "9"
}
existing:
description: k/v pairs of existing attributes on the device
returned: always
type: dict
sample: {
"as_option": null,
"bgp_nexthop": "disable",
"host_ip": null,
"host_port": null,
"host_vpn": null,
"source_ip": null,
"type": "ip",
"version": null
}
end_state:
description: k/v pairs of end attributes on the device
returned: always
type: dict
sample: {
"as_option": "origin",
"bgp_nexthop": "enable",
"host_ip": "192.8.5.6",
"host_port": "26",
"host_vpn": "test",
"source_ip": "192.8.2.5",
"type": "ip",
"version": "9"
}
updates:
description: command list sent to the device
returned: always
type: list
sample: [
"netstream export ip source 192.8.2.5",
"netstream export ip host 192.8.5.6 26 vpn-instance test",
"netstream export ip version 9 origin-as bgp-nexthop"
]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_config, load_config
from ansible.module_utils.ce import ce_argument_spec
def is_ipv4_addr(ip_addr):
"""check ipaddress validate"""
rule1 = r'(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.'
rule2 = r'(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])'
ipv4_regex = '%s%s%s%s%s%s' % ('^', rule1, rule1, rule1, rule2, '$')
return bool(re.match(ipv4_regex, ip_addr))
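# Illustrative checks of the regex above (example addresses):
#   is_ipv4_addr('192.8.2.2')  -> True
#   is_ipv4_addr('256.1.1.1')  -> False (octet out of range)
#   is_ipv4_addr('fe80::1')    -> False (IPv6 addresses never match)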
def is_config_exist(cmp_cfg, test_cfg):
"""is configuration exist"""
test_cfg_tmp = test_cfg + ' *$' + '|' + test_cfg + ' *\n'
obj = re.compile(test_cfg_tmp)
result = re.findall(obj, cmp_cfg)
if not result:
return False
return True
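# Matching semantics, for example (illustrative strings, not real device output):
#   is_config_exist("netstream export ip version 9\n",
#                   "netstream export ip version 9")            -> True
#   is_config_exist("netstream export ip version 9 origin-as\n",
#                   "netstream export ip version 9")            -> False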
class NetstreamExport(object):
"""Manange NetStream export"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.__init_module__()
# NetStream export configuration parameters
self.type = self.module.params['type']
self.source_ip = self.module.params['source_ip']
self.host_ip = self.module.params['host_ip']
self.host_port = self.module.params['host_port']
self.host_vpn = self.module.params['host_vpn']
self.version = self.module.params['version']
self.as_option = self.module.params['as_option']
self.bgp_netxhop = self.module.params['bgp_nexthop']
self.state = self.module.params['state']
self.commands = list()
self.config = None
self.exist_conf = dict()
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def __init_module__(self):
"""init module"""
self.module = AnsibleModule(
argument_spec=self.spec, supports_check_mode=True)
def cli_load_config(self, commands):
"""load config by cli"""
if not self.module.check_mode:
load_config(self.module, commands)
def get_netstream_config(self):
"""get current netstream configuration"""
flags = list()
exp = " | inc ^netstream export"
flags.append(exp)
return get_config(self.module, flags)
def get_existing(self):
"""get existing config"""
self.existing = dict(type=self.type,
source_ip=self.exist_conf['source_ip'],
host_ip=self.exist_conf['host_ip'],
host_port=self.exist_conf['host_port'],
host_vpn=self.exist_conf['host_vpn'],
version=self.exist_conf['version'],
as_option=self.exist_conf['as_option'],
bgp_nexthop=self.exist_conf['bgp_netxhop'])
def get_proposed(self):
"""get proposed config"""
self.proposed = dict(type=self.type,
source_ip=self.source_ip,
host_ip=self.host_ip,
host_port=self.host_port,
host_vpn=self.host_vpn,
version=self.version,
as_option=self.as_option,
bgp_nexthop=self.bgp_netxhop,
state=self.state)
def get_end_state(self):
"""get end config"""
self.get_config_data()
self.end_state = dict(type=self.type,
source_ip=self.exist_conf['source_ip'],
host_ip=self.exist_conf['host_ip'],
host_port=self.exist_conf['host_port'],
host_vpn=self.exist_conf['host_vpn'],
version=self.exist_conf['version'],
as_option=self.exist_conf['as_option'],
bgp_nexthop=self.exist_conf['bgp_netxhop'])
def show_result(self):
"""show result"""
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def cli_add_command(self, command, undo=False):
"""add command to self.update_cmd and self.commands"""
if undo and command.lower() not in ["quit", "return"]:
cmd = "undo " + command
else:
cmd = command
self.commands.append(cmd) # set to device
if command.lower() not in ["quit", "return"]:
if cmd not in self.updates_cmd:
self.updates_cmd.append(cmd) # show updates result
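        # For example, cli_add_command("netstream export ip source 192.8.2.5", undo=True)
        # queues "undo netstream export ip source 192.8.2.5" both for the device
        # and for the updates list reported back to the user.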
def config_nets_export_src_addr(self):
"""Configures the source address for the exported packets"""
if is_ipv4_addr(self.source_ip):
if self.type == 'ip':
cmd = "netstream export ip source %s" % self.source_ip
else:
cmd = "netstream export vxlan inner-ip source %s" % self.source_ip
else:
if self.type == 'ip':
cmd = "netstream export ip source ipv6 %s" % self.source_ip
else:
cmd = "netstream export vxlan inner-ip source ipv6 %s" % self.source_ip
if is_config_exist(self.config, cmd):
self.exist_conf['source_ip'] = self.source_ip
if self.state == 'present':
return
else:
undo = True
else:
if self.state == 'absent':
return
else:
undo = False
self.cli_add_command(cmd, undo)
def config_nets_export_host_addr(self):
"""Configures the destination IP address and destination UDP port number"""
if is_ipv4_addr(self.host_ip):
if self.type == 'ip':
cmd = 'netstream export ip host %s %s' % (self.host_ip, self.host_port)
else:
cmd = 'netstream export vxlan inner-ip host %s %s' % (self.host_ip, self.host_port)
else:
if self.type == 'ip':
cmd = 'netstream export ip host ipv6 %s %s' % (self.host_ip, self.host_port)
else:
cmd = 'netstream export vxlan inner-ip host ipv6 %s %s' % (self.host_ip, self.host_port)
if self.host_vpn:
cmd += " vpn-instance %s" % self.host_vpn
if is_config_exist(self.config, cmd):
self.exist_conf['host_ip'] = self.host_ip
self.exist_conf['host_port'] = self.host_port
if self.host_vpn:
self.exist_conf['host_vpn'] = self.host_vpn
if self.state == 'present':
return
else:
undo = True
else:
if self.state == 'absent':
return
else:
undo = False
self.cli_add_command(cmd, undo)
def config_nets_export_vxlan_ver(self):
"""Configures the version for the exported packets carrying VXLAN flexible flow statistics"""
cmd = 'netstream export vxlan inner-ip version 9'
if is_config_exist(self.config, cmd):
self.exist_conf['version'] = self.version
if self.state == 'present':
return
else:
undo = True
else:
if self.state == 'absent':
return
else:
undo = False
self.cli_add_command(cmd, undo)
def config_nets_export_ip_ver(self):
"""Configures the version number of the exported packets carrying IPv4 flow statistics"""
cmd = 'netstream export ip version %s' % self.version
        # Both V5 and V9 accept the same AS options, so one branch covers both.
        if self.as_option == 'origin':
            cmd += ' origin-as'
        elif self.as_option == 'peer':
            cmd += ' peer-as'
if self.bgp_netxhop == 'enable':
cmd += ' bgp-nexthop'
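        # Special case below (a reading of the original logic): when the plain V5
        # command is requested but some "netstream export ip version" line is
        # already configured, present-state re-issues the plain command to strip
        # previously set options, while absent-state only records the version.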
if cmd == 'netstream export ip version 5':
cmd_tmp = "netstream export ip version"
if is_config_exist(self.config, cmd_tmp):
if self.state == 'present':
self.cli_add_command(cmd, False)
else:
self.exist_conf['version'] = self.version
return
if is_config_exist(self.config, cmd):
self.exist_conf['version'] = self.version
self.exist_conf['as_option'] = self.as_option
self.exist_conf['bgp_netxhop'] = self.bgp_netxhop
if self.state == 'present':
return
else:
undo = True
else:
if self.state == 'absent':
return
else:
undo = False
self.cli_add_command(cmd, undo)
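        # For example, type=ip, version=9, as_option=origin and bgp_nexthop=enable
        # assemble the command
        #   netstream export ip version 9 origin-as bgp-nexthop
        # which matches the sample command shown in the RETURN block above.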
def config_netstream_export(self):
"""configure netstream export"""
if self.commands:
self.cli_load_config(self.commands)
self.changed = True
def check_params(self):
"""Check all input params"""
if not self.type:
self.module.fail_json(msg='Error: The value of type cannot be empty.')
if self.host_port:
if not self.host_port.isdigit():
self.module.fail_json(msg='Error: Host port is invalid.')
if int(self.host_port) < 1 or int(self.host_port) > 65535:
self.module.fail_json(msg='Error: Host port is not in the range from 1 to 65535.')
if self.host_vpn:
if self.host_vpn == '_public_':
self.module.fail_json(
msg='Error: The host vpn name _public_ is reserved.')
if len(self.host_vpn) < 1 or len(self.host_vpn) > 31:
self.module.fail_json(msg='Error: The host vpn name length is not in the range from 1 to 31.')
if self.type == 'vxlan' and self.version == '5':
self.module.fail_json(msg="Error: When type is vxlan, version must be 9.")
if self.type == 'ip' and self.version == '5' and self.bgp_netxhop == 'enable':
self.module.fail_json(msg="Error: When type=ip and version=5, bgp_netxhop is not supported.")
if (self.host_ip and not self.host_port) or (self.host_port and not self.host_ip):
self.module.fail_json(msg="Error: host_ip and host_port must both exist or not exist.")
def get_config_data(self):
"""get configuration commands and current configuration"""
self.exist_conf['type'] = self.type
self.exist_conf['source_ip'] = None
self.exist_conf['host_ip'] = None
self.exist_conf['host_port'] = None
self.exist_conf['host_vpn'] = None
self.exist_conf['version'] = None
self.exist_conf['as_option'] = None
self.exist_conf['bgp_netxhop'] = 'disable'
self.config = self.get_netstream_config()
if self.type and self.source_ip:
self.config_nets_export_src_addr()
if self.type and self.host_ip and self.host_port:
self.config_nets_export_host_addr()
if self.type == 'vxlan' and self.version == '9':
self.config_nets_export_vxlan_ver()
if self.type == 'ip' and self.version:
self.config_nets_export_ip_ver()
def work(self):
"""excute task"""
self.check_params()
self.get_proposed()
self.get_config_data()
self.get_existing()
self.config_netstream_export()
self.get_end_state()
self.show_result()
def main():
"""main function entry"""
argument_spec = dict(
type=dict(required=True, type='str', choices=['ip', 'vxlan']),
source_ip=dict(required=False, type='str'),
host_ip=dict(required=False, type='str'),
host_port=dict(required=False, type='str'),
host_vpn=dict(required=False, type='str'),
version=dict(required=False, type='str', choices=['5', '9']),
as_option=dict(required=False, type='str', choices=['origin', 'peer']),
bgp_nexthop=dict(required=False, type='str', choices=['enable', 'disable'], default='disable'),
state=dict(choices=['absent', 'present'], default='present', required=False)
)
argument_spec.update(ce_argument_spec)
netstream_export = NetstreamExport(argument_spec)
netstream_export.work()
if __name__ == '__main__':
main()
| gpl-3.0 |
arielmakestuff/loadlimit | test/unit/importhook/test_taskfilematch.py | 1 | 1807 | # -*- coding: utf-8 -*-
# test/unit/importhook/test_taskfilematch.py
# Copyright (C) 2016 authors and contributors (see AUTHORS file)
#
# This module is released under the MIT License.
"""Test taskfilematch()"""
# ============================================================================
# Imports
# ============================================================================
# Stdlib imports
# Third-party imports
import pytest
# Local imports
import loadlimit.importhook
# ============================================================================
# Globals
# ============================================================================
taskfilematch = loadlimit.importhook.taskfilematch
# ============================================================================
# Test taskfilematch
# ============================================================================
@pytest.mark.parametrize('name', ['hello', 'world.py'])
def test_disabled_filename(monkeypatch, name):
"""Return None if filename ends with .disabled"""
def fake_isfile(filename):
return True
monkeypatch.setattr(loadlimit.importhook, 'isfile', fake_isfile)
n = '{}.disabled'.format(name)
assert taskfilematch(n) is None
@pytest.mark.parametrize('name', ['hello', 'world.py', 'what',
'now.py.disabled'])
def test_notfile_filename(monkeypatch, name):
"""Return None if filename ends with .disabled"""
def fake_isfile(filename):
return False
monkeypatch.setattr(loadlimit.importhook, 'isfile', fake_isfile)
    n = name
assert taskfilematch(n) is None
# ============================================================================
#
# ============================================================================
| mit |
vrieni/orange | Orange/misc/__init__.py | 6 | 9949 | """
.. index:: misc
.. index:: CostMatrix
-----------------------
CostMatrix
-----------------------
CostMatrix is an object that stores costs of (mis)classifications. Costs can be either negative or positive.
.. class:: CostMatrix
.. attribute:: class_var
The (class) attribute to which the matrix applies. This can
also be None.
.. attribute:: dimension (read only)
Matrix dimension, ie. number of classes.
.. method:: CostMatrix(dimension[, default cost])
Constructs a matrix of the given size and initializes it with
the default cost (1, if not given). All elements of the matrix
are assigned the given cost, except for the diagonal that have
the default cost of 0. (Diagonal elements represent correct
classifications and these usually have no price; you can,
however, change this.)
.. literalinclude:: code/CostMatrix.py
:lines: 1-8
    This initializes the matrix and prints it out:
.. literalinclude:: code/CostMatrix.res
:lines: 1-3
.. method:: CostMatrix(class descriptor[, default cost])
    Similar to the above, except that class_var is also set to the given descriptor.
The number of values of the given attribute (which must be discrete) is used
for dimension.
.. literalinclude:: code/CostMatrix.py
:lines: 10-11
This constructs a matrix similar to the one above (the class attribute in iris
domain is three-valued) except that the matrix contains 2s instead of 1s.
.. method:: CostMatrix([attribute descriptor, ]matrix)
Initializes the matrix with the elements given as a sequence of sequences (you
can mix lists and tuples if you find it funny). Each subsequence represents a row.
.. literalinclude:: code/CostMatrix.py
:lines: 13
    If you print this matrix out, it will look like this:
.. literalinclude:: code/CostMatrix.res
:lines: 5-7
.. method:: setcost(predicted, correct, cost)
    Set the misclassification cost. The matrix above could be
    constructed by first initializing it with 2s and then changing
    the costs for virginica into 1s.
.. literalinclude:: code/CostMatrix.py
:lines: 15-17
.. method:: getcost(predicted, correct)
Returns the cost of prediction. Values must be integer
indices; if class_var is set, you can also use symbolic values
(strings). Note that there's no way to change the size of the
matrix. Size is set at construction and does not change. For
the final example, we shall compute the profits of knowing
attribute values in the dataset lenses with the same
cost-matrix as printed above.
.. literalinclude:: code/CostMatrix.py
:lines: 19-23
As the script shows, you don't have to (and usually won't) call the constructor
explicitly. Instead, you will set the corresponding field (in our case meas.cost)
to a matrix and let Orange convert it to CostMatrix automatically. Funny as it
might look, since Orange uses the constructor to perform this conversion, even
the above statement is correct (although the cost matrix is rather dull,
with 0s on the diagonal and 1s everywhere else):
.. literalinclude:: code/CostMatrix.py
:lines: 25
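A minimal doctest-style sketch of the same idea (hypothetical session; the
printed cost follows from the semantics described above)::

    >>> import Orange
    >>> data = Orange.data.Table("iris")
    >>> cm = Orange.misc.CostMatrix(data.domain.class_var, 2)
    >>> cm.getcost("Iris-setosa", "Iris-virginica")
    2.0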
.. index:: SymMatrix
-----------------------
SymMatrix
-----------------------
:obj:`SymMatrix` implements symmetric matrices of size fixed at
construction time (and stored in :obj:`SymMatrix.dim`).
.. class:: SymMatrix
.. attribute:: dim
Matrix dimension.
.. attribute:: matrix_type
Can be ``SymMatrix.Lower`` (0), ``SymMatrix.Upper`` (1),
``SymMatrix.Symmetric`` (2, default), ``SymMatrix.LowerFilled`` (3) or
        ``SymMatrix.UpperFilled`` (4).
If the matrix type is ``Lower`` or ``Upper``, indexing
above or below the diagonal, respectively, will fail.
        With ``LowerFilled`` and ``UpperFilled``,
        the elements above or below the diagonal, respectively, still
        exist and are set to zero, but they cannot be modified. The
default matrix type is ``Symmetric``, but can be changed
at any time.
If matrix type is ``Upper``, it is printed as:
>>> import Orange
>>> m = Orange.misc.SymMatrix(
... [[1],
... [2, 4],
... [3, 6, 9],
... [4, 8, 12, 16]])
>>> m.matrix_type = m.Upper
>>> print m
(( 1.000, 2.000, 3.000, 4.000),
( 4.000, 6.000, 8.000),
( 9.000, 12.000),
( 16.000))
Changing the type to ``LowerFilled`` changes the printout to
>>> m.matrix_type = m.LowerFilled
>>> print m
(( 1.000, 0.000, 0.000, 0.000),
( 2.000, 4.000, 0.000, 0.000),
( 3.000, 6.000, 9.000, 0.000),
( 4.000, 8.000, 12.000, 16.000))
.. method:: __init__(dim[, value])
Construct a symmetric matrix of the given dimension.
:param dim: matrix dimension
:type dim: int
:param value: default value (0 by default)
:type value: double
.. method:: __init__(data)
Construct a new symmetric matrix containing the given data.
        These can be given as a Python list containing lists or tuples.
The following example fills a matrix created above with
data in a list::
import Orange
m = [[],
[ 3],
[ 2, 4],
[17, 5, 4],
[ 2, 8, 3, 8],
[ 7, 5, 10, 11, 2],
[ 8, 4, 1, 5, 11, 13],
[ 4, 7, 12, 8, 10, 1, 5],
[13, 9, 14, 15, 7, 8, 4, 6],
[12, 10, 11, 15, 2, 5, 7, 3, 1]]
            matrix = Orange.misc.SymMatrix(m)
SymMatrix also stores diagonal elements. They are set
to zero, if they are not specified. The missing elements
(shorter lists) are set to zero as well. If a list
spreads over the diagonal, the constructor checks
for asymmetries. For instance, the matrix
::
m = [[],
[ 3, 0, f],
[ 2, 4]]
is only OK if f equals 2. Finally, no row can be longer
than matrix size.
.. method:: get_values()
Return all matrix values in a Python list.
.. method:: get_KNN(i, k)
Return k columns with the lowest value in the i-th row.
:param i: i-th row
:type i: int
:param k: number of neighbors
:type k: int
.. method:: avg_linkage(clusters)
Return a symmetric matrix with average distances between given clusters.
:param clusters: list of clusters
:type clusters: list of lists
.. method:: invert(type)
Invert values in the symmetric matrix.
:param type: 0 (-X), 1 (1 - X), 2 (max - X), 3 (1 / X)
:type type: int
.. method:: normalize(type)
Normalize values in the symmetric matrix.
:param type: 0 (normalize to [0, 1] interval), 1 (Sigmoid)
:type type: int
Indexing
..........
For symmetric matrices the order of indices is not important:
if ``m`` is a SymMatrix, then ``m[2, 4]`` addresses the same element as ``m[4, 2]``.
..
.. literalinclude:: code/symmatrix.py
:lines: 1-6
>>> import Orange
>>> m = Orange.misc.SymMatrix(4)
>>> for i in range(4):
... for j in range(i+1):
... m[i, j] = (i+1)*(j+1)
Although only the lower left half of the matrix was set explicitly,
the whole matrix is constructed.
>>> print m
(( 1.000, 2.000, 3.000, 4.000),
( 2.000, 4.000, 6.000, 8.000),
( 3.000, 6.000, 9.000, 12.000),
( 4.000, 8.000, 12.000, 16.000))
Entire rows are indexed with a single index. They can be iterated
over in a for loop or sliced (with, for example, ``m[:3]``):
>>> print m[1]
(2.0, 4.0, 6.0, 8.0)
>>> m.matrix_type = m.Lower
>>> for row in m:
... print row
(1.0,)
(2.0, 4.0)
(3.0, 6.0, 9.0)
(4.0, 8.0, 12.0, 16.0)
.. index:: Random number generator
-----------------------
Random number generator
-----------------------
:obj:`Random` uses the
`Mersenne twister <http://en.wikipedia.org/wiki/Mersenne_twister>`_ algorithm
to generate random numbers.
::
>>> import Orange
>>> rg = Orange.misc.Random(42)
>>> rg(10)
4
>>> rg(10)
7
>>> rg.uses # We called rg two times.
2
>>> rg.reset()
>>> rg(10)
4
>>> rg(10)
7
>>> rg.uses
2
.. class:: Random(initseed)
:param initseed: Seed used for initializing the random generator.
:type initseed: int
.. method:: __call__(n)
Return a random integer R such that 0 <= R < n.
        :param n: the exclusive upper bound
        :type n: int
.. method:: reset([seed])
        Reinitialize the random generator with `seed`. If `seed`
        is not given, use the existing value of the attribute `initseed`.
.. attribute:: uses
The number of times the generator was called after
initialization/reset.
.. attribute:: initseed
Random seed.
Two examples of random number generator use found in the documentation
are :obj:`Orange.evaluation.testing` and :obj:`Orange.data.Table`.
"""
from Orange.core import RandomGenerator as Random
from Orange.core import SymMatrix
from Orange.core import CostMatrix
| gpl-3.0 |
meetsandeepan/meetsandeepan.github.io | node_modules/pygmentize-bundled/vendor/pygments/build-3.3/pygments/formatters/rtf.py | 364 | 4536 | # -*- coding: utf-8 -*-
"""
pygments.formatters.rtf
~~~~~~~~~~~~~~~~~~~~~~~
A formatter that generates RTF files.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.formatter import Formatter
__all__ = ['RtfFormatter']
class RtfFormatter(Formatter):
"""
Format tokens as RTF markup. This formatter automatically outputs full RTF
documents with color information and other useful stuff. Perfect for Copy and
Paste into Microsoft® Word® documents.
*New in Pygments 0.6.*
Additional options accepted:
`style`
The style to use, can be a string or a Style subclass (default:
``'default'``).
`fontface`
        The font family used, for example ``Bitstream Vera Sans``. Defaults to
some generic font which is supposed to have fixed width.
"""
name = 'RTF'
aliases = ['rtf']
filenames = ['*.rtf']
unicodeoutput = False
def __init__(self, **options):
"""
Additional options accepted:
``fontface``
Name of the font used. Could for example be ``'Courier New'``
to further specify the default which is ``'\fmodern'``. The RTF
specification claims that ``\fmodern`` are "Fixed-pitch serif
and sans serif fonts". Hope every RTF implementation thinks
the same about modern...
"""
Formatter.__init__(self, **options)
self.fontface = options.get('fontface') or ''
def _escape(self, text):
return text.replace('\\', '\\\\') \
.replace('{', '\\{') \
.replace('}', '\\}')
def _escape_text(self, text):
        # empty strings should give a small performance improvement
if not text:
return ''
# escape text
text = self._escape(text)
if self.encoding in ('utf-8', 'utf-16', 'utf-32'):
encoding = 'iso-8859-15'
else:
encoding = self.encoding or 'iso-8859-15'
buf = []
for c in text:
if ord(c) > 128:
ansic = c.encode(encoding, 'ignore') or '?'
if ord(ansic) > 128:
ansic = '\\\'%x' % ord(ansic)
else:
ansic = c
buf.append(r'\ud{\u%d%s}' % (ord(c), ansic))
else:
buf.append(str(c))
return ''.join(buf).replace('\n', '\\par\n')
def format_unencoded(self, tokensource, outfile):
# rtf 1.8 header
outfile.write(r'{\rtf1\ansi\deff0'
r'{\fonttbl{\f0\fmodern\fprq1\fcharset0%s;}}'
r'{\colortbl;' % (self.fontface and
' ' + self._escape(self.fontface) or
''))
# convert colors and save them in a mapping to access them later.
color_mapping = {}
offset = 1
for _, style in self.style:
for color in style['color'], style['bgcolor'], style['border']:
if color and color not in color_mapping:
color_mapping[color] = offset
outfile.write(r'\red%d\green%d\blue%d;' % (
int(color[0:2], 16),
int(color[2:4], 16),
int(color[4:6], 16)
))
offset += 1
outfile.write(r'}\f0')
# highlight stream
for ttype, value in tokensource:
while not self.style.styles_token(ttype) and ttype.parent:
ttype = ttype.parent
style = self.style.style_for_token(ttype)
buf = []
if style['bgcolor']:
buf.append(r'\cb%d' % color_mapping[style['bgcolor']])
if style['color']:
buf.append(r'\cf%d' % color_mapping[style['color']])
if style['bold']:
buf.append(r'\b')
if style['italic']:
buf.append(r'\i')
if style['underline']:
buf.append(r'\ul')
if style['border']:
buf.append(r'\chbrdr\chcfpat%d' %
color_mapping[style['border']])
start = ''.join(buf)
if start:
outfile.write('{%s ' % start)
outfile.write(self._escape_text(value))
if start:
outfile.write('}')
outfile.write('}')
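# A minimal usage sketch (assumes Pygments is importable; the output file name
# is arbitrary). highlight() returns the complete RTF document as a string.
if __name__ == '__main__':
    from pygments import highlight
    from pygments.lexers import PythonLexer

    rtf_doc = highlight('print("hello")', PythonLexer(), RtfFormatter())
    with open('example.rtf', 'w') as fp:
        fp.write(rtf_doc)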
| mit |
fkorotkov/pants | tests/python/pants_test/core_tasks/test_roots.py | 15 | 1306 | # coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
import os
from pants.base.build_environment import get_buildroot
from pants.core_tasks.roots import ListRoots
from pants_test.tasks.task_test_base import ConsoleTaskTestBase
class ListRootsTest(ConsoleTaskTestBase):
@classmethod
def task_type(cls):
return ListRoots
def _create_source_roots(self, source_root_dict):
self.set_options_for_scope('source', source_roots=source_root_dict)
for dir in source_root_dict.keys():
os.makedirs(os.path.join(get_buildroot(), dir))
def test_no_langs(self):
self._create_source_roots({'fakeroot': tuple()})
self.assert_console_output('fakeroot: *')
def test_single_source_root(self):
self._create_source_roots({'fakeroot': ('lang1', 'lang2')})
self.assert_console_output('fakeroot: lang1,lang2')
def test_multiple_source_roots(self):
self._create_source_roots({'fakerootA': ('lang1',),
'fakerootB': ('lang2',)})
self.assert_console_output('fakerootA: lang1', 'fakerootB: lang2')
| apache-2.0 |
Nolski/olympia | lib/video/tests.py | 13 | 7557 | import os
import stat
import tempfile
import pytest
from mock import Mock, patch
from nose import SkipTest
from nose.tools import eq_
import waffle
from django.conf import settings
import amo
import amo.tests
from amo.tests.test_helpers import get_image_path
from devhub.models import UserLog
from lib.video import get_library
from lib.video import ffmpeg, totem
from lib.video.tasks import resize_video
from users.models import UserProfile
pytestmark = pytest.mark.django_db
files = {
'good': os.path.join(os.path.dirname(__file__),
'fixtures/disco-truncated.webm'),
'bad': get_image_path('mozilla.png'),
}
older_output = """
Input #0, matroska,webm, from 'lib/video/fixtures/disco-truncated.webm':
Duration: 00:00:10.00, start: 0.000000, bitrate: 298 kb/s
Stream #0:0(eng): Video: vp8, yuv420p, 640x360, SAR 1:1 DAR 16:9,
Stream #0:1(eng): Audio: vorbis, 44100 Hz, stereo, s16 (default)
"""
other_output = """
Input #0, matroska, from 'disco-truncated.webm':
Metadata:
doctype : webm
"""
totem_indexer_good = """
TOTEM_INFO_DURATION=10
TOTEM_INFO_HAS_VIDEO=True
TOTEM_INFO_VIDEO_WIDTH=640
TOTEM_INFO_VIDEO_HEIGHT=360
TOTEM_INFO_VIDEO_CODEC=VP8 video
TOTEM_INFO_FPS=25
TOTEM_INFO_HAS_AUDIO=True
TOTEM_INFO_AUDIO_BITRATE=128
TOTEM_INFO_AUDIO_CODEC=Vorbis
TOTEM_INFO_AUDIO_SAMPLE_RATE=44100
TOTEM_INFO_AUDIO_CHANNELS=Stereo
"""
totem_indexer_bad = """
TOTEM_INFO_HAS_VIDEO=False
TOTEM_INFO_HAS_AUDIO=False
"""
class TestFFmpegVideo(amo.tests.TestCase):
def setUp(self):
super(TestFFmpegVideo, self).setUp()
self.video = ffmpeg.Video(files['good'])
if not ffmpeg.Video.library_available():
raise SkipTest
self.video._call = Mock()
self.video._call.return_value = older_output
def test_meta(self):
self.video.get_meta()
eq_(self.video.meta['formats'], ['matroska', 'webm'])
eq_(self.video.meta['duration'], 10.0)
eq_(self.video.meta['dimensions'], (640, 360))
def test_valid(self):
self.video.get_meta()
assert self.video.is_valid()
def test_dev_valid(self):
self.video._call.return_value = other_output
self.video.get_meta()
eq_(self.video.meta['formats'], ['webm'])
# These tests can be a little bit slow, to say the least, so they are
# skipped. Un-skip them if you want.
def test_screenshot(self):
raise SkipTest
self.video.get_meta()
try:
screenshot = self.video.get_screenshot(amo.ADDON_PREVIEW_SIZES[0])
assert os.stat(screenshot)[stat.ST_SIZE]
finally:
os.remove(screenshot)
def test_encoded(self):
raise SkipTest
self.video.get_meta()
try:
video = self.video.get_encoded(amo.ADDON_PREVIEW_SIZES[0])
assert os.stat(video)[stat.ST_SIZE]
finally:
os.remove(video)
class TestBadFFmpegVideo(amo.tests.TestCase):
def setUp(self):
super(TestBadFFmpegVideo, self).setUp()
self.video = ffmpeg.Video(files['bad'])
if not self.video.library_available():
raise SkipTest
self.video.get_meta()
def test_meta(self):
eq_(self.video.meta['formats'], ['image2'])
assert not self.video.is_valid()
def test_valid(self):
assert not self.video.is_valid()
def test_screenshot(self):
self.assertRaises(AssertionError, self.video.get_screenshot,
amo.ADDON_PREVIEW_SIZES[0])
def test_encoded(self):
self.assertRaises(AssertionError, self.video.get_encoded,
amo.ADDON_PREVIEW_SIZES[0])
class TestTotemVideo(amo.tests.TestCase):
def setUp(self):
super(TestTotemVideo, self).setUp()
self.video = totem.Video(files['good'])
self.video._call_indexer = Mock()
def test_meta(self):
self.video._call_indexer.return_value = totem_indexer_good
self.video.get_meta()
eq_(self.video.meta['formats'], 'VP8')
eq_(self.video.meta['duration'], '10')
def test_valid(self):
self.video._call_indexer = Mock()
self.video._call_indexer.return_value = totem_indexer_good
self.video.get_meta()
assert self.video.is_valid()
def test_not_valid(self):
self.video._call_indexer.return_value = totem_indexer_bad
self.video.get_meta()
assert not self.video.is_valid()
# These tests can be a little bit slow, to say the least, so they are
# skipped. Un-skip them if you want.
def test_screenshot(self):
raise SkipTest
self.video.get_meta()
try:
screenshot = self.video.get_screenshot(amo.ADDON_PREVIEW_SIZES[0])
assert os.stat(screenshot)[stat.ST_SIZE]
finally:
os.remove(screenshot)
def test_encoded(self):
raise SkipTest
self.video.get_meta()
try:
video = self.video.get_encoded(amo.ADDON_PREVIEW_SIZES[0])
assert os.stat(video)[stat.ST_SIZE]
finally:
os.remove(video)
@patch('lib.video.totem.Video.library_available')
@patch('lib.video.ffmpeg.Video.library_available')
@patch.object(settings, 'VIDEO_LIBRARIES',
['lib.video.totem', 'lib.video.ffmpeg'])
def test_choose(ffmpeg_, totem_):
ffmpeg_.return_value = True
totem_.return_value = True
eq_(get_library(), totem.Video)
totem_.return_value = False
eq_(get_library(), ffmpeg.Video)
ffmpeg_.return_value = False
eq_(get_library(), None)
class TestTask(amo.tests.TestCase):
# TODO(andym): make these more sparkly and cope with totem and not blow
# up all the time.
def setUp(self):
super(TestTask, self).setUp()
waffle.models.Switch.objects.create(name='video-encode', active=True)
self.mock = Mock()
self.mock.thumbnail_path = tempfile.mkstemp()[1]
self.mock.image_path = tempfile.mkstemp()[1]
self.mock.pk = 1
@patch('lib.video.tasks._resize_video')
def test_resize_error(self, _resize_video):
user = UserProfile.objects.create(email='a@a.com')
_resize_video.side_effect = ValueError
with self.assertRaises(ValueError):
resize_video(files['good'], self.mock, user=user)
assert self.mock.delete.called
assert UserLog.objects.filter(
user=user, activity_log__action=amo.LOG.VIDEO_ERROR.id).exists()
@patch('lib.video.tasks._resize_video')
def test_resize_failed(self, _resize_video):
user = UserProfile.objects.create(email='a@a.com')
_resize_video.return_value = None
resize_video(files['good'], self.mock, user=user)
assert self.mock.delete.called
@patch('lib.video.ffmpeg.Video.get_encoded')
def test_resize_video_no_encode(self, get_encoded):
raise SkipTest
waffle.models.Switch.objects.update(name='video-encode', active=False)
resize_video(files['good'], self.mock)
assert not get_encoded.called
assert isinstance(self.mock.sizes, dict)
assert self.mock.save.called
def test_resize_video(self):
raise SkipTest
resize_video(files['good'], self.mock)
assert isinstance(self.mock.sizes, dict)
assert self.mock.save.called
def test_resize_image(self):
raise SkipTest
resize_video(files['bad'], self.mock)
assert not isinstance(self.mock.sizes, dict)
assert not self.mock.save.called
| bsd-3-clause |
mapbox/rio-mbtiles | mbtiles/scripts/cli.py | 1 | 19183 | """mbtiles CLI"""
import functools
import logging
import math
import os
import sqlite3
import sys
import click
from cligj.features import iter_features
import mercantile
import rasterio
from rasterio.enums import Resampling
from rasterio.errors import FileOverwriteError
from rasterio.rio.options import creation_options, output_opt, _cb_key_val
from rasterio.warp import transform, transform_geom
import shapely.affinity
from shapely.geometry import mapping, shape
from shapely.ops import unary_union
import shapely.wkt
import supermercado.burntiles
from tqdm import tqdm
from mbtiles import __version__ as mbtiles_version
DEFAULT_NUM_WORKERS = None
RESAMPLING_METHODS = [method.name for method in Resampling]
TILES_CRS = "EPSG:3857"
log = logging.getLogger(__name__)
def resolve_inout(
input=None, output=None, files=None, overwrite=False, append=False, num_inputs=None
):
"""Resolves inputs and outputs from standard args and options.
Parameters
----------
input : str
A single input filename, optional.
output : str
A single output filename, optional.
files : str
A sequence of filenames in which the last is the output filename.
overwrite : bool
Whether to force overwriting the output file.
append : bool
Whether to append to the output file.
num_inputs : int
Raise exceptions if the number of resolved input files is higher
or lower than this number.
Returns
-------
tuple (str, list of str)
The resolved output filename and input filenames as a tuple of
length 2.
If provided, the output file may be overwritten. An output
file extracted from files will not be overwritten unless
overwrite is True.
Raises
------
click.BadParameter
"""
resolved_output = output or (files[-1] if files else None)
resolved_inputs = (
[input]
if input
else [] + list(files[: -1 if not output else None])
if files
else []
)
if num_inputs is not None:
if len(resolved_inputs) < num_inputs:
raise click.BadParameter("Insufficient inputs")
elif len(resolved_inputs) > num_inputs:
raise click.BadParameter("Too many inputs")
return resolved_output, resolved_inputs
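# Illustrative usage (hypothetical filenames), per the docstring above:
# resolve_inout(files=("in.tif", "out.mbtiles"), num_inputs=1)
# -> ("out.mbtiles", ["in.tif"])
# resolve_inout(files=("in.tif",), output="out.mbtiles", num_inputs=1)
# -> ("out.mbtiles", ["in.tif"])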
def extract_features(ctx, param, value):
if value is not None:
with click.open_file(value, encoding="utf-8") as src:
return list(iter_features(iter(src)))
else:
return None
@click.command(short_help="Export a dataset to MBTiles.")
@click.argument(
"files",
nargs=-1,
type=click.Path(resolve_path=True),
required=True,
metavar="INPUT [OUTPUT]",
)
@output_opt
@click.option(
"--append/--overwrite",
default=True,
is_flag=True,
help="Append tiles to an existing file or overwrite.",
)
@click.option("--title", help="MBTiles dataset title.")
@click.option("--description", help="MBTiles dataset description.")
@click.option(
"--overlay",
"layer_type",
flag_value="overlay",
default=True,
help="Export as an overlay (the default).",
)
@click.option(
"--baselayer", "layer_type", flag_value="baselayer", help="Export as a base layer."
)
@click.option(
"-f",
"--format",
"img_format",
type=click.Choice(["JPEG", "PNG", "WEBP"]),
default="JPEG",
help="Tile image format.",
)
@click.option(
"--tile-size",
default=256,
show_default=True,
type=int,
help="Width and height of individual square tiles to create.",
)
@click.option(
"--zoom-levels",
default=None,
metavar="MIN..MAX",
help="A min...max range of export zoom levels. "
"The default zoom level "
"is the one at which the dataset is contained within "
"a single tile.",
)
@click.option(
"--image-dump",
metavar="PATH",
help="A directory into which image tiles will be optionally " "dumped.",
)
@click.option(
"-j",
"num_workers",
type=int,
default=DEFAULT_NUM_WORKERS,
help="Number of workers (default: number of computer's processors).",
)
@click.option(
"--src-nodata",
default=None,
show_default=True,
type=float,
help="Manually override source nodata",
)
@click.option(
"--dst-nodata",
default=None,
show_default=True,
type=float,
help="Manually override destination nodata",
)
@click.option(
"--resampling",
type=click.Choice(RESAMPLING_METHODS),
default="nearest",
show_default=True,
help="Resampling method to use.",
)
@click.version_option(version=mbtiles_version, message="%(version)s")
@click.option(
"--rgba", default=False, is_flag=True, help="Select RGBA output. For PNG or WEBP only."
)
@click.option(
"--implementation",
"implementation",
type=click.Choice(["cf", "mp"]),
default=None,
help="Concurrency implementation. Use concurrent.futures (cf) or multiprocessing (mp).",
)
@click.option(
"--progress-bar", "-#", default=False, is_flag=True, help="Display progress bar."
)
@click.option("--covers", help="Restrict mbtiles output to cover a quadkey")
@click.option(
"--cutline",
type=click.Path(exists=True),
callback=extract_features,
default=None,
help="Path to a GeoJSON FeatureCollection to be used as a cutline. Only source pixels within the cutline features will be exported.",
)
@click.option(
"--oo",
"open_options",
metavar="NAME=VALUE",
multiple=True,
callback=_cb_key_val,
help="Format driver-specific options to be used when accessing the input dataset. See the GDAL format driver documentation for more information.",
)
@creation_options
@click.option(
"--wo",
"warp_options",
metavar="NAME=VALUE",
multiple=True,
callback=_cb_key_val,
help="See the GDAL warp options documentation for more information.",
)
@click.pass_context
def mbtiles(
ctx,
files,
output,
append,
title,
description,
layer_type,
img_format,
tile_size,
zoom_levels,
image_dump,
num_workers,
src_nodata,
dst_nodata,
resampling,
rgba,
implementation,
progress_bar,
covers,
cutline,
open_options,
creation_options,
warp_options,
):
"""Export a dataset to MBTiles (version 1.3) in a SQLite file.
The input dataset may have any coordinate reference system. It must
have at least three bands, which will become the red, green, and
blue bands of the output image tiles.
An optional fourth alpha band may be copied to the output tiles by
using the --rgba option in combination with the PNG or WEBP formats.
This option requires that the input dataset has at least 4 bands.
The default quality for JPEG and WEBP output (possible range:
10-100) is 75. This value can be changed with the use of the QUALITY
creation option, e.g. `--co QUALITY=90`. The default zlib
compression level for PNG output (possible range: 1-9) is 6. This
value can be changed like `--co ZLEVEL=8`. Lossless WEBP can be
chosen with `--co LOSSLESS=TRUE`.
If no zoom levels are specified, the defaults are the zoom levels
nearest to the one at which one tile may contain the entire source
dataset.
If a title or description for the output file are not provided,
they will be taken from the input dataset's filename.
This command is suited for small to medium (~1 GB) sized sources.
Python package: rio-mbtiles (https://github.com/mapbox/rio-mbtiles).
"""
log = logging.getLogger(__name__)
output, files = resolve_inout(
files=files, output=output, overwrite=not (append), append=append, num_inputs=1,
)
inputfile = files[0]
if implementation == "cf" and sys.version_info < (3, 7):
raise click.BadParameter(
"concurrent.futures implementation requires python>=3.7"
)
elif implementation == "cf":
from mbtiles.cf import process_tiles
elif implementation == "mp":
from mbtiles.mp import process_tiles
elif sys.version_info >= (3, 7):
from mbtiles.cf import process_tiles
else:
from mbtiles.mp import process_tiles
with ctx.obj["env"]:
# Read metadata from the source dataset.
with rasterio.open(inputfile, **open_options) as src:
if dst_nodata is not None and (
src_nodata is None and src.profile.get("nodata") is None
):
raise click.BadParameter(
"--src-nodata must be provided because " "dst-nodata is not None."
)
base_kwds = {"dst_nodata": dst_nodata, "src_nodata": src_nodata}
if src_nodata is not None:
base_kwds.update(nodata=src_nodata)
if dst_nodata is not None:
base_kwds.update(nodata=dst_nodata)
# Name and description.
title = title or os.path.basename(src.name)
description = description or src.name
# Compute the geographic bounding box of the dataset.
(west, east), (south, north) = transform(
src.crs, "EPSG:4326", src.bounds[::2], src.bounds[1::2]
)
# cutlines must be transformed from CRS84 to src pixel/line
# coordinates and then formatted as WKT.
if cutline is not None:
geoms = [shape(f["geometry"]) for f in cutline]
union = unary_union(geoms)
if union.geom_type not in ("MultiPolygon", "Polygon"):
raise click.ClickException("Unexpected cutline geometry type")
west, south, east, north = union.bounds
cutline_src = shape(
transform_geom("OGC:CRS84", src.crs, mapping(union))
)
invtransform = ~src.transform
shapely_matrix = (
invtransform.a,
invtransform.b,
invtransform.d,
invtransform.e,
invtransform.xoff,
invtransform.yoff,
)
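# shapely.affinity.affine_transform expects the 2D matrix in the order
# [a, b, d, e, xoff, yoff]; the inverse dataset transform above is unpacked
# accordingly so the cutline is mapped from world coordinates into
# pixel/line space for GDAL.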
cutline_rev = shapely.affinity.affine_transform(
cutline_src, shapely_matrix
)
warp_options["cutline"] = shapely.wkt.dumps(cutline_rev)
if covers is not None:
covers_tile = mercantile.quadkey_to_tile(covers)
west, south, east, north = mercantile.bounds(covers_tile)
# Resolve the minimum and maximum zoom levels for export.
if zoom_levels:
minzoom, maxzoom = map(int, zoom_levels.split(".."))
else:
zw = int(round(math.log(360.0 / (east - west), 2.0)))
zh = int(round(math.log(170.1022 / (north - south), 2.0)))
minzoom = min(zw, zh)
maxzoom = max(zw, zh)
log.debug("Zoom range: %d..%d", minzoom, maxzoom)
if rgba:
if img_format == "JPEG":
raise click.BadParameter(
"RGBA output is not possible with JPEG format."
)
else:
count = 4
else:
count = 3
# Parameters for creation of tile images.
base_kwds.update(
{
"driver": img_format.upper(),
"dtype": "uint8",
"nodata": 0,
"height": tile_size,
"width": tile_size,
"count": count,
"crs": TILES_CRS,
}
)
img_ext = "jpg" if img_format.lower() == "jpeg" else img_format.lower()
# Constrain bounds.
EPS = 1.0e-10
west = max(-180 + EPS, west)
south = max(-85.051129, south)
east = min(180 - EPS, east)
north = min(85.051129, north)
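# 85.051129 degrees is approximately degrees(atan(sinh(pi))), the latitude
# limit of the square Web Mercator world; EPS keeps longitudes strictly
# inside the antimeridian.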
if progress_bar:
# Estimate total number of tiles.
west_merc, south_merc = mercantile.xy(west, south)
east_merc, north_merc = mercantile.xy(east, north)
raster_area = (east_merc - west_merc) * (north_merc - south_merc)
est_num_tiles = 0
zoom = minzoom
(
minz_west_merc,
minz_south_merc,
minz_east_merc,
minz_north_merc,
) = mercantile.xy_bounds(mercantile.tile(0, 0, zoom))
minzoom_tile_area = (minz_east_merc - minz_west_merc) * (
minz_north_merc - minz_south_merc
)
ratio = min_ratio = raster_area / minzoom_tile_area
# If given a cutline, we use its mercator area and the
# supermercado module to help estimate the number of output
# tiles.
if cutline:
geoms = [shape(f["geometry"]) for f in cutline]
union = unary_union(geoms)
cutline_mercator = transform_geom(
"OGC:CRS84", "EPSG:3857", mapping(union)
)
min_ratio *= shape(cutline_mercator).area / raster_area
ratio = min_ratio
estimator = functools.partial(supermercado.burntiles.burn, cutline)
else:
estimator = functools.partial(
mercantile.tiles, west, south, east, north
)
est_num_tiles = len(list(estimator(zoom)))
ratio *= 4.0
while zoom < maxzoom and ratio < 16:
zoom += 1
est_num_tiles += len(list(estimator(zoom)))
ratio *= 4.0
else:
zoom += 1
est_num_tiles += int(
sum(
math.ceil(math.pow(4.0, z - minzoom) * min_ratio)
for z in range(zoom, maxzoom + 1)
)
)
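# Sketch of the estimate above: tile counts roughly quadruple per zoom
# level, so once ratio >= 16 the remaining levels are summed analytically
# as ceil(4**(z - minzoom) * min_ratio) instead of being enumerated; e.g.
# with a hypothetical min_ratio of 0.5, zoom z contributes
# ceil(0.5 * 4**(z - minzoom)) tiles.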
pbar = tqdm(total=est_num_tiles)
else:
pbar = None
# Initialize the sqlite db.
output_exists = os.path.exists(output)
if append:
appending = output_exists
elif output_exists:
appending = False
log.info("Overwrite mode chosen, unlinking output file.")
os.unlink(output)
# workaround for bug here: https://bugs.python.org/issue27126
sqlite3.connect(":memory:").close()
conn = sqlite3.connect(output)
def init_mbtiles():
"""Note: this closes over other local variables of the command function."""
cur = conn.cursor()
if appending:
cur.execute("SELECT * FROM metadata WHERE name = 'bounds';")
(
_,
bounds,
) = cur.fetchone()
prev_west, prev_south, prev_east, prev_north = map(
float, bounds.split(",")
)
new_west = min(west, prev_west)
new_south = min(south, prev_south)
new_east = max(east, prev_east)
new_north = max(north, prev_north)
cur.execute(
"UPDATE metadata SET value = ? WHERE name = 'bounds';",
("%f,%f,%f,%f" % (new_west, new_south, new_east, new_north),),
)
else:
cur.execute(
"CREATE TABLE IF NOT EXISTS tiles "
"(zoom_level integer, tile_column integer, "
"tile_row integer, tile_data blob);"
)
cur.execute(
"CREATE UNIQUE INDEX idx_zcr ON tiles (zoom_level, tile_column, tile_row);"
)
cur.execute(
"CREATE TABLE IF NOT EXISTS metadata (name text, value text);"
)
cur.execute(
"INSERT INTO metadata (name, value) VALUES (?, ?);", ("name", title)
)
cur.execute(
"INSERT INTO metadata (name, value) VALUES (?, ?);",
("type", layer_type),
)
cur.execute(
"INSERT INTO metadata (name, value) VALUES (?, ?);",
("version", "1.1"),
)
cur.execute(
"INSERT INTO metadata (name, value) VALUES (?, ?);",
("description", description),
)
cur.execute(
"INSERT INTO metadata (name, value) VALUES (?, ?);",
("format", img_ext),
)
cur.execute(
"INSERT INTO metadata (name, value) VALUES ('bounds', ?);",
("%f,%f,%f,%f" % (west, south, east, north),),
)
conn.commit()
def insert_results(tile, contents, img_ext=None, image_dump=None):
"""Also a closure."""
cursor = conn.cursor()
if contents is None:
log.info("Tile %r is empty and will be skipped", tile)
return
# MBTiles have a different origin than Mercantile/tilebelt.
tiley = int(math.pow(2, tile.z)) - tile.y - 1
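# Example (illustrative): at z=2, XYZ row y=1 maps to TMS row
# 2**2 - 1 - 1 = 2, flipping the vertical origin as MBTiles requires.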
# Optional image dump.
if image_dump:
img_name = "{}-{}-{}.{}".format(tile.x, tiley, tile.z, img_ext)
img_path = os.path.join(image_dump, img_name)
with open(img_path, "wb") as img:
img.write(contents)
# Insert tile into db.
log.info("Inserting tile: tile=%r", tile)
cursor.execute(
"INSERT OR REPLACE INTO tiles "
"(zoom_level, tile_column, tile_row, tile_data) "
"VALUES (?, ?, ?, ?);",
(tile.z, tile.x, tiley, sqlite3.Binary(contents)),
)
def commit_mbtiles():
conn.commit()
if cutline:
def gen_tiles():
for zk in range(minzoom, maxzoom + 1):
for arr in supermercado.burntiles.burn(cutline, zk):
# Supermercado's numpy scalars must be cast to
# ints. Python's sqlite module does not do this
# for us.
yield mercantile.Tile(*(int(v) for v in arr))
tiles = gen_tiles()
else:
tiles = mercantile.tiles(
west, south, east, north, range(minzoom, maxzoom + 1)
)
with conn:
process_tiles(
tiles,
init_mbtiles,
insert_results,
commit_mbtiles,
num_workers=num_workers,
inputfile=inputfile,
base_kwds=base_kwds,
resampling=resampling,
img_ext=img_ext,
image_dump=image_dump,
progress_bar=pbar,
open_options=open_options,
creation_options=creation_options,
warp_options=warp_options,
)
if pbar is not None:
pbar.update(pbar.total - pbar.n)
| mit |
vjmac15/Lyilis | lib/youtube_dl/extractor/odatv (VJ Washington's conflicted copy 2017-08-29).py | 80 | 1497 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
ExtractorError,
NO_DEFAULT,
remove_start
)
class OdaTVIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?odatv\.com/(?:mob|vid)_video\.php\?.*\bid=(?P<id>[^&]+)'
_TESTS = [{
'url': 'http://odatv.com/vid_video.php?id=8E388',
'md5': 'dc61d052f205c9bf2da3545691485154',
'info_dict': {
'id': '8E388',
'ext': 'mp4',
'title': 'Artık Davutoğlu ile devam edemeyiz'
}
}, {
# mobile URL
'url': 'http://odatv.com/mob_video.php?id=8E388',
'only_matching': True,
}, {
# no video
'url': 'http://odatv.com/mob_video.php?id=8E900',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
no_video = 'NO VIDEO!' in webpage
video_url = self._search_regex(
r'mp4\s*:\s*(["\'])(?P<url>http.+?)\1', webpage, 'video url',
default=None if no_video else NO_DEFAULT, group='url')
if no_video:
raise ExtractorError('Video %s does not exist' % video_id, expected=True)
return {
'id': video_id,
'url': video_url,
'title': remove_start(self._og_search_title(webpage), 'Video: '),
'thumbnail': self._og_search_thumbnail(webpage),
}
| gpl-3.0 |
kaiweifan/vse-lbaas-plugin-poc | quantum/agent/metadata/namespace_proxy.py | 1 | 5567 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Mark McClain, DreamHost
import httplib
import socket
import urlparse
import eventlet
import httplib2
from oslo.config import cfg
import webob
from quantum.agent.linux import daemon
from quantum.common import config
from quantum.common import utils
from quantum.openstack.common import log as logging
from quantum import wsgi
proxy_socket = cfg.StrOpt('metadata_proxy_socket',
default='$state_path/metadata_proxy',
help=_('Location of Metadata Proxy UNIX domain '
'socket'))
cfg.CONF.register_opt(proxy_socket)
LOG = logging.getLogger(__name__)
class UnixDomainHTTPConnection(httplib.HTTPConnection):
"""Connection class for HTTP over UNIX domain socket."""
def __init__(self, host, port=None, strict=None, timeout=None,
proxy_info=None):
httplib.HTTPConnection.__init__(self, host, port, strict)
self.timeout = timeout
def connect(self):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
if self.timeout:
self.sock.settimeout(self.timeout)
self.sock.connect(cfg.CONF.metadata_proxy_socket)
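# Note: the host and port handed to HTTPConnection are effectively ignored
# here; every request is sent over the configured UNIX domain socket.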
class NetworkMetadataProxyHandler(object):
"""Proxy AF_INET metadata request through Unix Domain socket.
The Unix domain socket allows the proxy to access resources that are not
accessible within the isolated tenant context.
"""
def __init__(self, network_id=None, router_id=None):
self.network_id = network_id
self.router_id = router_id
if network_id is None and router_id is None:
msg = _('network_id and router_id are None. One must be provided.')
raise ValueError(msg)
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
LOG.debug(_("Request: %s"), req)
try:
return self._proxy_request(req.remote_addr,
req.path_info,
req.query_string)
except Exception, e:
LOG.exception(_("Unexpected error."))
msg = _('An unknown error has occurred. '
'Please try your request again.')
return webob.exc.HTTPInternalServerError(explanation=unicode(msg))
def _proxy_request(self, remote_address, path_info, query_string):
headers = {
'X-Forwarded-For': remote_address,
}
if self.router_id:
headers['X-Quantum-Router-ID'] = self.router_id
else:
headers['X-Quantum-Network-ID'] = self.network_id
url = urlparse.urlunsplit((
'http',
'169.254.169.254', # a dummy value to make the request proper
path_info,
query_string,
''))
h = httplib2.Http()
resp, content = h.request(
url,
headers=headers,
connection_type=UnixDomainHTTPConnection)
if resp.status == 200:
LOG.debug(resp)
LOG.debug(content)
return content
elif resp.status == 404:
return webob.exc.HTTPNotFound()
elif resp.status == 500:
msg = _(
'Remote metadata server experienced an internal server error.'
)
LOG.debug(msg)
return webob.exc.HTTPInternalServerError(explanation=unicode(msg))
else:
raise Exception(_('Unexpected response code: %s') % resp.status)
class ProxyDaemon(daemon.Daemon):
def __init__(self, pidfile, port, network_id=None, router_id=None):
super(ProxyDaemon, self).__init__(pidfile)
self.network_id = network_id
self.router_id = router_id
self.port = port
def run(self):
handler = NetworkMetadataProxyHandler(
self.network_id,
self.router_id)
proxy = wsgi.Server('quantum-network-metadata-proxy')
proxy.start(handler, self.port)
proxy.wait()
def main():
eventlet.monkey_patch()
opts = [
cfg.StrOpt('network_id'),
cfg.StrOpt('router_id'),
cfg.StrOpt('pid_file'),
cfg.BoolOpt('daemonize', default=True),
cfg.IntOpt('metadata_port',
default=9697,
help=_("TCP Port to listen for metadata server "
"requests.")),
]
cfg.CONF.register_cli_opts(opts)
# Don't get the default configuration file
cfg.CONF(project='quantum', default_config_files=[])
config.setup_logging(cfg.CONF)
utils.log_opt_values(LOG)
proxy = ProxyDaemon(cfg.CONF.pid_file,
cfg.CONF.metadata_port,
network_id=cfg.CONF.network_id,
router_id=cfg.CONF.router_id)
if cfg.CONF.daemonize:
proxy.start()
else:
proxy.run()
| apache-2.0 |
scemama/quantum_package | scripts/generate_h_apply.py | 1 | 14997 | #!/usr/bin/env python2
import os
keywords = """
check_double_excitation
copy_buffer
declarations
decls_main
deinit_thread
init_main
filter_integrals
filter2p
filter2h2p_double
filter2h2p_single
filter1h
filter1p
only_2p_single
only_2p_double
only_2h_single
only_2h_double
only_1h_single
only_1h_double
only_1p_single
only_1p_double
only_2h1p_single
only_2h1p_double
filter_only_1h1p_single
filter_only_1h1p_double
filter_only_1h2p_single
filter_only_1h2p_double
filter_only_2h2p_single
filter_only_2h2p_double
filterhole
filter_only_1h1p_double
filter_only_1h1p_single
filterparticle
filter_vvvv_excitation
finalization
generate_psi_guess
initialization
init_main
init_thread
keys_work
omp_barrier
omp_do
omp_enddo
omp_end_master
omp_end_parallel
omp_master
omp_parallel
only_2p_double
only_2p_single
parameters
params_main
printout_always
printout_now
subroutine
""".split()
class H_apply(object):
def read_template(self):
file = open(os.environ["QP_ROOT"]+'/src/Determinants/H_apply.template.f','r')
self.template = file.read()
file.close()
file = open(os.environ["QP_ROOT"]+'/src/Determinants/H_apply_nozmq.template.f','r')
self.template += file.read()
file.close()
def __init__(self,sub,SingleRef=False,do_mono_exc=True, do_double_exc=True):
self.read_template()
s = {}
for k in keywords:
s[k] = ""
s["subroutine"] = "H_apply_%s"%(sub)
s["params_post"] = ""
self.selection_pt2 = None
self.energy = "CI_electronic_energy"
self.perturbation = None
self.do_double_exc = do_double_exc
# s["omp_parallel"] = """ PROVIDE elec_num_tab
# !$OMP PARALLEL DEFAULT(SHARED) &
# !$OMP PRIVATE(i,j,k,l,keys_out,hole,particle, &
# !$OMP occ_particle,occ_hole,j_a,k_a,other_spin, &
# !$OMP hole_save,ispin,jj,l_a,ib_jb_pairs,array_pairs, &
# !$OMP accu,i_a,hole_tmp,particle_tmp,occ_particle_tmp, &
# !$OMP occ_hole_tmp,key_idx,i_b,j_b,key,N_elec_in_key_part_1,&
# !$OMP N_elec_in_key_hole_1,N_elec_in_key_part_2, &
# !$OMP N_elec_in_key_hole_2,ia_ja_pairs,key_union_hole_part) &
# !$OMP SHARED(key_in,N_int,elec_num_tab,mo_tot_num, &
# !$OMP hole_1, particl_1, hole_2, particl_2, &
# !$OMP elec_alpha_num,i_generator) FIRSTPRIVATE(iproc)"""
# s["omp_end_parallel"] = "!$OMP END PARALLEL"
# s["omp_master"] = "!$OMP MASTER"
# s["omp_end_master"] = "!$OMP END MASTER"
# s["omp_barrier"] = "!$OMP BARRIER"
# s["omp_do"] = "!$OMP DO SCHEDULE (static,1)"
# s["omp_enddo"] = "!$OMP ENDDO"
d = { True : '.True.', False : '.False.'}
s["do_mono_excitations"] = d[do_mono_exc]
s["do_double_excitations"] = d[do_double_exc]
s["keys_work"] += "call fill_H_apply_buffer_no_selection(key_idx,keys_out,N_int,iproc)"
s["filter_integrals"] = "array_pairs = .True."
if SingleRef:
s["filter_integrals"] = """
call get_mo_bielec_integrals_existing_ik(i_a,j_a,mo_tot_num,array_pairs,mo_integrals_map)
"""
s["generate_psi_guess"] = """
! Sort H_jj to find the N_states lowest states
integer :: i
integer, allocatable :: iorder(:)
double precision, allocatable :: H_jj(:)
double precision, external :: diag_h_mat_elem
allocate(H_jj(N_det),iorder(N_det))
!$OMP PARALLEL DEFAULT(NONE) &
!$OMP SHARED(psi_det,N_int,H_jj,iorder,N_det) &
!$OMP PRIVATE(i)
!$OMP DO
do i = 1, N_det
H_jj(i) = diag_h_mat_elem(psi_det(1,1,i),N_int)
iorder(i) = i
enddo
!$OMP END DO
!$OMP END PARALLEL
call dsort(H_jj,iorder,N_det)
do k=1,N_states
psi_coef(iorder(k),k) = 1.d0
enddo
deallocate(H_jj,iorder)
"""
s["size_max"] = "8192"
s["copy_buffer"] = """call copy_H_apply_buffer_to_wf
if (s2_eig) then
call make_s2_eigenfunction
endif
SOFT_TOUCH psi_det psi_coef N_det
"""
s["printout_now"] = """write(6,*) &
100.*float(i_generator)/float(N_det_generators), '% in ', wall_1-wall_0, 's'"""
self.data = s
def __setitem__(self,key,value):
self.data[key] = value
def __getitem__(self,key):
return self.data[key]
def __repr__(self):
buffer = self.template
for key,value in self.data.items():
buffer = buffer.replace('$'+key, value)
return buffer
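# Illustrative sketch (toy template, not the real H_apply.template.f): the
# substitution above is plain text replacement of '$'-prefixed keys, e.g.
# h = H_apply("cisd") # reads the real templates from QP_ROOT
# h.template = "subroutine $subroutine"
# repr(h) # -> "subroutine H_apply_cisd"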
def unset_double_excitations(self):
self["do_double_excitations"] = ".False."
self["check_double_excitation"] = """
check_double_excitation = .False.
"""
def filter_vvvv_excitation(self):
self["filter_vvvv_excitation"] = """
key_union_hole_part = 0_bit_kind
call set_bit_to_integer(i_a,key_union_hole_part,N_int)
call set_bit_to_integer(j_a,key_union_hole_part,N_int)
call set_bit_to_integer(i_b,key_union_hole_part,N_int)
call set_bit_to_integer(j_b,key_union_hole_part,N_int)
do jtest_vvvv = 1, N_int
if(iand(key_union_hole_part(jtest_vvvv),virt_bitmask(jtest_vvvv,1)).ne.key_union_hole_part(jtest_vvvv))then
b_cycle = .False.
endif
enddo
if(b_cycle) cycle
"""
def set_filter_holes(self):
self["filterhole"] = """
if(iand(ibset(0_bit_kind,j),hole(k,other_spin)).eq.0_bit_kind )cycle
"""
def set_filter_particl(self):
self["filterparticle"] = """
if(iand(ibset(0_bit_kind,j_a),hole(k_a,other_spin)).eq.0_bit_kind )cycle
"""
def filter_1h(self):
self["filter1h"] = """
! ! DIR$ FORCEINLINE
if (is_a_1h(hole)) cycle
"""
def filter_2p(self):
self["filter2p"] = """
! ! DIR$ FORCEINLINE
if (is_a_2p(hole)) cycle
"""
def filter_1p(self):
self["filter1p"] = """
! ! DIR$ FORCEINLINE
if (is_a_1p(hole)) cycle
"""
def filter_only_2h(self):
self["only_2h_single"] = """
! ! DIR$ FORCEINLINE
if (is_a_2h(hole).eqv. .False.) cycle
"""
self["only_2h_double"] = """
! ! DIR$ FORCEINLINE
if ( is_a_2h(key).eqv. .False. )cycle
"""
def filter_only_1h(self):
self["only_1h_single"] = """
! ! DIR$ FORCEINLINE
if (is_a_1h(hole) .eqv. .False.) cycle
"""
self["only_1h_double"] = """
! ! DIR$ FORCEINLINE
if (is_a_1h(key) .eqv. .False.) cycle
"""
def filter_only_1p(self):
self["only_1p_single"] = """
! ! DIR$ FORCEINLINE
if ( is_a_1p(hole) .eqv. .False.) cycle
"""
self["only_1p_double"] = """
! ! DIR$ FORCEINLINE
if ( is_a_1p(key) .eqv. .False.) cycle
"""
def filter_only_2h1p(self):
self["only_2h1p_single"] = """
! ! DIR$ FORCEINLINE
if ( is_a_2h1p(hole) .eqv. .False.) cycle
"""
self["only_2h1p_double"] = """
! ! DIR$ FORCEINLINE
if (is_a_2h1p(key) .eqv. .False.) cycle
"""
def filter_only_2p(self):
self["only_2p_single"] = """
! ! DIR$ FORCEINLINE
if (is_a_2p(hole).eqv. .False.) cycle
"""
self["only_2p_double"] = """
! ! DIR$ FORCEINLINE
if (is_a_2p(key).eqv. .False.) cycle
"""
def filter_only_1h1p(self):
self["filter_only_1h1p_single"] = """
! ! DIR$ FORCEINLINE
if (is_a_1h1p(hole).eqv..False.) cycle
"""
self["filter_only_1h1p_double"] = """
! ! DIR$ FORCEINLINE
if (is_a_1h1p(key).eqv..False.) cycle
"""
def filter_only_2h2p(self):
self["filter_only_2h2p_single"] = """
! ! DIR$ FORCEINLINE
if (is_a_two_holes_two_particles(hole).eqv..False.) cycle
"""
self["filter_only_2h2p_double"] = """
! ! DIR$ FORCEINLINE
if (is_a_two_holes_two_particles(key).eqv..False.) cycle
"""
def filter_only_1h2p(self):
self["filter_only_1h2p_single"] = """
! ! DIR$ FORCEINLINE
if (is_a_1h2p(hole).eqv..False.) cycle
"""
self["filter_only_1h2p_double"] = """
! ! DIR$ FORCEINLINE
if (is_a_1h2p(key).eqv..False.) cycle
"""
def set_filter_2h_2p(self):
self["filter2h2p_double"] = """
if (is_a_two_holes_two_particles(key)) cycle
"""
self["filter2h2p_single"] = """
if (is_a_two_holes_two_particles(hole)) cycle
"""
def set_perturbation(self,pert):
if self.perturbation is not None:
raise
self.perturbation = pert
if pert is not None:
self.data["parameters"] = ",sum_e_2_pert_in,sum_norm_pert_in,sum_H_pert_diag_in,N_st,Nint"
self.data["declarations"] = """
integer, intent(in) :: N_st,Nint
double precision, intent(inout) :: sum_e_2_pert_in(N_st)
double precision, intent(inout) :: sum_norm_pert_in(N_st)
double precision, intent(inout) :: sum_H_pert_diag_in(N_st)
double precision :: sum_e_2_pert(N_st)
double precision :: sum_norm_pert(N_st)
double precision :: sum_H_pert_diag(N_st)
double precision, allocatable :: e_2_pert_buffer(:,:)
double precision, allocatable :: coef_pert_buffer(:,:)
ASSERT (Nint == N_int)
"""
self.data["init_thread"] = """
allocate (e_2_pert_buffer(N_st,size_max), coef_pert_buffer(N_st,size_max))
do k=1,N_st
sum_e_2_pert(k) = 0.d0
sum_norm_pert(k) = 0.d0
sum_H_pert_diag(k) = 0.d0
enddo
"""
self.data["deinit_thread"] = """
! OMP CRITICAL
do k=1,N_st
sum_e_2_pert_in(k) = sum_e_2_pert_in(k) + sum_e_2_pert(k)
sum_norm_pert_in(k) = sum_norm_pert_in(k) + sum_norm_pert(k)
sum_H_pert_diag_in(k) = sum_H_pert_diag_in(k) + sum_H_pert_diag(k)
enddo
! OMP END CRITICAL
deallocate (e_2_pert_buffer, coef_pert_buffer)
"""
self.data["size_max"] = "8192"
self.data["initialization"] = """
PROVIDE psi_selectors_coef psi_selectors E_corr_per_selectors psi_det_sorted_bit
"""
if self.do_double_exc == True:
self.data["keys_work"] = """
! if(check_double_excitation)then
call perturb_buffer_%s(i_generator,keys_out,key_idx,e_2_pert_buffer,coef_pert_buffer,sum_e_2_pert, &
sum_norm_pert,sum_H_pert_diag,N_st,N_int,key_mask,fock_diag_tmp,%s)
"""%(pert,self.energy)
else:
self.data["keys_work"] = """
call perturb_buffer_by_mono_%s(i_generator,keys_out,key_idx,e_2_pert_buffer,coef_pert_buffer,sum_e_2_pert, &
sum_norm_pert,sum_H_pert_diag,N_st,N_int,key_mask,fock_diag_tmp,%s)
"""%(pert,self.energy)
self.data["finalization"] = """
"""
self.data["copy_buffer"] = ""
self.data["generate_psi_guess"] = ""
self.data["params_main"] = "pt2, norm_pert, H_pert_diag, N_st"
self.data["params_post"] = ","+self.data["params_main"] +", N_int"
self.data["decls_main"] = """ integer, intent(in) :: N_st
double precision, intent(inout):: pt2(N_st)
double precision, intent(inout):: norm_pert(N_st)
double precision, intent(inout):: H_pert_diag(N_st)
double precision :: delta_pt2(N_st), norm_psi(N_st), pt2_old(N_st)
PROVIDE N_det_generators
do k=1,N_st
pt2(k) = 0.d0
norm_pert(k) = 0.d0
H_pert_diag(k) = 0.d0
norm_psi(k) = 0.d0
delta_pt2(k) = 0.d0
pt2_old(k) = 0.d0
enddo
write(6,'(A12, 1X, A8, 3(2X, A9), 2X, A8, 2X, A8, 2X, A8)') &
'N_generators', 'Norm', 'Delta PT2', 'PT2', 'Est. PT2', 'secs'
write(6,'(A12, 1X, A8, 3(2X, A9), 2X, A8, 2X, A8, 2X, A8)') &
'============', '========', '=========', '=========', '=========', &
'========='
"""
self.data["printout_always"] = """
do k=1,N_st
norm_psi(k) = norm_psi(k) + psi_coef_generators(i_generator,k)*psi_coef_generators(i_generator,k)
delta_pt2(k) = pt2(k) - pt2_old(k)
enddo
"""
self.data["printout_now"] = """
do k=1,N_st
write(6,'(I10, 4(2X, F9.6), 2X, F8.1)') &
i_generator, norm_psi(k), delta_pt2(k), pt2(k), &
pt2(k)/(norm_psi(k)*norm_psi(k)), &
wall_1-wall_0
pt2_old(k) = pt2(k)
enddo
"""
# self.data["omp_parallel"] += """&
# !$OMP SHARED(N_st) PRIVATE(e_2_pert_buffer,coef_pert_buffer) &
# !$OMP PRIVATE(sum_e_2_pert, sum_norm_pert, sum_H_pert_diag)"""
def set_selection_pt2(self,pert):
if self.selection_pt2 is not None:
raise
self.set_perturbation(pert)
self.selection_pt2 = pert
if pert is not None:
self.data["parameters"] += ",select_max_out"
self.data["declarations"] += """
double precision, intent(inout) :: select_max_out"""
self.data["params_post"] += ", select_max(min(i_generator,size(select_max,1)))"
self.data["size_max"] = "8192"
self.data["copy_buffer"] = """
call copy_H_apply_buffer_to_wf
if (s2_eig) then
call make_s2_eigenfunction
endif
SOFT_TOUCH psi_det psi_coef N_det
selection_criterion_min = min(selection_criterion_min, maxval(select_max))*0.1d0
selection_criterion = selection_criterion_min
call write_double(6,selection_criterion,'Selection criterion')
"""
self.data["keys_work"] = """
e_2_pert_buffer = 0.d0
coef_pert_buffer = 0.d0
""" + self.data["keys_work"]
self.data["keys_work"] += """
call fill_H_apply_buffer_selection(key_idx,keys_out,e_2_pert_buffer, &
coef_pert_buffer,N_st,N_int,iproc,select_max_out)
"""
# self.data["omp_parallel"] += """&
# !$OMP REDUCTION (max:select_max_out)"""
def unset_openmp(self):
for k in keywords:
if k.startswith("omp_"):
self[k] = ""
class H_apply_zmq(H_apply):
def read_template(self):
file = open(os.environ["QP_ROOT"]+'/src/Determinants/H_apply.template.f','r')
self.template = file.read()
file.close()
file = open(os.environ["QP_ROOT"]+'/src/Determinants/H_apply_zmq.template.f','r')
self.template += file.read()
file.close()
def set_perturbation(self,pert):
H_apply.set_perturbation(self,pert)
self.data["printout_now"] = ""
self.data["printout_always"] = ""
self.data["decls_main"] = """ integer, intent(in) :: N_st
double precision, intent(inout):: pt2(N_st)
double precision, intent(inout):: norm_pert(N_st)
double precision, intent(inout):: H_pert_diag(N_st)
double precision :: delta_pt2(N_st), norm_psi(N_st), pt2_old(N_st)
PROVIDE N_det_generators
do k=1,N_st
pt2(k) = 0.d0
norm_pert(k) = 0.d0
H_pert_diag(k) = 0.d0
norm_psi(k) = 0.d0
energy(k) = %s(k)
enddo
""" % (self.energy)
self.data["copy_buffer"] = """
do i=1,N_det_generators
do k=1,N_st
pt2(k) = pt2(k) + pt2_generators(k,i)
norm_pert(k) = norm_pert(k) + norm_pert_generators(k,i)
H_pert_diag(k) = H_pert_diag(k) + H_pert_diag_generators(k,i)
enddo
enddo
"""
def set_selection_pt2(self,pert):
H_apply.set_selection_pt2(self,pert)
| agpl-3.0 |
t794104/ansible | lib/ansible/modules/cloud/azure/azure_rm_mysqlserver.py | 25 | 13384 | #!/usr/bin/python
#
# Copyright (c) 2017 Zim Kalinowski, <zikalino@microsoft.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: azure_rm_mysqlserver
version_added: "2.5"
short_description: Manage MySQL Server instance
description:
- Create, update and delete an instance of MySQL Server.
options:
resource_group:
description:
- The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
required: True
name:
description:
- The name of the server.
required: True
sku:
description:
- The SKU (pricing tier) of the server.
suboptions:
name:
description:
- The name of the sku, typically, tier + family + cores, for example C(B_Gen4_1), C(GP_Gen5_8).
tier:
description:
- The tier of the particular SKU, for example C(Basic).
choices:
- basic
- standard
capacity:
description:
- The scale up/out capacity, representing server's compute units.
size:
description:
- The size code, to be interpreted by resource as appropriate.
location:
description:
- Resource location. If not set, location from the resource group will be used as default.
storage_mb:
description:
- The maximum storage allowed for a server.
type: int
version:
description:
- Server version.
choices:
- 5.6
- 5.7
enforce_ssl:
description:
- Enable SSL enforcement.
type: bool
default: False
admin_username:
description:
- The administrator's login name of a server. Can only be specified when the server is being created (and is required for creation).
admin_password:
description:
- The password of the administrator login.
create_mode:
description:
- Create mode of SQL Server.
default: Default
state:
description:
- Assert the state of the MySQL Server. Use C(present) to create or update a server and C(absent) to delete it.
default: present
choices:
- absent
- present
extends_documentation_fragment:
- azure
- azure_tags
author:
- Zim Kalinowski (@zikalino)
'''
EXAMPLES = '''
- name: Create (or update) MySQL Server
azure_rm_mysqlserver:
resource_group: myResourceGroup
name: testserver
sku:
name: B_Gen5_1
tier: Basic
location: eastus
storage_mb: 1024
enforce_ssl: True
version: 5.6
admin_username: cloudsa
admin_password: password
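# Deletion sketch (hypothetical values; parameters as documented above):
- name: Delete MySQL Server
azure_rm_mysqlserver:
resource_group: myResourceGroup
name: testserver
state: absent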
'''
RETURN = '''
id:
description:
- Resource ID.
returned: always
type: str
sample: /subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.DBforMySQL/servers/mysqlsrv1b6dd89593
version:
description:
- Server version. Possible values include C(5.6), C(5.7).
returned: always
type: str
sample: 5.6
state:
description:
- A state of a server that is visible to user. Possible values include C(Ready), C(Dropping), C(Disabled).
returned: always
type: str
sample: Ready
fully_qualified_domain_name:
description:
- The fully qualified domain name of a server.
returned: always
type: str
sample: mysqlsrv1b6dd89593.mysql.database.azure.com
'''
import time
from ansible.module_utils.azure_rm_common import AzureRMModuleBase
try:
from azure.mgmt.rdbms.mysql import MySQLManagementClient
from msrestazure.azure_exceptions import CloudError
from msrest.polling import LROPoller
from msrest.serialization import Model
except ImportError:
# This is handled in azure_rm_common
pass
class Actions:
NoAction, Create, Update, Delete = range(4)
class AzureRMMySqlServers(AzureRMModuleBase):
"""Configuration class for an Azure RM MySQL Server resource"""
def __init__(self):
self.module_arg_spec = dict(
resource_group=dict(
type='str',
required=True
),
name=dict(
type='str',
required=True
),
sku=dict(
type='dict'
),
location=dict(
type='str'
),
storage_mb=dict(
type='int'
),
version=dict(
type='str',
choices=['5.6', '5.7']
),
enforce_ssl=dict(
type='bool',
default=False
),
create_mode=dict(
type='str',
default='Default'
),
admin_username=dict(
type='str'
),
admin_password=dict(
type='str',
no_log=True
),
state=dict(
type='str',
default='present',
choices=['present', 'absent']
)
)
self.resource_group = None
self.name = None
self.parameters = dict()
self.tags = None
self.results = dict(changed=False)
self.state = None
self.to_do = Actions.NoAction
super(AzureRMMySqlServers, self).__init__(derived_arg_spec=self.module_arg_spec,
supports_check_mode=True,
supports_tags=True)
def exec_module(self, **kwargs):
"""Main module execution method"""
for key in list(self.module_arg_spec.keys()) + ['tags']:
if hasattr(self, key):
setattr(self, key, kwargs[key])
elif kwargs[key] is not None:
if key == "sku":
ev = kwargs[key]
if 'tier' in ev:
if ev['tier'] == 'basic':
ev['tier'] = 'Basic'
elif ev['tier'] == 'standard':
ev['tier'] = 'Standard'
self.parameters["sku"] = ev
elif key == "location":
self.parameters["location"] = kwargs[key]
elif key == "storage_mb":
self.parameters.setdefault("properties", {}).setdefault("storage_profile", {})["storage_mb"] = kwargs[key]
elif key == "version":
self.parameters.setdefault("properties", {})["version"] = kwargs[key]
elif key == "enforce_ssl":
self.parameters.setdefault("properties", {})["ssl_enforcement"] = 'Enabled' if kwargs[key] else 'Disabled'
elif key == "create_mode":
self.parameters.setdefault("properties", {})["create_mode"] = kwargs[key]
elif key == "admin_username":
self.parameters.setdefault("properties", {})["administrator_login"] = kwargs[key]
elif key == "admin_password":
self.parameters.setdefault("properties", {})["administrator_login_password"] = kwargs[key]
old_response = None
response = None
resource_group = self.get_resource_group(self.resource_group)
if "location" not in self.parameters:
self.parameters["location"] = resource_group.location
old_response = self.get_mysqlserver()
if not old_response:
self.log("MySQL Server instance doesn't exist")
if self.state == 'absent':
self.log("Old instance didn't exist")
else:
self.to_do = Actions.Create
else:
self.log("MySQL Server instance already exists")
if self.state == 'absent':
self.to_do = Actions.Delete
elif self.state == 'present':
self.log("Need to check if MySQL Server instance has to be deleted or may be updated")
update_tags, newtags = self.update_tags(old_response.get('tags', {}))
if update_tags:
self.tags = newtags
self.to_do = Actions.Update
if (self.to_do == Actions.Create) or (self.to_do == Actions.Update):
self.log("Need to Create / Update the MySQL Server instance")
if self.check_mode:
self.results['changed'] = True
return self.results
response = self.create_update_mysqlserver()
if not old_response:
self.results['changed'] = True
else:
self.results['changed'] = old_response.__ne__(response)
self.log("Creation / Update done")
elif self.to_do == Actions.Delete:
self.log("MySQL Server instance deleted")
self.results['changed'] = True
if self.check_mode:
return self.results
self.delete_mysqlserver()
# make sure the instance is actually deleted; for some Azure resources, the instance hangs around
# for some time after deletion -- this should really be fixed in Azure
while self.get_mysqlserver():
time.sleep(20)
else:
self.log("MySQL Server instance unchanged")
self.results['changed'] = False
response = old_response
if response:
self.results["id"] = response["id"]
self.results["version"] = response["version"]
self.results["state"] = response["user_visible_state"]
self.results["fully_qualified_domain_name"] = response["fully_qualified_domain_name"]
return self.results
def create_update_mysqlserver(self):
'''
Creates or updates MySQL Server with the specified configuration.
:return: deserialized MySQL Server instance state dictionary
'''
self.log("Creating / Updating the MySQL Server instance {0}".format(self.name))
try:
self.parameters['tags'] = self.tags
if self.to_do == Actions.Create:
response = self.mysql_client.servers.create(resource_group_name=self.resource_group,
server_name=self.name,
parameters=self.parameters)
else:
# structure of parameters for update must be changed
self.parameters.update(self.parameters.pop("properties", {}))
response = self.mysql_client.servers.update(resource_group_name=self.resource_group,
server_name=self.name,
parameters=self.parameters)
if isinstance(response, LROPoller):
response = self.get_poller_result(response)
except CloudError as exc:
self.log('Error attempting to create the MySQL Server instance.')
self.fail("Error creating the MySQL Server instance: {0}".format(str(exc)))
return response.as_dict()
def delete_mysqlserver(self):
'''
Deletes specified MySQL Server instance in the specified subscription and resource group.
:return: True
'''
self.log("Deleting the MySQL Server instance {0}".format(self.name))
try:
response = self.mysql_client.servers.delete(resource_group_name=self.resource_group,
server_name=self.name)
except CloudError as e:
self.log('Error attempting to delete the MySQL Server instance.')
self.fail("Error deleting the MySQL Server instance: {0}".format(str(e)))
return True
def get_mysqlserver(self):
'''
Gets the properties of the specified MySQL Server.
:return: deserialized MySQL Server instance state dictionary
'''
self.log("Checking if the MySQL Server instance {0} is present".format(self.name))
found = False
try:
response = self.mysql_client.servers.get(resource_group_name=self.resource_group,
server_name=self.name)
found = True
self.log("Response : {0}".format(response))
self.log("MySQL Server instance : {0} found".format(response.name))
except CloudError as e:
self.log('Did not find the MySQL Server instance.')
if found is True:
return response.as_dict()
return False
def main():
"""Main execution"""
AzureRMMySqlServers()
if __name__ == '__main__':
main()
| gpl-3.0 |
eamuntz/Django-Tut | env/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py | 1093 | 8936 | # Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
# Copyright 2009 Raymond Hettinger, released under the MIT License.
# http://code.activestate.com/recipes/576693/
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
'Dictionary that remembers insertion order'
# An inherited dict maps keys to values.
# The inherited dict provides __getitem__, __len__, __contains__, and get.
# The remaining methods are order-aware.
# Big-O running times for all methods are the same as for regular dictionaries.
# The internal self.__map dictionary maps keys to links in a doubly linked list.
# The circular doubly linked list starts and ends with a sentinel element.
# The sentinel element never gets deleted (this simplifies the algorithm).
# Each link is stored as a list of length three: [PREV, NEXT, KEY].
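# Illustrative sketch: after od['a'] = 1 and od['b'] = 2, the circular list is
# root <-> [root, link_b, 'a'] <-> [link_a, root, 'b'] <-> root
# with self.__map == {'a': link_a, 'b': link_b}.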
def __init__(self, *args, **kwds):
'''Initialize an ordered dictionary. Signature is the same as for
regular dictionaries, but keyword arguments are not recommended
because their insertion order is arbitrary.
'''
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__root
except AttributeError:
self.__root = root = [] # sentinel node
root[:] = [root, root, None]
self.__map = {}
self.__update(*args, **kwds)
def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
'od.__setitem__(i, y) <==> od[i]=y'
# Setting a new item creates a new link which goes at the end of the linked
# list, and the inherited dictionary is updated with the new key/value pair.
if key not in self:
root = self.__root
last = root[0]
last[1] = root[0] = self.__map[key] = [last, root, key]
dict_setitem(self, key, value)
def __delitem__(self, key, dict_delitem=dict.__delitem__):
'od.__delitem__(y) <==> del od[y]'
# Deleting an existing item uses self.__map to find the link which is
# then removed by updating the links in the predecessor and successor nodes.
dict_delitem(self, key)
link_prev, link_next, key = self.__map.pop(key)
link_prev[1] = link_next
link_next[0] = link_prev
def __iter__(self):
'od.__iter__() <==> iter(od)'
root = self.__root
curr = root[1]
while curr is not root:
yield curr[2]
curr = curr[1]
def __reversed__(self):
'od.__reversed__() <==> reversed(od)'
root = self.__root
curr = root[0]
while curr is not root:
yield curr[2]
curr = curr[0]
def clear(self):
'od.clear() -> None. Remove all items from od.'
try:
for node in self.__map.itervalues():
del node[:]
root = self.__root
root[:] = [root, root, None]
self.__map.clear()
except AttributeError:
pass
dict.clear(self)
def popitem(self, last=True):
'''od.popitem() -> (k, v), return and remove a (key, value) pair.
Pairs are returned in LIFO order if last is true or FIFO order if false.
'''
if not self:
raise KeyError('dictionary is empty')
root = self.__root
if last:
link = root[0]
link_prev = link[0]
link_prev[1] = root
root[0] = link_prev
else:
link = root[1]
link_next = link[1]
root[1] = link_next
link_next[0] = root
key = link[2]
del self.__map[key]
value = dict.pop(self, key)
return key, value
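# Example (illustrative):
# od = OrderedDict([('a', 1), ('b', 2), ('c', 3)])
# od.popitem() # ('c', 3) -- LIFO by default
# od.popitem(last=False) # ('a', 1) -- FIFO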
# -- the following methods do not depend on the internal structure --
def keys(self):
'od.keys() -> list of keys in od'
return list(self)
def values(self):
'od.values() -> list of values in od'
return [self[key] for key in self]
def items(self):
'od.items() -> list of (key, value) pairs in od'
return [(key, self[key]) for key in self]
def iterkeys(self):
'od.iterkeys() -> an iterator over the keys in od'
return iter(self)
def itervalues(self):
'od.itervalues() -> an iterator over the values in od'
for k in self:
yield self[k]
def iteritems(self):
'od.iteritems() -> an iterator over the (key, value) items in od'
for k in self:
yield (k, self[k])
def update(*args, **kwds):
'''od.update(E, **F) -> None. Update od from dict/iterable E and F.
If E is a dict instance, does: for k in E: od[k] = E[k]
If E has a .keys() method, does: for k in E.keys(): od[k] = E[k]
Or if E is an iterable of items, does: for k, v in E: od[k] = v
In either case, this is followed by: for k, v in F.items(): od[k] = v
'''
if len(args) > 2:
raise TypeError('update() takes at most 2 positional '
'arguments (%d given)' % (len(args),))
elif not args:
raise TypeError('update() takes at least 1 argument (0 given)')
self = args[0]
# Make progressively weaker assumptions about "other"
other = ()
if len(args) == 2:
other = args[1]
if isinstance(other, dict):
for key in other:
self[key] = other[key]
elif hasattr(other, 'keys'):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
__update = update # let subclasses override update without breaking __init__
__marker = object()
def pop(self, key, default=__marker):
'''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
if key in self:
result = self[key]
del self[key]
return result
if default is self.__marker:
raise KeyError(key)
return default
def setdefault(self, key, default=None):
'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
if key in self:
return self[key]
self[key] = default
return default
def __repr__(self, _repr_running={}):
'od.__repr__() <==> repr(od)'
call_key = id(self), _get_ident()
if call_key in _repr_running:
return '...'
_repr_running[call_key] = 1
try:
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
finally:
del _repr_running[call_key]
def __reduce__(self):
'Return state information for pickling'
items = [[k, self[k]] for k in self]
inst_dict = vars(self).copy()
for k in vars(OrderedDict()):
inst_dict.pop(k, None)
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def copy(self):
'od.copy() -> a shallow copy of od'
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
'''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
and values equal to v (which defaults to None).
'''
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
'''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
while comparison to a regular mapping is order-insensitive.
'''
if isinstance(other, OrderedDict):
return len(self)==len(other) and self.items() == other.items()
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
# -- the following methods are only used in Python 2.7 --
def viewkeys(self):
"od.viewkeys() -> a set-like object providing a view on od's keys"
return KeysView(self)
def viewvalues(self):
"od.viewvalues() -> an object providing a view on od's values"
return ValuesView(self)
def viewitems(self):
"od.viewitems() -> a set-like object providing a view on od's items"
return ItemsView(self)
| mit |
cfarquhar/rpc-openstack | hacking/setup.py | 9 | 1326 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import setuptools
version = '1.0.0'
setuptools.setup(
name='rpco-hacking-checks',
author='Rackspace Private Cloud',
description='Hacking/Flake8 checks for rpc-openstack',
version=version,
install_requires=['hacking'],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
],
py_modules=['rpco_checks'],
provides=['rpco_checks'],
entry_points={
'flake8.extension': [
'rpco.git_title_bug = rpco_checks:OnceGitCheckCommitTitleBug',
('rpco.git_title_length = '
'rpco_checks:OnceGitCheckCommitTitleLength'),
('rpco.git_title_period = '
'rpco_checks:OnceGitCheckCommitTitlePeriodEnding'),
]
},
)
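# For context, a sketch of the check-function convention flake8 expects from
# entry points like the ones above (hypothetical example; the real checks
# live in rpco_checks.py): a callable receives each logical line and yields
# (offset, message) tuples for violations.
#
#     def check_no_print(logical_line):
#         """R001: use logging instead of print."""
#         if logical_line.startswith('print('):
#             yield 0, 'R001: use logging instead of print'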
| apache-2.0 |
Cito/sqlalchemy | test/orm/test_utils.py | 6 | 22402 | from sqlalchemy.testing import assert_raises, assert_raises_message
from sqlalchemy.orm import util as orm_util
from sqlalchemy import Column
from sqlalchemy import util
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import Table
from sqlalchemy.orm import aliased, with_polymorphic, synonym
from sqlalchemy.orm import mapper, create_session, Session
from sqlalchemy.testing import fixtures
from test.orm import _fixtures
from sqlalchemy.testing import eq_, is_
from sqlalchemy.orm.path_registry import PathRegistry, RootRegistry
from sqlalchemy import inspect
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
from sqlalchemy.testing import AssertsCompiledSQL
class AliasedClassTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = 'default'
def _fixture(self, cls, properties={}):
table = Table('point', MetaData(),
Column('id', Integer(), primary_key=True),
Column('x', Integer),
Column('y', Integer))
mapper(cls, table, properties=properties)
return table
def test_simple(self):
class Point(object):
pass
table = self._fixture(Point)
alias = aliased(Point)
assert alias.id
assert alias.x
assert alias.y
assert Point.id.__clause_element__().table is table
assert alias.id.__clause_element__().table is not table
def test_not_instantiatable(self):
class Point(object):
pass
table = self._fixture(Point)
alias = aliased(Point)
assert_raises(TypeError, alias)
def test_instancemethod(self):
class Point(object):
def zero(self):
self.x, self.y = 0, 0
table = self._fixture(Point)
alias = aliased(Point)
assert Point.zero
# TODO: I don't quite understand this
# still
if util.py2k:
assert not getattr(alias, 'zero')
else:
assert getattr(alias, 'zero')
def test_classmethod(self):
class Point(object):
@classmethod
def max_x(cls):
return 100
table = self._fixture(Point)
alias = aliased(Point)
assert Point.max_x
assert alias.max_x
assert Point.max_x() == alias.max_x() == 100
def test_simple_property(self):
class Point(object):
@property
def max_x(self):
return 100
table = self._fixture(Point)
alias = aliased(Point)
assert Point.max_x
assert Point.max_x != 100
assert alias.max_x
assert Point.max_x is alias.max_x
def test_descriptors(self):
class descriptor(object):
def __init__(self, fn):
self.fn = fn
def __get__(self, obj, owner):
if obj is not None:
return self.fn(obj, obj)
else:
return self
def method(self):
return 'method'
class Point(object):
center = (0, 0)
@descriptor
def thing(self, arg):
return arg.center
table = self._fixture(Point)
alias = aliased(Point)
assert Point.thing != (0, 0)
assert Point().thing == (0, 0)
assert Point.thing.method() == 'method'
assert alias.thing != (0, 0)
assert alias.thing.method() == 'method'
def _assert_has_table(self, expr, table):
from sqlalchemy import Column # override testlib's override
for child in expr.get_children():
if isinstance(child, Column):
assert child.table is table
def test_hybrid_descriptor_one(self):
class Point(object):
def __init__(self, x, y):
self.x, self.y = x, y
@hybrid_method
def left_of(self, other):
return self.x < other.x
self._fixture(Point)
alias = aliased(Point)
sess = Session()
self.assert_compile(
sess.query(alias).filter(alias.left_of(Point)),
"SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
"point_1.y AS point_1_y FROM point AS point_1, point "
"WHERE point_1.x < point.x"
)
def test_hybrid_descriptor_two(self):
class Point(object):
def __init__(self, x, y):
self.x, self.y = x, y
@hybrid_property
def double_x(self):
return self.x * 2
self._fixture(Point)
alias = aliased(Point)
eq_(str(Point.double_x), "point.x * :x_1")
eq_(str(alias.double_x), "point_1.x * :x_1")
sess = Session()
self.assert_compile(
sess.query(alias).filter(alias.double_x > Point.x),
"SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
"point_1.y AS point_1_y FROM point AS point_1, point "
"WHERE point_1.x * :x_1 > point.x"
)
def test_hybrid_descriptor_three(self):
class Point(object):
def __init__(self, x, y):
self.x, self.y = x, y
@hybrid_property
def x_alone(self):
return self.x
self._fixture(Point)
alias = aliased(Point)
eq_(str(Point.x_alone), "Point.x")
eq_(str(alias.x_alone), "AliasedClass_Point.x")
assert Point.x_alone is Point.x
eq_(str(alias.x_alone == alias.x), "point_1.x = point_1.x")
a2 = aliased(Point)
eq_(str(a2.x_alone == alias.x), "point_1.x = point_2.x")
sess = Session()
self.assert_compile(
sess.query(alias).filter(alias.x_alone > Point.x),
"SELECT point_1.id AS point_1_id, point_1.x AS point_1_x, "
"point_1.y AS point_1_y FROM point AS point_1, point "
"WHERE point_1.x > point.x"
)
def test_proxy_descriptor_one(self):
class Point(object):
def __init__(self, x, y):
self.x, self.y = x, y
self._fixture(Point, properties={
'x_syn': synonym("x")
})
alias = aliased(Point)
eq_(str(Point.x_syn), "Point.x_syn")
eq_(str(alias.x_syn), "AliasedClass_Point.x_syn")
sess = Session()
self.assert_compile(
sess.query(alias.x_syn).filter(alias.x_syn > Point.x_syn),
"SELECT point_1.x AS point_1_x FROM point AS point_1, point "
"WHERE point_1.x > point.x"
)
class IdentityKeyTest(_fixtures.FixtureTest):
run_inserts = None
def test_identity_key_1(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
key = orm_util.identity_key(User, [1])
eq_(key, (User, (1,)))
key = orm_util.identity_key(User, ident=[1])
eq_(key, (User, (1,)))
def test_identity_key_scalar(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
key = orm_util.identity_key(User, 1)
eq_(key, (User, (1,)))
key = orm_util.identity_key(User, ident=1)
eq_(key, (User, (1,)))
def test_identity_key_2(self):
users, User = self.tables.users, self.classes.User
mapper(User, users)
s = create_session()
u = User(name='u1')
s.add(u)
s.flush()
key = orm_util.identity_key(instance=u)
eq_(key, (User, (u.id,)))
def test_identity_key_3(self):
User, users = self.classes.User, self.tables.users
mapper(User, users)
row = {users.c.id: 1, users.c.name: "Frank"}
key = orm_util.identity_key(User, row=row)
eq_(key, (User, (1,)))
class PathRegistryTest(_fixtures.FixtureTest):
run_setup_mappers = 'once'
run_inserts = None
run_deletes = None
@classmethod
def setup_mappers(cls):
cls._setup_stock_mapping()
def test_root_registry(self):
umapper = inspect(self.classes.User)
is_(
RootRegistry()[umapper],
umapper._path_registry
)
eq_(
RootRegistry()[umapper],
PathRegistry.coerce((umapper,))
)
def test_expand(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
path = PathRegistry.coerce((umapper,))
eq_(
path[umapper.attrs.addresses][amapper]
[amapper.attrs.email_address],
PathRegistry.coerce((umapper, umapper.attrs.addresses,
amapper, amapper.attrs.email_address))
)
def test_entity_boolean(self):
umapper = inspect(self.classes.User)
path = PathRegistry.coerce((umapper,))
is_(bool(path), True)
def test_key_boolean(self):
umapper = inspect(self.classes.User)
path = PathRegistry.coerce((umapper, umapper.attrs.addresses))
is_(bool(path), True)
def test_aliased_class(self):
User = self.classes.User
ua = aliased(User)
ua_insp = inspect(ua)
path = PathRegistry.coerce((ua_insp, ua_insp.mapper.attrs.addresses))
assert path.parent.is_aliased_class
def test_indexed_entity(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
path = PathRegistry.coerce((umapper, umapper.attrs.addresses,
amapper, amapper.attrs.email_address))
is_(path[0], umapper)
is_(path[2], amapper)
def test_indexed_key(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
path = PathRegistry.coerce((umapper, umapper.attrs.addresses,
amapper, amapper.attrs.email_address))
eq_(path[1], umapper.attrs.addresses)
eq_(path[3], amapper.attrs.email_address)
def test_slice(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
path = PathRegistry.coerce((umapper, umapper.attrs.addresses,
amapper, amapper.attrs.email_address))
eq_(path[1:3], (umapper.attrs.addresses, amapper))
def test_addition(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((amapper, amapper.attrs.email_address))
eq_(
p1 + p2,
PathRegistry.coerce((umapper, umapper.attrs.addresses,
amapper, amapper.attrs.email_address))
)
def test_length(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
pneg1 = PathRegistry.coerce(())
p0 = PathRegistry.coerce((umapper,))
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((umapper, umapper.attrs.addresses,
amapper, amapper.attrs.email_address))
eq_(len(pneg1), 0)
eq_(len(p0), 1)
eq_(len(p1), 2)
eq_(len(p2), 3)
eq_(len(p3), 4)
eq_(pneg1.length, 0)
eq_(p0.length, 1)
eq_(p1.length, 2)
eq_(p2.length, 3)
eq_(p3.length, 4)
def test_eq(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
u_alias = inspect(aliased(self.classes.User))
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p3 = PathRegistry.coerce((umapper, umapper.attrs.name))
p4 = PathRegistry.coerce((u_alias, umapper.attrs.addresses))
p5 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p6 = PathRegistry.coerce((amapper, amapper.attrs.user, umapper,
umapper.attrs.addresses))
p7 = PathRegistry.coerce((amapper, amapper.attrs.user, umapper,
umapper.attrs.addresses,
amapper, amapper.attrs.email_address))
is_(p1 == p2, True)
is_(p1 == p3, False)
is_(p1 == p4, False)
is_(p1 == p5, False)
is_(p6 == p7, False)
is_(p6 == p7.parent.parent, True)
is_(p1 != p2, False)
is_(p1 != p3, True)
is_(p1 != p4, True)
is_(p1 != p5, True)
def test_contains_mapper(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
assert p1.contains_mapper(umapper)
assert not p1.contains_mapper(amapper)
def test_path(self):
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((amapper, amapper.attrs.email_address))
eq_(
p1.path, (umapper, umapper.attrs.addresses)
)
eq_(
p2.path, (umapper, umapper.attrs.addresses, amapper)
)
eq_(
p3.path, (amapper, amapper.attrs.email_address)
)
def test_registry_set(self):
reg = {}
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((amapper, amapper.attrs.email_address))
p1.set(reg, "p1key", "p1value")
p2.set(reg, "p2key", "p2value")
p3.set(reg, "p3key", "p3value")
eq_(
reg,
{
('p1key', p1.path): 'p1value',
('p2key', p2.path): 'p2value',
('p3key', p3.path): 'p3value',
}
)
def test_registry_get(self):
reg = {}
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((amapper, amapper.attrs.email_address))
reg.update(
{
('p1key', p1.path): 'p1value',
('p2key', p2.path): 'p2value',
('p3key', p3.path): 'p3value',
}
)
eq_(p1.get(reg, "p1key"), "p1value")
eq_(p2.get(reg, "p2key"), "p2value")
eq_(p2.get(reg, "p1key"), None)
eq_(p3.get(reg, "p3key"), "p3value")
eq_(p3.get(reg, "p1key"), None)
def test_registry_contains(self):
reg = {}
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((amapper, amapper.attrs.email_address))
reg.update(
{
('p1key', p1.path): 'p1value',
('p2key', p2.path): 'p2value',
('p3key', p3.path): 'p3value',
}
)
assert p1.contains(reg, "p1key")
assert not p1.contains(reg, "p2key")
assert p3.contains(reg, "p3key")
assert not p2.contains(reg, "fake")
def test_registry_setdefault(self):
reg = {}
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
reg.update(
{
('p1key', p1.path): 'p1value',
}
)
p1.setdefault(reg, "p1key", "p1newvalue_a")
p1.setdefault(reg, "p1key_new", "p1newvalue_b")
p2.setdefault(reg, "p2key", "p2newvalue")
eq_(
reg,
{
('p1key', p1.path): 'p1value',
('p1key_new', p1.path): 'p1newvalue_b',
('p2key', p2.path): 'p2newvalue',
}
)
def test_serialize(self):
User = self.classes.User
Address = self.classes.Address
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper,
amapper.attrs.email_address))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
eq_(
p1.serialize(),
[(User, "addresses"), (Address, "email_address")]
)
eq_(
p2.serialize(),
[(User, "addresses"), (Address, None)]
)
eq_(
p3.serialize(),
[(User, "addresses")]
)
def test_deserialize(self):
User = self.classes.User
Address = self.classes.Address
umapper = inspect(self.classes.User)
amapper = inspect(self.classes.Address)
p1 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper,
amapper.attrs.email_address))
p2 = PathRegistry.coerce((umapper, umapper.attrs.addresses, amapper))
p3 = PathRegistry.coerce((umapper, umapper.attrs.addresses))
eq_(
PathRegistry.deserialize([(User, "addresses"),
(Address, "email_address")]),
p1
)
eq_(
PathRegistry.deserialize([(User, "addresses"), (Address, None)]),
p2
)
eq_(
PathRegistry.deserialize([(User, "addresses")]),
p3
)
from .inheritance import _poly_fixtures
class PathRegistryInhTest(_poly_fixtures._Polymorphic):
run_setup_mappers = 'once'
run_inserts = None
run_deletes = None
def test_plain(self):
Person = _poly_fixtures.Person
Engineer = _poly_fixtures.Engineer
pmapper = inspect(Person)
emapper = inspect(Engineer)
p1 = PathRegistry.coerce((pmapper, emapper.attrs.machines))
# given a mapper and an attribute on a subclass,
# the path converts what you get to be against that subclass
eq_(
p1.path,
(emapper, emapper.attrs.machines)
)
def test_plain_compound(self):
Company = _poly_fixtures.Company
Person = _poly_fixtures.Person
Engineer = _poly_fixtures.Engineer
cmapper = inspect(Company)
pmapper = inspect(Person)
emapper = inspect(Engineer)
p1 = PathRegistry.coerce((cmapper, cmapper.attrs.employees,
pmapper, emapper.attrs.machines))
# given a mapper and an attribute on a subclass,
# the path converts what you get to be against that subclass
eq_(
p1.path,
(cmapper, cmapper.attrs.employees, emapper, emapper.attrs.machines)
)
def test_plain_aliased(self):
Person = _poly_fixtures.Person
Engineer = _poly_fixtures.Engineer
emapper = inspect(Engineer)
p_alias = aliased(Person)
p_alias = inspect(p_alias)
p1 = PathRegistry.coerce((p_alias, emapper.attrs.machines))
# plain AliasedClass - the path keeps that AliasedClass directly
# as is in the path
eq_(
p1.path,
(p_alias, emapper.attrs.machines)
)
def test_plain_aliased_compound(self):
Company = _poly_fixtures.Company
Person = _poly_fixtures.Person
Engineer = _poly_fixtures.Engineer
cmapper = inspect(Company)
emapper = inspect(Engineer)
c_alias = aliased(Company)
p_alias = aliased(Person)
c_alias = inspect(c_alias)
p_alias = inspect(p_alias)
p1 = PathRegistry.coerce((c_alias, cmapper.attrs.employees,
p_alias, emapper.attrs.machines))
# plain AliasedClass - the path keeps that AliasedClass directly
# as is in the path
eq_(
p1.path,
(c_alias, cmapper.attrs.employees, p_alias, emapper.attrs.machines)
)
def test_with_poly_sub(self):
Person = _poly_fixtures.Person
Engineer = _poly_fixtures.Engineer
emapper = inspect(Engineer)
p_poly = with_polymorphic(Person, [Engineer])
e_poly = inspect(p_poly.Engineer)
p_poly = inspect(p_poly)
p1 = PathRegistry.coerce((p_poly, emapper.attrs.machines))
# polymorphic AliasedClass - the path uses _entity_for_mapper()
# to get the most specific sub-entity
eq_(
p1.path,
(e_poly, emapper.attrs.machines)
)
def test_with_poly_base(self):
Person = _poly_fixtures.Person
Engineer = _poly_fixtures.Engineer
pmapper = inspect(Person)
emapper = inspect(Engineer)
p_poly = with_polymorphic(Person, [Engineer])
p_poly = inspect(p_poly)
# "name" is actually on Person, not Engineer
p1 = PathRegistry.coerce((p_poly, emapper.attrs.name))
# polymorphic AliasedClass - because "name" is on Person,
# we get Person, not Engineer
eq_(
p1.path,
(p_poly, pmapper.attrs.name)
)
def test_with_poly_use_mapper(self):
Person = _poly_fixtures.Person
Engineer = _poly_fixtures.Engineer
emapper = inspect(Engineer)
p_poly = with_polymorphic(Person, [Engineer], _use_mapper_path=True)
p_poly = inspect(p_poly)
p1 = PathRegistry.coerce((p_poly, emapper.attrs.machines))
# polymorphic AliasedClass with the "use_mapper_path" flag -
# the AliasedClass acts just like the base mapper
eq_(
p1.path,
(emapper, emapper.attrs.machines)
)
| mit |
hsaputra/tensorflow | tensorflow/python/kernel_tests/unique_op_test.py | 9 | 4967 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.kernels.unique_op."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.platform import test
class UniqueTest(test.TestCase):
def testInt32(self):
x = np.random.randint(2, high=10, size=7000)
with self.test_session() as sess:
y, idx = array_ops.unique(x)
tf_y, tf_idx = sess.run([y, idx])
self.assertEqual(len(x), len(tf_idx))
self.assertEqual(len(tf_y), len(np.unique(x)))
for i in range(len(x)):
self.assertEqual(x[i], tf_y[tf_idx[i]])
def testInt32OutIdxInt64(self):
x = np.random.randint(2, high=10, size=7000)
with self.test_session() as sess:
y, idx = array_ops.unique(x, out_idx=dtypes.int64)
tf_y, tf_idx = sess.run([y, idx])
self.assertEqual(len(x), len(tf_idx))
self.assertEqual(len(tf_y), len(np.unique(x)))
for i in range(len(x)):
self.assertEqual(x[i], tf_y[tf_idx[i]])
def testString(self):
indx = np.random.randint(65, high=122, size=7000)
x = [chr(i) for i in indx]
with self.test_session() as sess:
y, idx = array_ops.unique(x)
tf_y, tf_idx = sess.run([y, idx])
self.assertEqual(len(x), len(tf_idx))
self.assertEqual(len(tf_y), len(np.unique(x)))
for i in range(len(x)):
self.assertEqual(x[i], tf_y[tf_idx[i]].decode('ascii'))
def testInt32Axis(self):
x = np.array([[1, 0, 0], [1, 0, 0], [2, 0, 0]])
with self.test_session() as sess:
y0, idx0 = gen_array_ops.unique_v2(x, axis=[0])
tf_y0, tf_idx0 = sess.run([y0, idx0])
y1, idx1 = gen_array_ops.unique_v2(x, axis=[1])
tf_y1, tf_idx1 = sess.run([y1, idx1])
self.assertAllEqual(tf_y0, np.array([[1, 0, 0], [2, 0, 0]]))
self.assertAllEqual(tf_idx0, np.array([0, 0, 1]))
self.assertAllEqual(tf_y1, np.array([[1, 0], [1, 0], [2, 0]]))
self.assertAllEqual(tf_idx1, np.array([0, 1, 1]))
def testInt32V2(self):
# This test is only temporary; once V2 is used
# by default, the axis argument will be wrapped to allow `axis=None`.
x = np.random.randint(2, high=10, size=7000)
with self.test_session() as sess:
y, idx = gen_array_ops.unique_v2(x, axis=[])
tf_y, tf_idx = sess.run([y, idx])
self.assertEqual(len(x), len(tf_idx))
self.assertEqual(len(tf_y), len(np.unique(x)))
for i in range(len(x)):
self.assertEqual(x[i], tf_y[tf_idx[i]])
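# For intuition, on a small input tf.unique behaves like:
#   unique([1, 1, 2, 4, 4, 4, 7, 8, 8]) -> y = [1, 2, 4, 7, 8],
#   idx = [0, 0, 1, 2, 2, 2, 3, 4, 4], so x[i] == y[idx[i]] for every i,
# which is exactly the invariant the loops above assert.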
class UniqueWithCountsTest(test.TestCase):
def testInt32(self):
x = np.random.randint(2, high=10, size=7000)
with self.test_session() as sess:
y, idx, count = array_ops.unique_with_counts(x)
tf_y, tf_idx, tf_count = sess.run([y, idx, count])
self.assertEqual(len(x), len(tf_idx))
self.assertEqual(len(tf_y), len(np.unique(x)))
for i in range(len(x)):
self.assertEqual(x[i], tf_y[tf_idx[i]])
for value, count in zip(tf_y, tf_count):
self.assertEqual(count, np.sum(x == value))
def testInt32OutIdxInt64(self):
x = np.random.randint(2, high=10, size=7000)
with self.test_session() as sess:
y, idx, count = array_ops.unique_with_counts(x, out_idx=dtypes.int64)
tf_y, tf_idx, tf_count = sess.run([y, idx, count])
self.assertEqual(len(x), len(tf_idx))
self.assertEqual(len(tf_y), len(np.unique(x)))
for i in range(len(x)):
self.assertEqual(x[i], tf_y[tf_idx[i]])
for value, count in zip(tf_y, tf_count):
self.assertEqual(count, np.sum(x == value))
def testString(self):
indx = np.random.randint(65, high=122, size=7000)
x = [chr(i) for i in indx]
with self.test_session() as sess:
y, idx, count = array_ops.unique_with_counts(x)
tf_y, tf_idx, tf_count = sess.run([y, idx, count])
self.assertEqual(len(x), len(tf_idx))
self.assertEqual(len(tf_y), len(np.unique(x)))
for i in range(len(x)):
self.assertEqual(x[i], tf_y[tf_idx[i]].decode('ascii'))
for value, count in zip(tf_y, tf_count):
v = [1 if x[i] == value.decode('ascii') else 0 for i in range(7000)]
self.assertEqual(count, sum(v))
if __name__ == '__main__':
test.main()
| apache-2.0 |
wfxiang08/changes | migrations/versions/403b3fb41569_set_on_delete_cascad.py | 4 | 1688 | """Set ON DELETE CASCADE on Build.*
Revision ID: 403b3fb41569
Revises: 4732741c7696
Create Date: 2013-12-23 16:07:02.202873
"""
# revision identifiers, used by Alembic.
revision = '403b3fb41569'
down_revision = '4732741c7696'
from alembic import op
def upgrade():
op.drop_constraint('build_author_id_fkey', 'build')
op.create_foreign_key('build_author_id_fkey', 'build', 'author', ['author_id'], ['id'], ondelete='CASCADE')
op.drop_constraint('build_change_id_fkey', 'build')
op.create_foreign_key('build_change_id_fkey', 'build', 'change', ['change_id'], ['id'], ondelete='CASCADE')
op.drop_constraint('build_patch_id_fkey', 'build')
op.create_foreign_key('build_patch_id_fkey', 'build', 'patch', ['patch_id'], ['id'], ondelete='CASCADE')
op.drop_constraint('build_project_id_fkey', 'build')
op.create_foreign_key('build_project_id_fkey', 'build', 'project', ['project_id'], ['id'], ondelete='CASCADE')
op.drop_constraint('build_repository_id_fkey', 'build')
op.create_foreign_key('build_repository_id_fkey', 'build', 'repository', ['repository_id'], ['id'], ondelete='CASCADE')
# add missing constraints
op.create_foreign_key('build_family_id_fkey', 'build', 'buildfamily', ['family_id'], ['id'], ondelete='CASCADE')
op.create_foreign_key('build_source_id_fkey', 'build', 'source', ['source_id'], ['id'], ondelete='CASCADE')
op.create_foreign_key('build_parent_id_fkey', 'build', 'build', ['parent_id'], ['id'], ondelete='CASCADE')
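# For reference, each create_foreign_key(..., ondelete='CASCADE') above emits
# DDL along these lines (PostgreSQL syntax; illustrative, not captured output):
#
#   ALTER TABLE build ADD CONSTRAINT build_author_id_fkey
#       FOREIGN KEY (author_id) REFERENCES author (id) ON DELETE CASCADE;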
def downgrade():
op.drop_constraint('build_family_id_fkey', 'build')
op.drop_constraint('build_source_id_fkey', 'build')
op.drop_constraint('build_parent_id_fkey', 'build')
| apache-2.0 |
tensorflow/transform | tensorflow_transform/saved/saved_model_loader_test.py | 1 | 1510 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for saved_model_loader."""
import os
import tempfile
import tensorflow as tf
from tensorflow_transform.saved import saved_transform_io
import unittest
def _create_test_saved_model_dir():
export_path = os.path.join(tempfile.mkdtemp(), 'export')
with tf.compat.v1.Graph().as_default():
with tf.compat.v1.Session().as_default() as session:
input_float = tf.compat.v1.placeholder(tf.float32, shape=[1])
output = (input_float - 2.0) / 5.0
inputs = {'x': input_float}
outputs = {'x_scaled': output}
saved_transform_io.write_saved_transform_from_session(
session, inputs, outputs, export_path)
return export_path
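# The exported transform computes x_scaled = (x - 2.0) / 5.0, so for example
# x = 7.0 maps to x_scaled = 1.0; a future test could load the saved model
# from cls._test_saved_model_dir and assert exactly that.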
class SavedModelLoaderTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls._test_saved_model_dir = _create_test_saved_model_dir()
# This class has no tests at the moment.
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
wfxiang08/sqlalchemy | lib/sqlalchemy/ext/baked.py | 40 | 16224 | # sqlalchemy/ext/baked.py
# Copyright (C) 2005-2015 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Baked query extension.
Provides a creational pattern for the :class:`.query.Query` object which
allows the fully constructed object, Core select statement, and string
compiled result to be fully cached.
"""
from ..orm.query import Query
from ..orm import strategies, attributes, properties, \
strategy_options, util as orm_util, interfaces
from .. import log as sqla_log
from ..sql import util as sql_util
from ..orm import exc as orm_exc
from .. import exc as sa_exc
from .. import util
import copy
import logging
log = logging.getLogger(__name__)
class BakedQuery(object):
"""A builder object for :class:`.query.Query` objects."""
__slots__ = 'steps', '_bakery', '_cache_key', '_spoiled'
def __init__(self, bakery, initial_fn, args=()):
self._cache_key = ()
self._update_cache_key(initial_fn, args)
self.steps = [initial_fn]
self._spoiled = False
self._bakery = bakery
@classmethod
def bakery(cls, size=200):
"""Construct a new bakery."""
_bakery = util.LRUCache(size)
def call(initial_fn, *args):
return cls(_bakery, initial_fn, args)
return call
def _clone(self):
b1 = BakedQuery.__new__(BakedQuery)
b1._cache_key = self._cache_key
b1.steps = list(self.steps)
b1._bakery = self._bakery
b1._spoiled = self._spoiled
return b1
def _update_cache_key(self, fn, args=()):
self._cache_key += (fn.__code__,) + args
def __iadd__(self, other):
if isinstance(other, tuple):
self.add_criteria(*other)
else:
self.add_criteria(other)
return self
def __add__(self, other):
if isinstance(other, tuple):
return self.with_criteria(*other)
else:
return self.with_criteria(other)
def add_criteria(self, fn, *args):
"""Add a criteria function to this :class:`.BakedQuery`.
This is equivalent to using the ``+=`` operator to
modify a :class:`.BakedQuery` in-place.
"""
self._update_cache_key(fn, args)
self.steps.append(fn)
return self
def with_criteria(self, fn, *args):
"""Add a criteria function to a :class:`.BakedQuery` cloned from this one.
This is equivalent to using the ``+`` operator to
produce a new :class:`.BakedQuery` with modifications.
"""
return self._clone().add_criteria(fn, *args)
def for_session(self, session):
"""Return a :class:`.Result` object for this :class:`.BakedQuery`.
This is equivalent to calling the :class:`.BakedQuery` as a
Python callable, e.g. ``result = my_baked_query(session)``.
"""
return Result(self, session)
def __call__(self, session):
return self.for_session(session)
def spoil(self, full=False):
"""Cancel any query caching that will occur on this BakedQuery object.
The BakedQuery can continue to be used normally, however additional
creational functions will not be cached; they will be called
on every invocation.
This is to support the case where a particular step in constructing
a baked query disqualifies the query from being cacheable, such
as a variant that relies upon some uncacheable value.
:param full: if False, only functions added to this
:class:`.BakedQuery` object subsequent to the spoil step will be
non-cached; the state of the :class:`.BakedQuery` up until
this point will be pulled from the cache. If True, then the
entire :class:`.Query` object is built from scratch each
time, with all creational functions being called on each
invocation.
"""
if not full:
_spoil_point = self._clone()
_spoil_point._cache_key += ('_query_only', )
self.steps = [_spoil_point._retrieve_baked_query]
self._spoiled = True
return self
def _retrieve_baked_query(self, session):
query = self._bakery.get(self._cache_key, None)
if query is None:
query = self._as_query(session)
self._bakery[self._cache_key] = query.with_session(None)
return query.with_session(session)
def _bake(self, session):
query = self._as_query(session)
context = query._compile_context()
self._bake_subquery_loaders(session, context)
context.session = None
context.query = query = context.query.with_session(None)
query._execution_options = query._execution_options.union(
{"compiled_cache": self._bakery}
)
# we'll be holding onto the query for some of its state,
# so delete some compilation-use-only attributes that can take up
# space
for attr in (
'_correlate', '_from_obj', '_mapper_adapter_map',
'_joinpath', '_joinpoint'):
query.__dict__.pop(attr, None)
self._bakery[self._cache_key] = context
return context
def _as_query(self, session):
query = self.steps[0](session)
for step in self.steps[1:]:
query = step(query)
return query
def _bake_subquery_loaders(self, session, context):
"""convert subquery eager loaders in the cache into baked queries.
For subquery eager loading to work, all we need here is that the
Query point to the correct session when it is run. However, since
we are "baking" anyway, we may as well also turn the query into
a "baked" query so that we save on performance too.
"""
context.attributes['baked_queries'] = baked_queries = []
for k, v in list(context.attributes.items()):
if isinstance(v, Query):
if 'subquery' in k:
bk = BakedQuery(self._bakery, lambda *args: v)
bk._cache_key = self._cache_key + k
bk._bake(session)
baked_queries.append((k, bk._cache_key, v))
del context.attributes[k]
def _unbake_subquery_loaders(self, session, context, params):
"""Retrieve subquery eager loaders stored by _bake_subquery_loaders
and turn them back into Result objects that will iterate just
like a Query object.
"""
for k, cache_key, query in context.attributes["baked_queries"]:
# bind query per iteration to avoid the late-binding closure pitfall
bk = BakedQuery(self._bakery, lambda sess, query=query: query.with_session(sess))
bk._cache_key = cache_key
context.attributes[k] = bk.for_session(session).params(**params)
class Result(object):
"""Invokes a :class:`.BakedQuery` against a :class:`.Session`.
The :class:`.Result` object is where the actual :class:`.query.Query`
object gets created, or retrieved from the cache,
against a target :class:`.Session`, and is then invoked for results.
"""
__slots__ = 'bq', 'session', '_params'
def __init__(self, bq, session):
self.bq = bq
self.session = session
self._params = {}
def params(self, *args, **kw):
"""Specify parameters to be replaced into the string SQL statement."""
if len(args) == 1:
kw.update(args[0])
elif len(args) > 0:
raise sa_exc.ArgumentError(
"params() takes zero or one positional argument, "
"which is a dictionary.")
self._params.update(kw)
return self
def _as_query(self):
return self.bq._as_query(self.session).params(self._params)
def __str__(self):
return str(self._as_query())
def __iter__(self):
bq = self.bq
if bq._spoiled:
return iter(self._as_query())
baked_context = bq._bakery.get(bq._cache_key, None)
if baked_context is None:
baked_context = bq._bake(self.session)
context = copy.copy(baked_context)
context.session = self.session
context.attributes = context.attributes.copy()
bq._unbake_subquery_loaders(self.session, context, self._params)
context.statement.use_labels = True
if context.autoflush and not context.populate_existing:
self.session._autoflush()
return context.query.params(self._params).\
with_session(self.session)._execute_and_instances(context)
def first(self):
"""Return the first row.
Equivalent to :meth:`.Query.first`.
"""
bq = self.bq.with_criteria(lambda q: q.slice(0, 1))
ret = list(bq.for_session(self.session).params(self._params))
if len(ret) > 0:
return ret[0]
else:
return None
def one(self):
"""Return exactly one result or raise an exception.
Equivalent to :meth:`.Query.one`.
"""
ret = list(self)
l = len(ret)
if l == 1:
return ret[0]
elif l == 0:
raise orm_exc.NoResultFound("No row was found for one()")
else:
raise orm_exc.MultipleResultsFound(
"Multiple rows were found for one()")
def all(self):
"""Return all rows.
Equivalent to :meth:`.Query.all`.
"""
return list(self)
def get(self, ident):
"""Retrieve an object based on identity.
Equivalent to :meth:`.Query.get`.
"""
query = self.bq.steps[0](self.session)
return query._get_impl(ident, self._load_on_ident)
def _load_on_ident(self, query, key):
"""Load the given identity key from the database."""
ident = key[1]
mapper = query._mapper_zero()
_get_clause, _get_params = mapper._get_clause
def setup(query):
_lcl_get_clause = _get_clause
q = query._clone()
q._get_condition()
q._order_by = None
# None present in ident - turn those comparisons
# into "IS NULL"
if None in ident:
nones = set([
_get_params[col].key for col, value in
zip(mapper.primary_key, ident) if value is None
])
_lcl_get_clause = sql_util.adapt_criterion_to_null(
_lcl_get_clause, nones)
_lcl_get_clause = q._adapt_clause(_lcl_get_clause, True, False)
q._criterion = _lcl_get_clause
return q
# cache the query against a key that includes
# which positions in the primary key are NULL
# (remember, we can map to an OUTER JOIN)
bq = self.bq
bq = bq.with_criteria(setup, tuple(elem is None for elem in ident))
params = dict([
(_get_params[primary_key].key, id_val)
for id_val, primary_key in zip(ident, mapper.primary_key)
])
result = list(bq.for_session(self.session).params(**params))
l = len(result)
if l > 1:
raise orm_exc.MultipleResultsFound()
elif l:
return result[0]
else:
return None
def bake_lazy_loaders():
"""Enable the use of baked queries for all lazyloaders systemwide.
This operation should be safe for all lazy loaders, and will reduce
Python overhead for these operations.
"""
strategies.LazyLoader._strategy_keys[:] = []
BakedLazyLoader._strategy_keys[:] = []
properties.RelationshipProperty.strategy_for(
lazy="select")(BakedLazyLoader)
properties.RelationshipProperty.strategy_for(
lazy=True)(BakedLazyLoader)
properties.RelationshipProperty.strategy_for(
lazy="baked_select")(BakedLazyLoader)
def unbake_lazy_loaders():
"""Disable the use of baked queries for all lazyloaders systemwide.
This operation reverts the changes produced by :func:`.bake_lazy_loaders`.
"""
strategies.LazyLoader._strategy_keys[:] = []
BakedLazyLoader._strategy_keys[:] = []
properties.RelationshipProperty.strategy_for(
lazy="select")(strategies.LazyLoader)
properties.RelationshipProperty.strategy_for(
lazy=True)(strategies.LazyLoader)
properties.RelationshipProperty.strategy_for(
lazy="baked_select")(BakedLazyLoader)
assert strategies.LazyLoader._strategy_keys
@sqla_log.class_logger
@properties.RelationshipProperty.strategy_for(lazy="baked_select")
class BakedLazyLoader(strategies.LazyLoader):
def _emit_lazyload(self, session, state, ident_key, passive):
q = BakedQuery(
self.mapper._compiled_cache,
lambda session: session.query(self.mapper))
q.add_criteria(
lambda q: q._adapt_all_clauses()._with_invoke_all_eagers(False),
self.parent_property)
if not self.parent_property.bake_queries:
q.spoil(full=True)
if self.parent_property.secondary is not None:
q.add_criteria(
lambda q:
q.select_from(self.mapper, self.parent_property.secondary))
pending = not state.key
# don't autoflush on pending
if pending or passive & attributes.NO_AUTOFLUSH:
q.add_criteria(lambda q: q.autoflush(False))
if state.load_path:
q.spoil()
q.add_criteria(
lambda q:
q._with_current_path(state.load_path[self.parent_property]))
if state.load_options:
q.spoil()
q.add_criteria(
lambda q: q._conditional_options(*state.load_options))
if self.use_get:
return q(session)._load_on_ident(
session.query(self.mapper), ident_key)
if self.parent_property.order_by:
q.add_criteria(
lambda q:
q.order_by(*util.to_list(self.parent_property.order_by)))
for rev in self.parent_property._reverse_property:
# reverse props that are MANYTOONE are loading *this*
# object from get(), so don't need to eager out to those.
if rev.direction is interfaces.MANYTOONE and \
rev._use_get and \
not isinstance(rev.strategy, strategies.LazyLoader):
q.add_criteria(
lambda q:
q.options(
strategy_options.Load(
rev.parent).baked_lazyload(rev.key)))
lazy_clause, params = self._generate_lazy_clause(state, passive)
if pending:
if orm_util._none_set.intersection(params.values()):
return None
q.add_criteria(lambda q: q.filter(lazy_clause))
result = q(session).params(**params).all()
if self.uselist:
return result
else:
l = len(result)
if l:
if l > 1:
util.warn(
"Multiple rows returned with "
"uselist=False for lazily-loaded attribute '%s' "
% self.parent_property)
return result[0]
else:
return None
@strategy_options.loader_option()
def baked_lazyload(loadopt, attr):
"""Indicate that the given attribute should be loaded using "lazy"
loading with a "baked" query used in the load.
"""
return loadopt.set_relationship_strategy(attr, {"lazy": "baked_select"})
@baked_lazyload._add_unbound_fn
def baked_lazyload(*keys):
return strategy_options._UnboundLoad._from_keys(
strategy_options._UnboundLoad.baked_lazyload, keys, False, {})
@baked_lazyload._add_unbound_all_fn
def baked_lazyload_all(*keys):
return strategy_options._UnboundLoad._from_keys(
strategy_options._UnboundLoad.baked_lazyload, keys, True, {})
baked_lazyload = baked_lazyload._unbound_fn
baked_lazyload_all = baked_lazyload_all._unbound_all_fn
bakery = BakedQuery.bakery
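# Usage sketch for the loader options defined above (illustrative mapping
# names): per query, e.g.
#     session.query(User).options(baked_lazyload(User.addresses))
# or systemwide via bake_lazy_loaders() / unbake_lazy_loaders().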
| mit |
noba3/KoTos | addons/plugin.video.mega/resources/lib/platform_libraries/Linux/32bit/Crypto/SelfTest/Cipher/test_CAST.py | 11 | 1940 | # -*- coding: utf-8 -*-
#
# SelfTest/Cipher/CAST.py: Self-test for the CAST-128 (CAST5) cipher
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Cipher.CAST"""
__revision__ = "$Id$"
# This is a list of (plaintext, ciphertext, key, description) tuples.
test_data = [
# Test vectors from RFC 2144, B.1
('0123456789abcdef', '238b4fe5847e44b2',
'0123456712345678234567893456789a',
'128-bit key'),
('0123456789abcdef', 'eb6a711a2c02271b',
'01234567123456782345',
'80-bit key'),
('0123456789abcdef', '7ac816d16e9b302e',
'0123456712',
'40-bit key'),
]
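# A usage sketch for checking a vector by hand with the PyCrypto API
# (illustrative; the real verification happens via make_block_tests below):
#
#     from binascii import unhexlify
#     from Crypto.Cipher import CAST
#     key = unhexlify('0123456712345678234567893456789a')
#     cipher = CAST.new(key, CAST.MODE_ECB)
#     cipher.encrypt(unhexlify('0123456789abcdef'))  # == unhexlify('238b4fe5847e44b2')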
def get_tests(config={}):
from Crypto.Cipher import CAST
from common import make_block_tests
return make_block_tests(CAST, "CAST", test_data)
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| gpl-2.0 |
theheros/kbengine | kbe/res/scripts/common/Lib/tkinter/tix.py | 3 | 79654 | # -*-mode: python; fill-column: 75; tab-width: 8 -*-
#
# $Id$
#
# Tix.py -- Tix widget wrappers.
#
# For Tix, see http://tix.sourceforge.net
#
# - Sudhir Shenoy (sshenoy@gol.com), Dec. 1995.
# based on an idea of Jean-Marc Lugrin (lugrin@ms.com)
#
# NOTE: In order to minimize changes to Tkinter.py, some of the code here
# (TixWidget.__init__) has been taken from Tkinter (Widget.__init__)
# and will break if there are major changes in Tkinter.
#
# The Tix widgets are represented by a class hierarchy in python with proper
# inheritance of base classes.
#
# As a result after creating a 'w = StdButtonBox', I can write
# w.ok['text'] = 'Who Cares'
# or w.ok['bg'] = w['bg']
# or even w.ok.invoke()
# etc.
#
# Compare the demo tixwidgets.py to the original Tcl program and you will
# appreciate the advantages.
#
from tkinter import *
from tkinter import _flatten, _cnfmerge, _default_root
# WARNING - TkVersion is a limited precision floating point number
if TkVersion < 3.999:
raise ImportError("This version of Tix.py requires Tk 4.0 or higher")
import _tkinter # If this fails your Python may not be configured for Tk
# Some more constants (for consistency with Tkinter)
WINDOW = 'window'
TEXT = 'text'
STATUS = 'status'
IMMEDIATE = 'immediate'
IMAGE = 'image'
IMAGETEXT = 'imagetext'
BALLOON = 'balloon'
AUTO = 'auto'
ACROSSTOP = 'acrosstop'
# A few useful constants for the Grid widget
ASCII = 'ascii'
CELL = 'cell'
COLUMN = 'column'
DECREASING = 'decreasing'
INCREASING = 'increasing'
INTEGER = 'integer'
MAIN = 'main'
MAX = 'max'
REAL = 'real'
ROW = 'row'
S_REGION = 's-region'
X_REGION = 'x-region'
Y_REGION = 'y-region'
# Some constants used by Tkinter dooneevent()
TCL_DONT_WAIT = 1 << 1
TCL_WINDOW_EVENTS = 1 << 2
TCL_FILE_EVENTS = 1 << 3
TCL_TIMER_EVENTS = 1 << 4
TCL_IDLE_EVENTS = 1 << 5
TCL_ALL_EVENTS = 0
# BEWARE - this is implemented by copying some code from the Widget class
# in Tkinter (to override Widget initialization) and is therefore
# liable to break.
import tkinter, os
# Could probably add this to Tkinter.Misc
class tixCommand:
"""The tix commands provide access to miscellaneous elements
of Tix's internal state and the Tix application context.
Most of the information manipulated by these commands pertains
to the application as a whole, or to a screen or
display, rather than to a particular window.
This is a mixin class, assumed to be mixed to Tkinter.Tk
that supports the self.tk.call method.
"""
def tix_addbitmapdir(self, directory):
"""Tix maintains a list of directories under which
the tix_getimage and tix_getbitmap commands will
search for image files. The standard bitmap directory
is $TIX_LIBRARY/bitmaps. The addbitmapdir command
adds directory into this list. By using this
command, the image files of an applications can
also be located using the tix_getimage or tix_getbitmap
command.
"""
return self.tk.call('tix', 'addbitmapdir', directory)
def tix_cget(self, option):
"""Returns the current value of the configuration
option given by option. Option may be any of the
options described in the CONFIGURATION OPTIONS section.
"""
return self.tk.call('tix', 'cget', option)
def tix_configure(self, cnf=None, **kw):
"""Query or modify the configuration options of the Tix application
context. If no option is specified, returns a dictionary all of the
available options. If option is specified with no value, then the
command returns a list describing the one named option (this list
will be identical to the corresponding sublist of the value
returned if no option is specified). If one or more option-value
pairs are specified, then the command modifies the given option(s)
to have the given value(s); in this case the command returns an
empty string. Option may be any of the configuration options.
"""
# Copied from Tkinter.py
if kw:
cnf = _cnfmerge((cnf, kw))
elif cnf:
cnf = _cnfmerge(cnf)
if cnf is None:
cnf = {}
for x in self.tk.split(self.tk.call('tix', 'configure')):
cnf[x[0][1:]] = (x[0][1:],) + x[1:]
return cnf
if isinstance(cnf, str):
x = self.tk.split(self.tk.call('tix', 'configure', '-'+cnf))
return (x[0][1:],) + x[1:]
return self.tk.call(('tix', 'configure') + self._options(cnf))
def tix_filedialog(self, dlgclass=None):
"""Returns the file selection dialog that may be shared among
different calls from this application. This command will create a
file selection dialog widget when it is called the first time. This
dialog will be returned by all subsequent calls to tix_filedialog.
An optional dlgclass parameter can be passed to specified what type
of file selection dialog widget is desired. Possible options are
tixFileSelectDialog or tixExFileSelectDialog.
"""
if dlgclass is not None:
return self.tk.call('tix', 'filedialog', dlgclass)
else:
return self.tk.call('tix', 'filedialog')
def tix_getbitmap(self, name):
"""Locates a bitmap file of the name name.xpm or name in one of the
bitmap directories (see the tix_addbitmapdir command above). By
using tix_getbitmap, you can avoid hard coding the pathnames of the
bitmap files in your application. When successful, it returns the
complete pathname of the bitmap file, prefixed with the character
'@'. The returned value can be used to configure the -bitmap
option of the Tk and Tix widgets.
"""
return self.tk.call('tix', 'getbitmap', name)
def tix_getimage(self, name):
"""Locates an image file of the name name.xpm, name.xbm or name.ppm
in one of the bitmap directories (see the addbitmapdir command
above). If more than one file with the same name (but different
extensions) exist, then the image type is chosen according to the
depth of the X display: xbm images are chosen on monochrome
displays and color images are chosen on color displays. By using
tix_getimage, you can avoid hard coding the pathnames of the
image files in your application. When successful, this command
returns the name of the newly created image, which can be used to
configure the -image option of the Tk and Tix widgets.
"""
return self.tk.call('tix', 'getimage', name)
def tix_option_get(self, name):
"""Gets the options maintained by the Tix
scheme mechanism. Available options include:
active_bg active_fg bg
bold_font dark1_bg dark1_fg
dark2_bg dark2_fg disabled_fg
fg fixed_font font
inactive_bg inactive_fg input1_bg
input2_bg italic_font light1_bg
light1_fg light2_bg light2_fg
menu_font output1_bg output2_bg
select_bg select_fg selector
"""
# could use self.tk.globalgetvar('tixOption', name)
return self.tk.call('tix', 'option', 'get', name)
def tix_resetoptions(self, newScheme, newFontSet, newScmPrio=None):
"""Resets the scheme and fontset of the Tix application to
newScheme and newFontSet, respectively. This affects only those
widgets created after this call. Therefore, it is best to call the
resetoptions command before the creation of any widgets in a Tix
application.
The optional parameter newScmPrio can be given to reset the
priority level of the Tk options set by the Tix schemes.
Because of the way Tk handles the X option database, after Tix has
been has imported and inited, it is not possible to reset the color
schemes and font sets using the tix config command. Instead, the
tix_resetoptions command must be used.
"""
if newScmPrio is not None:
return self.tk.call('tix', 'resetoptions', newScheme, newFontSet, newScmPrio)
else:
return self.tk.call('tix', 'resetoptions', newScheme, newFontSet)
class Tk(tkinter.Tk, tixCommand):
"""Toplevel widget of Tix which represents mostly the main window
of an application. It has an associated Tcl interpreter."""
def __init__(self, screenName=None, baseName=None, className='Tix'):
tkinter.Tk.__init__(self, screenName, baseName, className)
tixlib = os.environ.get('TIX_LIBRARY')
self.tk.eval('global auto_path; lappend auto_path [file dir [info nameof]]')
if tixlib is not None:
self.tk.eval('global auto_path; lappend auto_path {%s}' % tixlib)
self.tk.eval('global tcl_pkgPath; lappend tcl_pkgPath {%s}' % tixlib)
# Load Tix - this should work dynamically or statically
# If it's static, tcl/tix8.1/pkgIndex.tcl should have
# 'load {} Tix'
# If it's dynamic under Unix, tcl/tix8.1/pkgIndex.tcl should have
# 'load libtix8.1.8.3.so Tix'
self.tk.eval('package require Tix')
def destroy(self):
# For safety, remove any delete_window binding before destroy
self.protocol("WM_DELETE_WINDOW", "")
tkinter.Tk.destroy(self)
# The Tix 'tixForm' geometry manager
class Form:
"""The Tix Form geometry manager
Widgets can be arranged by specifying attachments to other widgets.
See Tix documentation for complete details"""
def config(self, cnf={}, **kw):
self.tk.call('tixForm', self._w, *self._options(cnf, kw))
form = config
def __setitem__(self, key, value):
Form.form(self, {key: value})
def check(self):
return self.tk.call('tixForm', 'check', self._w)
def forget(self):
self.tk.call('tixForm', 'forget', self._w)
def grid(self, xsize=0, ysize=0):
if (not xsize) and (not ysize):
x = self.tk.call('tixForm', 'grid', self._w)
y = self.tk.splitlist(x)
z = ()
for x in y:
z = z + (self.tk.getint(x),)
return z
return self.tk.call('tixForm', 'grid', self._w, xsize, ysize)
def info(self, option=None):
if not option:
return self.tk.call('tixForm', 'info', self._w)
if option[0] != '-':
option = '-' + option
return self.tk.call('tixForm', 'info', self._w, option)
def slaves(self):
return [self._nametowidget(x) for x in
self.tk.splitlist(
self.tk.call(
'tixForm', 'slaves', self._w))]
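# A minimal Form usage sketch (attachment options follow the tixForm docs;
# widget names are illustrative):
#
#     ok = Button(top, text='OK')
#     cancel = Button(top, text='Cancel')
#     ok.form(top=0, left=0, right='%50')        # pin to parent edges
#     cancel.form(top=0, left=ok, right='%100')  # attach left side to ok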
tkinter.Widget.__bases__ = tkinter.Widget.__bases__ + (Form,)
class TixWidget(tkinter.Widget):
"""A TixWidget class is used to package all (or most) Tix widgets.
Widget initialization is extended in two ways:
1) It is possible to give a list of options which must be part of
the creation command (so called Tix 'static' options). These cannot be
given as a 'config' command later.
2) It is possible to give the name of an existing TK widget. These are
child widgets created automatically by a Tix mega-widget. The Tk call
to create these widgets is therefore bypassed in TixWidget.__init__
Both options are for use by subclasses only.
"""
def __init__ (self, master=None, widgetName=None,
static_options=None, cnf={}, kw={}):
# Merge keywords and dictionary arguments
if kw:
cnf = _cnfmerge((cnf, kw))
else:
cnf = _cnfmerge(cnf)
# Move static options into extra. static_options must be
# a list of keywords (or None).
extra=()
# 'options' is always a static option
if static_options:
static_options.append('options')
else:
static_options = ['options']
for k,v in list(cnf.items()):
if k in static_options:
extra = extra + ('-' + k, v)
del cnf[k]
self.widgetName = widgetName
Widget._setup(self, master, cnf)
# If widgetName is None, this is a dummy creation call where the
# corresponding Tk widget has already been created by Tix
if widgetName:
self.tk.call(widgetName, self._w, *extra)
# Non-static options - to be done via a 'config' command
if cnf:
Widget.config(self, cnf)
# Dictionary to hold subwidget names for easier access. We can't
# use the children list because the public Tix names may not be the
# same as the pathname component
self.subwidget_list = {}
# We set up an attribute access function so that it is possible to
# do w.ok['text'] = 'Hello' rather than w.subwidget('ok')['text'] = 'Hello'
# when w is a StdButtonBox.
# We can even do w.ok.invoke() because w.ok is subclassed from the
# Button class if you go through the proper constructors
def __getattr__(self, name):
if name in self.subwidget_list:
return self.subwidget_list[name]
raise AttributeError(name)
def set_silent(self, value):
"""Set a variable without calling its action routine"""
self.tk.call('tixSetSilent', self._w, value)
def subwidget(self, name):
"""Return the named subwidget (which must have been created by
the sub-class)."""
n = self._subwidget_name(name)
if not n:
raise TclError("Subwidget " + name + " not child of " + self._name)
# Remove header of name and leading dot
n = n[len(self._w)+1:]
return self._nametowidget(n)
def subwidgets_all(self):
"""Return all subwidgets."""
names = self._subwidget_names()
if not names:
return []
retlist = []
for name in names:
name = name[len(self._w)+1:]
try:
retlist.append(self._nametowidget(name))
except:
# some of the widgets are unknown e.g. border in LabelFrame
pass
return retlist
def _subwidget_name(self,name):
"""Get a subwidget name (returns a String, not a Widget !)"""
try:
return self.tk.call(self._w, 'subwidget', name)
except TclError:
return None
def _subwidget_names(self):
"""Return the name of all subwidgets."""
try:
x = self.tk.call(self._w, 'subwidgets', '-all')
return self.tk.split(x)
except TclError:
return None
def config_all(self, option, value):
"""Set configuration options for all subwidgets (and self)."""
if option == '':
return
elif not isinstance(option, str):
option = repr(option)
if not isinstance(value, str):
value = repr(value)
names = self._subwidget_names()
for name in names:
self.tk.call(name, 'configure', '-' + option, value)
# These are missing from Tkinter
def image_create(self, imgtype, cnf={}, master=None, **kw):
if not master:
master = tkinter._default_root
if not master:
raise RuntimeError('Too early to create image')
if kw and cnf: cnf = _cnfmerge((cnf, kw))
elif kw: cnf = kw
options = ()
for k, v in cnf.items():
if hasattr(v, '__call__'):
v = self._register(v)
options = options + ('-'+k, v)
return master.tk.call(('image', 'create', imgtype,) + options)
def image_delete(self, imgname):
try:
self.tk.call('image', 'delete', imgname)
except TclError:
# May happen if the root was destroyed
pass
# Subwidgets are child widgets created automatically by mega-widgets.
# In python, we have to create these subwidgets manually to mirror their
# existence in Tk/Tix.
class TixSubWidget(TixWidget):
"""Subwidget class.
This is used to mirror child widgets automatically created
by Tix/Tk as part of a mega-widget in Python (which is not informed
of this)"""
def __init__(self, master, name,
destroy_physically=1, check_intermediate=1):
if check_intermediate:
path = master._subwidget_name(name)
try:
path = path[len(master._w)+1:]
plist = path.split('.')
except:
plist = []
if not check_intermediate:
# immediate descendant
TixWidget.__init__(self, master, None, None, {'name' : name})
else:
# Ensure that the intermediate widgets exist
parent = master
for i in range(len(plist) - 1):
n = '.'.join(plist[:i+1])
try:
w = master._nametowidget(n)
parent = w
except KeyError:
# Create the intermediate widget
parent = TixSubWidget(parent, plist[i],
destroy_physically=0,
check_intermediate=0)
# The Tk widget name is in plist, not in name
if plist:
name = plist[-1]
TixWidget.__init__(self, parent, None, None, {'name' : name})
self.destroy_physically = destroy_physically
def destroy(self):
# For some widgets e.g., a NoteBook, when we call destructors,
# we must be careful not to destroy the frame widget since this
# also destroys the parent NoteBook thus leading to an exception
# in Tkinter when it finally calls Tcl to destroy the NoteBook
for c in list(self.children.values()): c.destroy()
if self._name in self.master.children:
del self.master.children[self._name]
if self._name in self.master.subwidget_list:
del self.master.subwidget_list[self._name]
if self.destroy_physically:
# This is bypassed only for a few widgets
self.tk.call('destroy', self._w)
# Useful func. to split Tcl lists and return as a dict. From Tkinter.py
def _lst2dict(lst):
dict = {}
for x in lst:
dict[x[0][1:]] = (x[0][1:],) + x[1:]
return dict
# Useful class to create a display style - later shared by many items.
# Contributed by Steffen Kremser
class DisplayStyle:
"""DisplayStyle - handle configuration options shared by
(multiple) Display Items"""
def __init__(self, itemtype, cnf={}, **kw):
master = _default_root # global from Tkinter
if not master and 'refwindow' in cnf: master=cnf['refwindow']
elif not master and 'refwindow' in kw: master= kw['refwindow']
elif not master: raise RuntimeError("Too early to create display style: no root window")
self.tk = master.tk
self.stylename = self.tk.call('tixDisplayStyle', itemtype,
*self._options(cnf,kw) )
def __str__(self):
return self.stylename
def _options(self, cnf, kw):
if kw and cnf:
cnf = _cnfmerge((cnf, kw))
elif kw:
cnf = kw
opts = ()
for k, v in cnf.items():
opts = opts + ('-'+k, v)
return opts
def delete(self):
self.tk.call(self.stylename, 'delete')
def __setitem__(self,key,value):
self.tk.call(self.stylename, 'configure', '-%s'%key, value)
def config(self, cnf={}, **kw):
return _lst2dict(
self.tk.split(
self.tk.call(
self.stylename, 'configure', *self._options(cnf,kw))))
def __getitem__(self,key):
return self.tk.call(self.stylename, 'cget', '-%s'%key)
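# Illustrative sketch (comments only): creating a DisplayStyle and
# attaching it to an HList item. 'text' is a standard Tix display item
# type; the widget names and option values are assumptions made for
# the example.
#
#   style = DisplayStyle('text', refwindow=hlist, foreground='blue')
#   hlist.item_create('entry1', 0, itemtype='text',
#                     text='styled', style=style)
#   style.delete()   # once no item uses the style any more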
######################################################
### The Tix Widget classes - in alphabetical order ###
######################################################
class Balloon(TixWidget):
"""Balloon help widget.
Subwidget Class
--------- -----
label Label
message Message"""
# FIXME: It should inherit -superclass tixShell
def __init__(self, master=None, cnf={}, **kw):
# static seem to be -installcolormap -initwait -statusbar -cursor
static = ['options', 'installcolormap', 'initwait', 'statusbar',
'cursor']
TixWidget.__init__(self, master, 'tixBalloon', static, cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label',
destroy_physically=0)
self.subwidget_list['message'] = _dummyLabel(self, 'message',
destroy_physically=0)
def bind_widget(self, widget, cnf={}, **kw):
"""Bind balloon widget to another.
One balloon widget may be bound to several widgets at the same time"""
self.tk.call(self._w, 'bind', widget._w, *self._options(cnf, kw))
def unbind_widget(self, widget):
self.tk.call(self._w, 'unbind', widget._w)
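# Illustrative sketch (comments only): binding balloon help to a
# button. Assumes `root` is a Tix-enabled Tk root and `btn` an existing
# widget; the message texts are invented for the example.
#
#   b = Balloon(root)
#   b.bind_widget(btn, balloonmsg='Saves the file',
#                 statusmsg='Write the current buffer to disk')
#   b.unbind_widget(btn)   # detach the help again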
class ButtonBox(TixWidget):
"""ButtonBox - A container for pushbuttons.
Subwidgets are the buttons added with the add method.
"""
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixButtonBox',
['orientation', 'options'], cnf, kw)
def add(self, name, cnf={}, **kw):
"""Add a button with given name to box."""
btn = self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = _dummyButton(self, name)
return btn
def invoke(self, name):
if name in self.subwidget_list:
self.tk.call(self._w, 'invoke', name)
class ComboBox(TixWidget):
"""ComboBox - an Entry field with a dropdown menu. The user can select a
choice by either typing in the entry subwidget or selecting from the
listbox subwidget.
Subwidget Class
--------- -----
entry Entry
arrow Button
slistbox ScrolledListBox
tick Button
cross Button : present if created with the fancy option"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__ (self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixComboBox',
['editable', 'dropdown', 'fancy', 'options'],
cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
'slistbox')
try:
self.subwidget_list['tick'] = _dummyButton(self, 'tick')
self.subwidget_list['cross'] = _dummyButton(self, 'cross')
except TypeError:
# unavailable when -fancy not specified
pass
# align
def add_history(self, str):
self.tk.call(self._w, 'addhistory', str)
def append_history(self, str):
self.tk.call(self._w, 'appendhistory', str)
def insert(self, index, str):
self.tk.call(self._w, 'insert', index, str)
def pick(self, index):
self.tk.call(self._w, 'pick', index)
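# Illustrative sketch (comments only): a ComboBox filled from code.
# The option values and list entries are assumptions for the example.
#
#   cb = ComboBox(root, label='Animal:', dropdown=1, editable=1)
#   for item in ('cat', 'dog', 'fish'):
#       cb.insert('end', item)
#   cb.pick(0)          # select the first entry
#   cb.pack()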
class Control(TixWidget):
"""Control - An entry field with value change arrows. The user can
adjust the value by pressing the two arrow buttons or by entering
the value directly into the entry. The new value will be checked
against the user-defined upper and lower limits.
Subwidget Class
--------- -----
incr Button
decr Button
entry Entry
label Label"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__ (self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixControl', ['options'], cnf, kw)
self.subwidget_list['incr'] = _dummyButton(self, 'incr')
self.subwidget_list['decr'] = _dummyButton(self, 'decr')
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
def decrement(self):
self.tk.call(self._w, 'decr')
def increment(self):
self.tk.call(self._w, 'incr')
def invoke(self):
self.tk.call(self._w, 'invoke')
def update(self):
self.tk.call(self._w, 'update')
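# Illustrative sketch (comments only): a Control used as a bounded
# spinner. -min/-max/-value are standard tixControl options; the
# limits are invented for the example.
#
#   ctl = Control(root, label='Copies:', min=1, max=99, value=1)
#   ctl.increment()     # same effect as pressing the up arrow
#   ctl.pack()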
class DirList(TixWidget):
"""DirList - displays a list view of a directory, its previous
directories and its sub-directories. The user can choose one of
the directories displayed in the list or change to another directory.
Subwidget Class
--------- -----
hlist HList
hsb Scrollbar
vsb Scrollbar"""
# FIXME: It should inherit -superclass tixScrolledHList
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirList', ['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def chdir(self, dir):
self.tk.call(self._w, 'chdir', dir)
class DirTree(TixWidget):
"""DirTree - Directory Listing in a hierarchical view.
Displays a tree view of a directory, its previous directories and its
sub-directories. The user can choose one of the directories displayed
in the list or change to another directory.
Subwidget Class
--------- -----
hlist HList
hsb Scrollbar
vsb Scrollbar"""
# FIXME: It should inherit -superclass tixScrolledHList
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirTree', ['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def chdir(self, dir):
self.tk.call(self._w, 'chdir', dir)
class DirSelectBox(TixWidget):
"""DirSelectBox - Motif style file select box.
It is generally used for
the user to choose a file. FileSelectBox stores the files mostly
recently selected into a ComboBox widget so that they can be quickly
selected again.
Subwidget Class
--------- -----
selection ComboBox
filter ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirSelectBox', ['options'], cnf, kw)
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class ExFileSelectBox(TixWidget):
"""ExFileSelectBox - MS Windows style file select box.
It provides a convenient method for the user to select files.
Subwidget Class
--------- -----
cancel Button
ok Button
hidden Checkbutton
types ComboBox
dir ComboBox
file ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixExFileSelectBox', ['options'], cnf, kw)
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
self.subwidget_list['types'] = _dummyComboBox(self, 'types')
self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['file'] = _dummyComboBox(self, 'file')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
def filter(self):
self.tk.call(self._w, 'filter')
def invoke(self):
self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
class DirSelectDialog(TixWidget):
"""The DirSelectDialog widget presents the directories in the file
system in a dialog window. The user can use this dialog window to
navigate through the file system to select the desired directory.
Subwidgets Class
---------- -----
dirbox DirSelectDialog"""
# FIXME: It should inherit -superclass tixDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixDirSelectDialog',
['options'], cnf, kw)
self.subwidget_list['dirbox'] = _dummyDirSelectBox(self, 'dirbox')
# cancel and ok buttons are missing
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
# Should inherit from a Dialog class
class ExFileSelectDialog(TixWidget):
"""ExFileSelectDialog - MS Windows style file select dialog.
It provides a convenient method for the user to select files.
Subwidgets Class
---------- -----
fsbox ExFileSelectBox"""
# FIXME: It should inherit -superclass tixDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixExFileSelectDialog',
['options'], cnf, kw)
self.subwidget_list['fsbox'] = _dummyExFileSelectBox(self, 'fsbox')
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
class FileSelectBox(TixWidget):
"""ExFileSelectBox - Motif style file select box.
It is generally used for
the user to choose a file. FileSelectBox stores the files mostly
recently selected into a ComboBox widget so that they can be quickly
selected again.
Subwidget Class
--------- -----
selection ComboBox
filter ComboBox
dirlist ScrolledListBox
filelist ScrolledListBox"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileSelectBox', ['options'], cnf, kw)
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')
def apply_filter(self): # name of subwidget is same as command
self.tk.call(self._w, 'filter')
def invoke(self):
self.tk.call(self._w, 'invoke')
# Should inherit from a Dialog class
class FileSelectDialog(TixWidget):
"""FileSelectDialog - Motif style file select dialog.
Subwidgets Class
---------- -----
btns StdButtonBox
fsbox FileSelectBox"""
# FIXME: It should inherit -superclass tixStdDialogShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileSelectDialog',
['options'], cnf, kw)
self.subwidget_list['btns'] = _dummyStdButtonBox(self, 'btns')
self.subwidget_list['fsbox'] = _dummyFileSelectBox(self, 'fsbox')
def popup(self):
self.tk.call(self._w, 'popup')
def popdown(self):
self.tk.call(self._w, 'popdown')
class FileEntry(TixWidget):
"""FileEntry - Entry field with button that invokes a FileSelectDialog.
The user can type in the filename manually. Alternatively, the user can
press the button widget that sits next to the entry, which will bring
up a file selection dialog.
Subwidgets Class
---------- -----
button Button
entry Entry"""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixFileEntry',
['dialogtype', 'options'], cnf, kw)
self.subwidget_list['button'] = _dummyButton(self, 'button')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
def invoke(self):
self.tk.call(self._w, 'invoke')
def file_dialog(self):
# FIXME: return python object
pass
class HList(TixWidget, XView, YView):
"""HList - Hierarchy display widget can be used to display any data
that have a hierarchical structure, for example, file system directory
trees. The list entries are indented and connected by branch lines
according to their places in the hierarchy.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixHList',
['columns', 'options'], cnf, kw)
def add(self, entry, cnf={}, **kw):
return self.tk.call(self._w, 'add', entry, *self._options(cnf, kw))
def add_child(self, parent=None, cnf={}, **kw):
if not parent:
parent = ''
return self.tk.call(
self._w, 'addchild', parent, *self._options(cnf, kw))
def anchor_set(self, entry):
self.tk.call(self._w, 'anchor', 'set', entry)
def anchor_clear(self):
self.tk.call(self._w, 'anchor', 'clear')
def column_width(self, col=0, width=None, chars=None):
if not chars:
return self.tk.call(self._w, 'column', 'width', col, width)
else:
return self.tk.call(self._w, 'column', 'width', col,
'-char', chars)
def delete_all(self):
self.tk.call(self._w, 'delete', 'all')
def delete_entry(self, entry):
self.tk.call(self._w, 'delete', 'entry', entry)
def delete_offsprings(self, entry):
self.tk.call(self._w, 'delete', 'offsprings', entry)
def delete_siblings(self, entry):
self.tk.call(self._w, 'delete', 'siblings', entry)
def dragsite_set(self, index):
self.tk.call(self._w, 'dragsite', 'set', index)
def dragsite_clear(self):
self.tk.call(self._w, 'dragsite', 'clear')
def dropsite_set(self, index):
self.tk.call(self._w, 'dropsite', 'set', index)
def dropsite_clear(self):
self.tk.call(self._w, 'dropsite', 'clear')
def header_create(self, col, cnf={}, **kw):
self.tk.call(self._w, 'header', 'create', col, *self._options(cnf, kw))
def header_configure(self, col, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'header', 'configure', col)))
self.tk.call(self._w, 'header', 'configure', col,
*self._options(cnf, kw))
def header_cget(self, col, opt):
return self.tk.call(self._w, 'header', 'cget', col, opt)
def header_exists(self, col):
return self.tk.call(self._w, 'header', 'exists', col)
def header_delete(self, col):
self.tk.call(self._w, 'header', 'delete', col)
def header_size(self, col):
return self.tk.call(self._w, 'header', 'size', col)
def hide_entry(self, entry):
self.tk.call(self._w, 'hide', 'entry', entry)
def indicator_create(self, entry, cnf={}, **kw):
self.tk.call(
self._w, 'indicator', 'create', entry, *self._options(cnf, kw))
def indicator_configure(self, entry, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'indicator', 'configure', entry)))
self.tk.call(
self._w, 'indicator', 'configure', entry, *self._options(cnf, kw))
def indicator_cget(self, entry, opt):
return self.tk.call(self._w, 'indicator', 'cget', entry, opt)
def indicator_exists(self, entry):
return self.tk.call (self._w, 'indicator', 'exists', entry)
def indicator_delete(self, entry):
self.tk.call(self._w, 'indicator', 'delete', entry)
def indicator_size(self, entry):
return self.tk.call(self._w, 'indicator', 'size', entry)
def info_anchor(self):
return self.tk.call(self._w, 'info', 'anchor')
def info_bbox(self, entry):
return self._getints(
self.tk.call(self._w, 'info', 'bbox', entry)) or None
def info_children(self, entry=None):
c = self.tk.call(self._w, 'info', 'children', entry)
return self.tk.splitlist(c)
def info_data(self, entry):
return self.tk.call(self._w, 'info', 'data', entry)
def info_dragsite(self):
return self.tk.call(self._w, 'info', 'dragsite')
def info_dropsite(self):
return self.tk.call(self._w, 'info', 'dropsite')
def info_exists(self, entry):
return self.tk.call(self._w, 'info', 'exists', entry)
def info_hidden(self, entry):
return self.tk.call(self._w, 'info', 'hidden', entry)
def info_next(self, entry):
return self.tk.call(self._w, 'info', 'next', entry)
def info_parent(self, entry):
return self.tk.call(self._w, 'info', 'parent', entry)
def info_prev(self, entry):
return self.tk.call(self._w, 'info', 'prev', entry)
def info_selection(self):
c = self.tk.call(self._w, 'info', 'selection')
return self.tk.splitlist(c)
def item_cget(self, entry, col, opt):
return self.tk.call(self._w, 'item', 'cget', entry, col, opt)
def item_configure(self, entry, col, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'item', 'configure', entry, col)))
self.tk.call(self._w, 'item', 'configure', entry, col,
*self._options(cnf, kw))
def item_create(self, entry, col, cnf={}, **kw):
self.tk.call(
self._w, 'item', 'create', entry, col, *self._options(cnf, kw))
def item_exists(self, entry, col):
return self.tk.call(self._w, 'item', 'exists', entry, col)
def item_delete(self, entry, col):
self.tk.call(self._w, 'item', 'delete', entry, col)
def entrycget(self, entry, opt):
return self.tk.call(self._w, 'entrycget', entry, opt)
def entryconfigure(self, entry, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'entryconfigure', entry)))
self.tk.call(self._w, 'entryconfigure', entry,
*self._options(cnf, kw))
def nearest(self, y):
return self.tk.call(self._w, 'nearest', y)
def see(self, entry):
self.tk.call(self._w, 'see', entry)
def selection_clear(self, cnf={}, **kw):
self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))
def selection_includes(self, entry):
return self.tk.call(self._w, 'selection', 'includes', entry)
def selection_set(self, first, last=None):
self.tk.call(self._w, 'selection', 'set', first, last)
def show_entry(self, entry):
return self.tk.call(self._w, 'show', 'entry', entry)
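# Illustrative sketch (comments only): building a two-level hierarchy
# in an HList. Entry paths use '.' as the separator by default; the
# entry names below are invented for the example.
#
#   h = HList(root, columns=1)
#   h.add('fruit', text='Fruit')
#   h.add('fruit.apple', text='Apple')
#   h.add('fruit.pear', text='Pear')
#   h.selection_set('fruit.apple')
#   h.pack(fill='both', expand=1)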
class InputOnly(TixWidget):
"""InputOnly - Invisible widget. Unix only.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixInputOnly', None, cnf, kw)
class LabelEntry(TixWidget):
"""LabelEntry - Entry field with label. Packages an entry widget
and a label into one mega widget. It can be used to simplify
the creation of ``entry-form'' type interfaces.
Subwidgets Class
---------- -----
label Label
entry Entry"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixLabelEntry',
['labelside','options'], cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
class LabelFrame(TixWidget):
"""LabelFrame - Labelled Frame container. Packages a frame widget
and a label into one mega widget. To create widgets inside a
LabelFrame widget, one creates the new widgets relative to the
frame subwidget and manage them inside the frame subwidget.
Subwidgets Class
---------- -----
label Label
frame Frame"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixLabelFrame',
['labelside','options'], cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['frame'] = _dummyFrame(self, 'frame')
class ListNoteBook(TixWidget):
"""A ListNoteBook widget is very similar to the TixNoteBook widget:
it can be used to display many windows in a limited space using a
notebook metaphor. The notebook is divided into a stack of pages
(windows). At one time only one of these pages can be shown.
The user can navigate through these pages by
choosing the name of the desired page in the hlist subwidget."""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixListNoteBook', ['options'], cnf, kw)
# Is this necessary? It's not an exposed subwidget in Tix.
self.subwidget_list['pane'] = _dummyPanedWindow(self, 'pane',
destroy_physically=0)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['shlist'] = _dummyScrolledHList(self, 'shlist')
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name)
return self.subwidget_list[name]
def page(self, name):
return self.subwidget(name)
def pages(self):
# Can't call subwidgets_all directly because we don't want .nbframe
names = self.tk.split(self.tk.call(self._w, 'pages'))
ret = []
for x in names:
ret.append(self.subwidget(x))
return ret
def raise_page(self, name): # raise is a python keyword
self.tk.call(self._w, 'raise', name)
class Meter(TixWidget):
"""The Meter widget can be used to show the progress of a background
job which may take a long time to execute.
"""
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixMeter',
['options'], cnf, kw)
class NoteBook(TixWidget):
"""NoteBook - Multi-page container widget (tabbed notebook metaphor).
Subwidgets Class
---------- -----
nbframe NoteBookFrame
<pages> page widgets added dynamically with the add method"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self,master,'tixNoteBook', ['options'], cnf, kw)
self.subwidget_list['nbframe'] = TixSubWidget(self, 'nbframe',
destroy_physically=0)
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name)
return self.subwidget_list[name]
def delete(self, name):
self.tk.call(self._w, 'delete', name)
self.subwidget_list[name].destroy()
del self.subwidget_list[name]
def page(self, name):
return self.subwidget(name)
def pages(self):
# Can't call subwidgets_all directly because we don't want .nbframe
names = self.tk.split(self.tk.call(self._w, 'pages'))
ret = []
for x in names:
ret.append(self.subwidget(x))
return ret
def raise_page(self, name): # raise is a python keyword
self.tk.call(self._w, 'raise', name)
def raised(self):
return self.tk.call(self._w, 'raised')
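# Illustrative sketch (comments only): pages are TixSubWidget frames
# returned by add(); ordinary Tk widgets can then be packed into them.
# Assumes the tkinter Label class is in scope, as elsewhere in this
# module; the page name is invented for the example.
#
#   nb = NoteBook(root)
#   page = nb.add('hello', label='Hello')
#   Label(page, text='Hello, notebook').pack()
#   nb.raise_page('hello')
#   nb.pack(fill='both', expand=1)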
class NoteBookFrame(TixWidget):
# FIXME: This is dangerous to expose to be called on its own.
pass
class OptionMenu(TixWidget):
"""OptionMenu - creates a menu button of options.
Subwidget Class
--------- -----
menubutton Menubutton
menu Menu"""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixOptionMenu', ['options'], cnf, kw)
self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
self.subwidget_list['menu'] = _dummyMenu(self, 'menu')
def add_command(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', 'command', name, *self._options(cnf, kw))
def add_separator(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', 'separator', name, *self._options(cnf, kw))
def delete(self, name):
self.tk.call(self._w, 'delete', name)
def disable(self, name):
self.tk.call(self._w, 'disable', name)
def enable(self, name):
self.tk.call(self._w, 'enable', name)
class PanedWindow(TixWidget):
"""PanedWindow - Multi-pane container widget
allows the user to interactively manipulate the sizes of several
panes. The panes can be arranged either vertically or horizontally. The
user changes the sizes of the panes by dragging the resize handle
between two panes.
Subwidgets Class
---------- -----
<panes> g/p widgets added dynamically with the add method."""
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixPanedWindow', ['orientation', 'options'], cnf, kw)
# add delete forget panecget paneconfigure panes setsize
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = TixSubWidget(self, name,
check_intermediate=0)
return self.subwidget_list[name]
def delete(self, name):
self.tk.call(self._w, 'delete', name)
self.subwidget_list[name].destroy()
del self.subwidget_list[name]
def forget(self, name):
self.tk.call(self._w, 'forget', name)
def panecget(self, entry, opt):
return self.tk.call(self._w, 'panecget', entry, opt)
def paneconfigure(self, entry, cnf={}, **kw):
if cnf is None:
return _lst2dict(
self.tk.split(
self.tk.call(self._w, 'paneconfigure', entry)))
self.tk.call(self._w, 'paneconfigure', entry, *self._options(cnf, kw))
def panes(self):
names = self.tk.splitlist(self.tk.call(self._w, 'panes'))
return [self.subwidget(x) for x in names]
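# Illustrative sketch (comments only): a vertical PanedWindow with two
# panes. -min/-size are standard tixPanedWindow pane options; the pane
# names and sizes are invented for the example.
#
#   pw = PanedWindow(root, orientation='vertical')
#   top = pw.add('top', min=40, size=100)
#   bottom = pw.add('bottom', min=40)
#   pw.pack(fill='both', expand=1)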
class PopupMenu(TixWidget):
"""PopupMenu widget can be used as a replacement of the tk_popup command.
The advantage of the Tix PopupMenu widget is that it requires less
application code to manipulate.
Subwidgets Class
---------- -----
menubutton Menubutton
menu Menu"""
# FIXME: It should inherit -superclass tixShell
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixPopupMenu', ['options'], cnf, kw)
self.subwidget_list['menubutton'] = _dummyMenubutton(self, 'menubutton')
self.subwidget_list['menu'] = _dummyMenu(self, 'menu')
def bind_widget(self, widget):
self.tk.call(self._w, 'bind', widget._w)
def unbind_widget(self, widget):
self.tk.call(self._w, 'unbind', widget._w)
def post_widget(self, widget, x, y):
self.tk.call(self._w, 'post', widget._w, x, y)
class ResizeHandle(TixWidget):
"""Internal widget to draw resize handles on Scrolled widgets."""
def __init__(self, master, cnf={}, **kw):
# There seems to be a Tix bug rejecting the configure method
# Let's try making the flags -static
flags = ['options', 'command', 'cursorfg', 'cursorbg',
'handlesize', 'hintcolor', 'hintwidth',
'x', 'y']
# In fact, x y height width are configurable
TixWidget.__init__(self, master, 'tixResizeHandle',
flags, cnf, kw)
def attach_widget(self, widget):
self.tk.call(self._w, 'attachwidget', widget._w)
def detach_widget(self, widget):
self.tk.call(self._w, 'detachwidget', widget._w)
def hide(self, widget):
self.tk.call(self._w, 'hide', widget._w)
def show(self, widget):
self.tk.call(self._w, 'show', widget._w)
class ScrolledHList(TixWidget):
"""ScrolledHList - HList with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledHList', ['options'],
cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledListBox(TixWidget):
"""ScrolledListBox - Listbox with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledListBox', ['options'], cnf, kw)
self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledText(TixWidget):
"""ScrolledText - Text with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledText', ['options'], cnf, kw)
self.subwidget_list['text'] = _dummyText(self, 'text')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledTList(TixWidget):
"""ScrolledTList - TList with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledTList', ['options'],
cnf, kw)
self.subwidget_list['tlist'] = _dummyTList(self, 'tlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class ScrolledWindow(TixWidget):
"""ScrolledWindow - Window with automatic scrollbars."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixScrolledWindow', ['options'], cnf, kw)
self.subwidget_list['window'] = _dummyFrame(self, 'window')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class Select(TixWidget):
"""Select - Container of button subwidgets. It can be used to provide
radio-box or check-box style of selection options for the user.
Subwidgets are buttons added dynamically using the add method."""
# FIXME: It should inherit -superclass tixLabelWidget
def __init__(self, master, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixSelect',
['allowzero', 'radio', 'orientation', 'labelside',
'options'],
cnf, kw)
self.subwidget_list['label'] = _dummyLabel(self, 'label')
def add(self, name, cnf={}, **kw):
self.tk.call(self._w, 'add', name, *self._options(cnf, kw))
self.subwidget_list[name] = _dummyButton(self, name)
return self.subwidget_list[name]
def invoke(self, name):
self.tk.call(self._w, 'invoke', name)
class Shell(TixWidget):
"""Toplevel window.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixShell', ['options', 'title'], cnf, kw)
class DialogShell(TixWidget):
"""Toplevel window, with popup popdown and center methods.
It tells the window manager that it is a dialog window and should be
treated specially. The exact treatment depends on the
window manager.
Subwidgets - None"""
# FIXME: It should inherit from Shell
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master,
'tixDialogShell',
['options', 'title', 'mapped',
'minheight', 'minwidth',
'parent', 'transient'], cnf, kw)
def popdown(self):
self.tk.call(self._w, 'popdown')
def popup(self):
self.tk.call(self._w, 'popup')
def center(self):
self.tk.call(self._w, 'center')
class StdButtonBox(TixWidget):
"""StdButtonBox - Standard Button Box (OK, Apply, Cancel and Help) """
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixStdButtonBox',
['orientation', 'options'], cnf, kw)
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['apply'] = _dummyButton(self, 'apply')
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['help'] = _dummyButton(self, 'help')
def invoke(self, name):
if name in self.subwidget_list:
self.tk.call(self._w, 'invoke', name)
class TList(TixWidget, XView, YView):
"""TList - Hierarchy display widget which can be
used to display data in a tabular format. The list entries of a TList
widget are similar to the entries in the Tk listbox widget. The main
differences are (1) the TList widget can display the list entries in a
two dimensional format and (2) you can use graphical images as well as
multiple colors and fonts for the list entries.
Subwidgets - None"""
def __init__ (self,master=None,cnf={}, **kw):
TixWidget.__init__(self, master, 'tixTList', ['options'], cnf, kw)
def active_set(self, index):
self.tk.call(self._w, 'active', 'set', index)
def active_clear(self):
self.tk.call(self._w, 'active', 'clear')
def anchor_set(self, index):
self.tk.call(self._w, 'anchor', 'set', index)
def anchor_clear(self):
self.tk.call(self._w, 'anchor', 'clear')
def delete(self, from_, to=None):
self.tk.call(self._w, 'delete', from_, to)
def dragsite_set(self, index):
self.tk.call(self._w, 'dragsite', 'set', index)
def dragsite_clear(self):
self.tk.call(self._w, 'dragsite', 'clear')
def dropsite_set(self, index):
self.tk.call(self._w, 'dropsite', 'set', index)
def dropsite_clear(self):
self.tk.call(self._w, 'dropsite', 'clear')
def insert(self, index, cnf={}, **kw):
self.tk.call(self._w, 'insert', index, *self._options(cnf, kw))
def info_active(self):
return self.tk.call(self._w, 'info', 'active')
def info_anchor(self):
return self.tk.call(self._w, 'info', 'anchor')
def info_down(self, index):
return self.tk.call(self._w, 'info', 'down', index)
def info_left(self, index):
return self.tk.call(self._w, 'info', 'left', index)
def info_right(self, index):
return self.tk.call(self._w, 'info', 'right', index)
def info_selection(self):
c = self.tk.call(self._w, 'info', 'selection')
return self.tk.splitlist(c)
def info_size(self):
return self.tk.call(self._w, 'info', 'size')
def info_up(self, index):
return self.tk.call(self._w, 'info', 'up', index)
def nearest(self, x, y):
return self.tk.call(self._w, 'nearest', x, y)
def see(self, index):
self.tk.call(self._w, 'see', index)
def selection_clear(self, cnf={}, **kw):
self.tk.call(self._w, 'selection', 'clear', *self._options(cnf, kw))
def selection_includes(self, index):
return self.tk.call(self._w, 'selection', 'includes', index)
def selection_set(self, first, last=None):
self.tk.call(self._w, 'selection', 'set', first, last)
class Tree(TixWidget):
"""Tree - The tixTree widget can be used to display hierarchical
data in a tree form. The user can adjust
the view of the tree by opening or closing parts of the tree."""
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixTree',
['options'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def autosetmode(self):
'''This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close.'''
self.tk.call(self._w, 'autosetmode')
def close(self, entrypath):
'''Close the entry given by entryPath if its mode is close.'''
self.tk.call(self._w, 'close', entrypath)
def getmode(self, entrypath):
'''Returns the current mode of the entry given by entryPath.'''
return self.tk.call(self._w, 'getmode', entrypath)
def open(self, entrypath):
'''Open the entry given by entryPath if its mode is open.'''
self.tk.call(self._w, 'open', entrypath)
def setmode(self, entrypath, mode='none'):
'''This command is used to indicate whether the entry given by
entryPath has children entries and whether the children are visible. mode
must be one of open, close or none. If mode is set to open, a (+)
indicator is drawn next to the entry. If mode is set to close, a (-)
indicator is drawn next to the entry. If mode is set to none, no
indicators will be drawn for this entry. The default mode is none. The
open mode indicates the entry has hidden children and this entry can be
opened by the user. The close mode indicates that all the children of the
entry are now visible and the entry can be closed by the user.'''
self.tk.call(self._w, 'setmode', entrypath, mode)
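# Illustrative sketch (comments only): the Tree is driven through its
# hlist subwidget; autosetmode() then derives the (+)/(-) indicators
# described above. Entry names are invented for the example.
#
#   tree = Tree(root)
#   hl = tree.subwidget_list['hlist']
#   hl.add('top', text='Top level')
#   hl.add('top.child', text='Nested entry')
#   tree.autosetmode()
#   tree.pack(fill='both', expand=1)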
# Could try subclassing Tree for CheckList - would need another arg to init
class CheckList(TixWidget):
"""The CheckList widget
displays a list of items to be selected by the user. CheckList acts
similarly to the Tk checkbutton or radiobutton widgets, except it is
capable of handling many more items than checkbuttons or radiobuttons.
"""
# FIXME: It should inherit -superclass tixTree
def __init__(self, master=None, cnf={}, **kw):
TixWidget.__init__(self, master, 'tixCheckList',
['options', 'radio'], cnf, kw)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
def autosetmode(self):
'''This command calls the setmode method for all the entries in this
Tree widget: if an entry has no child entries, its mode is set to
none. Otherwise, if the entry has any hidden child entries, its mode is
set to open; otherwise its mode is set to close.'''
self.tk.call(self._w, 'autosetmode')
def close(self, entrypath):
'''Close the entry given by entryPath if its mode is close.'''
self.tk.call(self._w, 'close', entrypath)
def getmode(self, entrypath):
'''Returns the current mode of the entry given by entryPath.'''
return self.tk.call(self._w, 'getmode', entrypath)
def open(self, entrypath):
'''Open the entry given by entryPath if its mode is open.'''
self.tk.call(self._w, 'open', entrypath)
def getselection(self, mode='on'):
'''Returns a list of items whose status matches mode. If mode is
not specified, the list of items in the "on" status will be returned.
Mode can be on, off or default.'''
c = self.tk.split(self.tk.call(self._w, 'getselection', mode))
return self.tk.splitlist(c)
def getstatus(self, entrypath):
'''Returns the current status of entryPath.'''
return self.tk.call(self._w, 'getstatus', entrypath)
def setstatus(self, entrypath, mode='on'):
'''Sets the status of entryPath to be mode. A bitmap will be
displayed next to the entry if its status is on, off or default.'''
self.tk.call(self._w, 'setstatus', entrypath, mode)
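# Illustrative sketch (comments only): a CheckList is populated via
# its hlist subwidget and queried with getselection(). Entry names are
# invented for the example.
#
#   cl = CheckList(root, radio=0)
#   hl = cl.subwidget_list['hlist']
#   hl.add('opt1', text='Enable logging')
#   cl.setstatus('opt1', 'on')
#   cl.getselection()        # -> entries whose status is 'on'
#   cl.pack(fill='both', expand=1)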
###########################################################################
### The subclassing below is used to instantiate the subwidgets in each ###
### mega widget. This allows us to access their methods directly. ###
###########################################################################
class _dummyButton(Button, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyCheckbutton(Checkbutton, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyEntry(Entry, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyFrame(Frame, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyLabel(Label, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyListbox(Listbox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyMenu(Menu, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyMenubutton(Menubutton, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrollbar(Scrollbar, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyText(Text, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrolledListBox(ScrolledListBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['listbox'] = _dummyListbox(self, 'listbox')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyHList(HList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyScrolledHList(ScrolledHList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyTList(TList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyComboBox(ComboBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, ['fancy',destroy_physically])
self.subwidget_list['label'] = _dummyLabel(self, 'label')
self.subwidget_list['entry'] = _dummyEntry(self, 'entry')
self.subwidget_list['arrow'] = _dummyButton(self, 'arrow')
self.subwidget_list['slistbox'] = _dummyScrolledListBox(self,
'slistbox')
try:
self.subwidget_list['tick'] = _dummyButton(self, 'tick')
#cross Button : present if created with the fancy option
self.subwidget_list['cross'] = _dummyButton(self, 'cross')
except TypeError:
# unavailable when -fancy not specified
pass
class _dummyDirList(DirList, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['hlist'] = _dummyHList(self, 'hlist')
self.subwidget_list['vsb'] = _dummyScrollbar(self, 'vsb')
self.subwidget_list['hsb'] = _dummyScrollbar(self, 'hsb')
class _dummyDirSelectBox(DirSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dirlist'] = _dummyDirList(self, 'dirlist')
self.subwidget_list['dircbx'] = _dummyFileComboBox(self, 'dircbx')
class _dummyExFileSelectBox(ExFileSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['hidden'] = _dummyCheckbutton(self, 'hidden')
self.subwidget_list['types'] = _dummyComboBox(self, 'types')
self.subwidget_list['dir'] = _dummyComboBox(self, 'dir')
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['file'] = _dummyComboBox(self, 'file')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
class _dummyFileSelectBox(FileSelectBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dirlist'] = _dummyScrolledListBox(self, 'dirlist')
self.subwidget_list['filelist'] = _dummyScrolledListBox(self, 'filelist')
self.subwidget_list['filter'] = _dummyComboBox(self, 'filter')
self.subwidget_list['selection'] = _dummyComboBox(self, 'selection')
class _dummyFileComboBox(ComboBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['dircbx'] = _dummyComboBox(self, 'dircbx')
class _dummyStdButtonBox(StdButtonBox, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
self.subwidget_list['ok'] = _dummyButton(self, 'ok')
self.subwidget_list['apply'] = _dummyButton(self, 'apply')
self.subwidget_list['cancel'] = _dummyButton(self, 'cancel')
self.subwidget_list['help'] = _dummyButton(self, 'help')
class _dummyNoteBookFrame(NoteBookFrame, TixSubWidget):
def __init__(self, master, name, destroy_physically=0):
TixSubWidget.__init__(self, master, name, destroy_physically)
class _dummyPanedWindow(PanedWindow, TixSubWidget):
def __init__(self, master, name, destroy_physically=1):
TixSubWidget.__init__(self, master, name, destroy_physically)
########################
### Utility Routines ###
########################
#mike Should tixDestroy be exposed as a wrapper? - but not for widgets.
def OptionName(widget):
'''Returns the qualified path name for the widget. Normally used to set
default options for subwidgets. See tixwidgets.py'''
return widget.tk.call('tixOptionName', widget._w)
# Called with a dictionary argument of the form
# {'*.c':'C source files', '*.txt':'Text Files', '*':'All files'}
# returns a string which can be used to configure the fsbox file types
# in an ExFileSelectBox. i.e.,
# '{{*} {* - All files}} {{*.c} {*.c - C source files}} {{*.txt} {*.txt - Text Files}}'
def FileTypeList(dict):
s = ''
for type in dict.keys():
s = s + '{{' + type + '} {' + type + ' - ' + dict[type] + '}} '
return s
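# Illustrative sketch (comments only): FileTypeList turns a mapping
# into the Tcl list format expected by the -filetypes option of an
# (Ex)FileSelectBox. Dictionary ordering determines the output order.
#
#   FileTypeList({'*.py': 'Python files'})
#   # -> '{{*.py} {*.py - Python files}} '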
# Still to be done:
# tixIconView
class CObjView(TixWidget):
"""This file implements the Canvas Object View widget. This is a base
class of IconView. It implements automatic placement/adjustment of the
scrollbars according to the canvas objects inside the canvas subwidget.
The scrollbars are adjusted so that the canvas is just large enough
to see all the objects.
"""
# FIXME: It should inherit -superclass tixScrolledWidget
pass
class Grid(TixWidget, XView, YView):
'''The Tix Grid command creates a new window and makes it into a
tixGrid widget. Additional options may be specified on the command
line or in the option database to configure aspects such as its cursor
and relief.
A Grid widget displays its contents in a two dimensional grid of cells.
Each cell may contain one Tix display item, which may be in text,
graphics or other formats. See the DisplayStyle class for more information
about Tix display items. Individual cells, or groups of cells, can be
formatted with a wide range of attributes, such as its color, relief and
border.
Subwidgets - None'''
# valid specific resources as of Tk 8.4
# editdonecmd, editnotifycmd, floatingcols, floatingrows, formatcmd,
# highlightbackground, highlightcolor, leftmargin, itemtype, selectmode,
# selectunit, topmargin,
def __init__(self, master=None, cnf={}, **kw):
static= []
self.cnf= cnf
TixWidget.__init__(self, master, 'tixGrid', static, cnf, kw)
# valid options as of Tk 8.4
# anchor, bdtype, cget, configure, delete, dragsite, dropsite, entrycget,
# edit, entryconfigure, format, geometryinfo, info, index, move, nearest,
# selection, set, size, unset, xview, yview
def anchor_clear(self):
"""Removes the selection anchor."""
self.tk.call(self, 'anchor', 'clear')
def anchor_get(self):
"Get the (x,y) coordinate of the current anchor cell"
return self._getints(self.tk.call(self, 'anchor', 'get'))
def anchor_set(self, x, y):
"""Set the selection anchor to the cell at (x, y)."""
self.tk.call(self, 'anchor', 'set', x, y)
def delete_row(self, from_, to=None):
"""Delete rows between from_ and to inclusive.
If to is not provided, delete only row at from_"""
if to is None:
self.tk.call(self, 'delete', 'row', from_)
else:
self.tk.call(self, 'delete', 'row', from_, to)
def delete_column(self, from_, to=None):
"""Delete columns between from_ and to inclusive.
If to is not provided, delete only column at from_"""
if to is None:
self.tk.call(self, 'delete', 'column', from_)
else:
self.tk.call(self, 'delete', 'column', from_, to)
def edit_apply(self):
"""If any cell is being edited, de-highlight the cell and applies
the changes."""
self.tk.call(self, 'edit', 'apply')
def edit_set(self, x, y):
"""Highlights the cell at (x, y) for editing, if the -editnotify
command returns True for this cell."""
self.tk.call(self, 'edit', 'set', x, y)
def entrycget(self, x, y, option):
"Get the option value for cell at (x,y)"
if option and option[0] != '-':
option = '-' + option
return self.tk.call(self, 'entrycget', x, y, option)
def entryconfigure(self, x, y, cnf=None, **kw):
return self._configure(('entryconfigure', x, y), cnf, kw)
# def format
# def index
def info_exists(self, x, y):
"Return True if display item exists at (x,y)"
return self._getboolean(self.tk.call(self, 'info', 'exists', x, y))
def info_bbox(self, x, y):
# This seems to always return '', at least for 'text' displayitems
return self.tk.call(self, 'info', 'bbox', x, y)
def move_column(self, from_, to, offset):
"""Moves the the range of columns from position FROM through TO by
the distance indicated by OFFSET. For example, move_column(2, 4, 1)
moves the columns 2,3,4 to columns 3,4,5."""
self.tk.call(self, 'move', 'column', from_, to, offset)
def move_row(self, from_, to, offset):
"""Moves the the range of rows from position FROM through TO by
the distance indicated by OFFSET.
For example, move_row(2, 4, 1) moves the rows 2,3,4 to rows 3,4,5."""
self.tk.call(self, 'move', 'row', from_, to, offset)
def nearest(self, x, y):
"Return coordinate of cell nearest pixel coordinate (x,y)"
return self._getints(self.tk.call(self, 'nearest', x, y))
# def selection adjust
# def selection clear
# def selection includes
# def selection set
# def selection toggle
def set(self, x, y, itemtype=None, **kw):
args= self._options(self.cnf, kw)
if itemtype is not None:
args= ('-itemtype', itemtype) + args
self.tk.call(self, 'set', x, y, *args)
def size_column(self, index, **kw):
"""Queries or sets the size of the column given by
INDEX. INDEX may be any non-negative
integer that gives the position of a given column.
INDEX can also be the string "default"; in this case, this command
queries or sets the default size of all columns.
When no option-value pair is given, this command returns a tuple
containing the current size setting of the given column. When
option-value pairs are given, the corresponding options of the
size setting of the given column are changed. Options may be one
of the following:
pad0 pixels
Specifies the paddings to the left of a column.
pad1 pixels
Specifies the paddings to the right of a column.
size val
Specifies the width of a column .
Val may be: "auto" -- the width of the column is set the
the widest cell in the column; a valid Tk screen distance
unit; or a real number following by the word chars
(e.g. 3.4chars) that sets the width of the column to the
given number of characters."""
return self.tk.split(self.tk.call(self._w, 'size', 'column', index,
*self._options({}, kw)))
def size_row(self, index, **kw):
"""Queries or sets the size of the row given by
INDEX. INDEX may be any non-negative
integer that gives the position of a given row.
INDEX can also be the string "default"; in this case, this command
queries or sets the default size of all rows.
When no option-value pair is given, this command returns a list
containing the current size setting of the given row. When option-value
pairs are given, the corresponding options of the size setting of the
given row are changed. Options may be one of the following:
pad0 pixels
Specifies the paddings to the top of a row.
pad1 pixels
Specifies the paddings to the the bottom of a row.
size val
Specifies the height of a row.
Val may be: "auto" -- the height of the row is set the
the highest cell in the row; a valid Tk screen distance
unit; or a real number following by the word chars
(e.g. 3.4chars) that sets the height of the row to the
given number of characters."""
return self.tk.split(self.tk.call(
self, 'size', 'row', index, *self._options({}, kw)))
def unset(self, x, y):
"""Clears the cell at (x, y) by removing its display item."""
self.tk.call(self._w, 'unset', x, y)
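# Illustrative sketch (comments only): filling a Grid cell with a text
# display item and auto-sizing its column. Coordinates, text and the
# selectunit value are invented for the example.
#
#   g = Grid(root, selectunit='cell')
#   g.set(0, 0, itemtype='text', text='top-left')
#   g.size_column(0, size='auto')
#   g.pack(fill='both', expand=1)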
class ScrolledGrid(Grid):
'''Scrolled Grid widgets'''
# FIXME: It should inherit -superclass tixScrolledWidget
def __init__(self, master=None, cnf={}, **kw):
static= []
self.cnf= cnf
TixWidget.__init__(self, master, 'tixScrolledGrid', static, cnf, kw)
| lgpl-3.0 |
chouseknecht/openshift-restclient-python | openshift/test/test_v1_rbd_persistent_volume_source.py | 1 | 4341 | # coding: utf-8
"""
OpenShift API (with Kubernetes)
OpenShift provides builds, application lifecycle, image content management, and administrative policy on top of Kubernetes. The API allows consistent management of those objects. All API operations are authenticated via an Authorization bearer token that is provided for service accounts as a generated secret (in JWT form) or via the native OAuth endpoint located at /oauth/authorize. Core infrastructure components may use openshift.client certificates that require no authentication. All API operations return a 'resourceVersion' string that represents the version of the object in the underlying storage. The standard LIST operation performs a snapshot read of the underlying objects, returning a resourceVersion representing a consistent version of the listed objects. The WATCH operation allows all updates to a set of objects after the provided resourceVersion to be observed by a openshift.client. By listing and beginning a watch from the returned resourceVersion, openshift.clients may observe a consistent view of the state of one or more objects. Note that WATCH always returns the update after the provided resourceVersion. Watch may be extended a limited time in the past - using etcd 2 the watch window is 1000 events (which on a large cluster may only be a few tens of seconds) so openshift.clients must explicitly handle the \"watch to old error\" by re-listing. Objects are divided into two rough categories - those that have a lifecycle and must reflect the state of the cluster, and those that have no state. Objects with lifecycle typically have three main sections: * 'metadata' common to all objects * a 'spec' that represents the desired state * a 'status' that represents how much of the desired state is reflected on the cluster at the current time Objects that have no state have 'metadata' but may lack a 'spec' or 'status' section. Objects are divided into those that are namespace scoped (only exist inside of a namespace) and those that are cluster scoped (exist outside of a namespace). A namespace scoped resource will be deleted when the namespace is deleted and cannot be created if the namespace has not yet been created or is in the process of deletion. Cluster scoped resources are typically only accessible to admins - resources like nodes, persistent volumes, and cluster policy. All objects have a schema that is a combination of the 'kind' and 'apiVersion' fields. This schema is additive only for any given version - no backwards incompatible changes are allowed without incrementing the apiVersion. The server will return and accept a number of standard responses that share a common schema - for instance, the common error type is 'metav1.Status' (described below) and will be returned on any error from the API server. The API is available in multiple serialization formats - the default is JSON (Accept: application/json and Content-Type: application/json) but openshift.clients may also use YAML (application/yaml) or the native Protobuf schema (application/vnd.kubernetes.protobuf). Note that the format of the WATCH API call is slightly different - for JSON it returns newline delimited objects while for Protobuf it returns length-delimited frames (4 bytes in network-order) that contain a 'versioned.Watch' Protobuf object. See the OpenShift documentation at https://docs.openshift.org for more information.
OpenAPI spec version: latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import openshift.client
from kubernetes.client.rest import ApiException
from openshift.client.models.v1_rbd_persistent_volume_source import V1RBDPersistentVolumeSource
class TestV1RBDPersistentVolumeSource(unittest.TestCase):
""" V1RBDPersistentVolumeSource unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1RBDPersistentVolumeSource(self):
"""
Test V1RBDPersistentVolumeSource
"""
# FIXME: construct object with mandatory attributes with example values
#model = openshift.client.models.v1_rbd_persistent_volume_source.V1RBDPersistentVolumeSource()
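# A hedged sketch of what the FIXME above might look like; the
# required fields (monitors, image) mirror the Kubernetes RBD volume
# source, but the concrete values below are invented for the example:
#
#   source = V1RBDPersistentVolumeSource(
#       monitors=['10.0.0.1:6789'],
#       image='rbd-image',
#       pool='rbd')
#   self.assertEqual('rbd-image', source.image)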
pass
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
zhukaixy/kbengine | kbe/src/lib/python/Lib/test/test_isinstance.py | 90 | 10171 | # Tests some corner cases with isinstance() and issubclass(). While these
# tests use new style classes and properties, they actually do whitebox
# testing of error conditions uncovered when using extension types.
import unittest
from test import support
import sys
class TestIsInstanceExceptions(unittest.TestCase):
# Test to make sure that an AttributeError when accessing the instance's
# class's bases is masked. This was actually a bug in Python 2.2 and
# 2.2.1 where the exception wasn't caught but it also wasn't being cleared
# (leading to an "undetected error" in the debug build). Set up is,
# isinstance(inst, cls) where:
#
# - cls isn't a type, or a tuple
# - cls has a __bases__ attribute
# - inst has a __class__ attribute
# - inst.__class__ as no __bases__ attribute
#
# Sounds complicated, I know, but this mimics a situation where an
# extension type raises an AttributeError when its __bases__ attribute is
# gotten. In that case, isinstance() should return False.
def test_class_has_no_bases(self):
class I(object):
def getclass(self):
# This must return an object that has no __bases__ attribute
return None
__class__ = property(getclass)
class C(object):
def getbases(self):
return ()
__bases__ = property(getbases)
self.assertEqual(False, isinstance(I(), C()))
# Like above except that inst.__class__.__bases__ raises an exception
# other than AttributeError
def test_bases_raises_other_than_attribute_error(self):
class E(object):
def getbases(self):
raise RuntimeError
__bases__ = property(getbases)
class I(object):
def getclass(self):
return E()
__class__ = property(getclass)
class C(object):
def getbases(self):
return ()
__bases__ = property(getbases)
self.assertRaises(RuntimeError, isinstance, I(), C())
# Here's a situation where getattr(cls, '__bases__') raises an exception.
# If that exception is not AttributeError, it should not get masked
def test_dont_mask_non_attribute_error(self):
class I: pass
class C(object):
def getbases(self):
raise RuntimeError
__bases__ = property(getbases)
self.assertRaises(RuntimeError, isinstance, I(), C())
# Like above, except that getattr(cls, '__bases__') raises an
# AttributeError, which /should/ get masked as a TypeError
def test_mask_attribute_error(self):
class I: pass
class C(object):
def getbases(self):
raise AttributeError
__bases__ = property(getbases)
self.assertRaises(TypeError, isinstance, I(), C())
# check that we don't mask non AttributeErrors
# see: http://bugs.python.org/issue1574217
def test_isinstance_dont_mask_non_attribute_error(self):
class C(object):
def getclass(self):
raise RuntimeError
__class__ = property(getclass)
c = C()
self.assertRaises(RuntimeError, isinstance, c, bool)
# test another code path
class D: pass
self.assertRaises(RuntimeError, isinstance, c, D)
# These tests are similar to above, but tickle certain code paths in
# issubclass() instead of isinstance() -- really PyObject_IsSubclass()
# vs. PyObject_IsInstance().
class TestIsSubclassExceptions(unittest.TestCase):
def test_dont_mask_non_attribute_error(self):
class C(object):
def getbases(self):
raise RuntimeError
__bases__ = property(getbases)
class S(C): pass
self.assertRaises(RuntimeError, issubclass, C(), S())
def test_mask_attribute_error(self):
class C(object):
def getbases(self):
raise AttributeError
__bases__ = property(getbases)
class S(C): pass
self.assertRaises(TypeError, issubclass, C(), S())
# Like above, but test the second branch, where the __bases__ of the
# second arg (the cls arg) is tested. This means the first arg must
# return a valid __bases__, and it's okay for it to be a normal --
# unrelated by inheritance -- class.
def test_dont_mask_non_attribute_error_in_cls_arg(self):
class B: pass
class C(object):
def getbases(self):
raise RuntimeError
__bases__ = property(getbases)
self.assertRaises(RuntimeError, issubclass, B, C())
def test_mask_attribute_error_in_cls_arg(self):
class B: pass
class C(object):
def getbases(self):
raise AttributeError
__bases__ = property(getbases)
self.assertRaises(TypeError, issubclass, B, C())
# meta classes for creating abstract classes and instances
class AbstractClass(object):
def __init__(self, bases):
self.bases = bases
def getbases(self):
return self.bases
__bases__ = property(getbases)
def __call__(self):
return AbstractInstance(self)
class AbstractInstance(object):
def __init__(self, klass):
self.klass = klass
def getclass(self):
return self.klass
__class__ = property(getclass)
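# How the fakes behave (a sketch, exercised by the tests below): an
# AbstractClass looks class-like to isinstance()/issubclass() because it
# exposes __bases__, and calling it yields an AbstractInstance whose
# __class__ property points back at it, so e.g.
# isinstance(AbstractChild(), AbstractSuper) is True purely via properties.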
# abstract classes
AbstractSuper = AbstractClass(bases=())
AbstractChild = AbstractClass(bases=(AbstractSuper,))
# normal classes
class Super:
pass
class Child(Super):
pass
# new-style classes
class NewSuper(object):
pass
class NewChild(NewSuper):
pass
class TestIsInstanceIsSubclass(unittest.TestCase):
# Tests to ensure that isinstance and issubclass work on abstract
# classes and instances. Before the 2.2 release, TypeErrors were
# raised when boolean values should have been returned. The bug was
    # triggered by mixing 'normal' classes and instances with
# 'abstract' classes and instances. This case tries to test all
# combinations.
def test_isinstance_normal(self):
# normal instances
self.assertEqual(True, isinstance(Super(), Super))
self.assertEqual(False, isinstance(Super(), Child))
self.assertEqual(False, isinstance(Super(), AbstractSuper))
self.assertEqual(False, isinstance(Super(), AbstractChild))
self.assertEqual(True, isinstance(Child(), Super))
self.assertEqual(False, isinstance(Child(), AbstractSuper))
def test_isinstance_abstract(self):
# abstract instances
self.assertEqual(True, isinstance(AbstractSuper(), AbstractSuper))
self.assertEqual(False, isinstance(AbstractSuper(), AbstractChild))
self.assertEqual(False, isinstance(AbstractSuper(), Super))
self.assertEqual(False, isinstance(AbstractSuper(), Child))
self.assertEqual(True, isinstance(AbstractChild(), AbstractChild))
self.assertEqual(True, isinstance(AbstractChild(), AbstractSuper))
self.assertEqual(False, isinstance(AbstractChild(), Super))
self.assertEqual(False, isinstance(AbstractChild(), Child))
def test_subclass_normal(self):
# normal classes
self.assertEqual(True, issubclass(Super, Super))
self.assertEqual(False, issubclass(Super, AbstractSuper))
self.assertEqual(False, issubclass(Super, Child))
self.assertEqual(True, issubclass(Child, Child))
self.assertEqual(True, issubclass(Child, Super))
self.assertEqual(False, issubclass(Child, AbstractSuper))
def test_subclass_abstract(self):
# abstract classes
self.assertEqual(True, issubclass(AbstractSuper, AbstractSuper))
self.assertEqual(False, issubclass(AbstractSuper, AbstractChild))
self.assertEqual(False, issubclass(AbstractSuper, Child))
self.assertEqual(True, issubclass(AbstractChild, AbstractChild))
self.assertEqual(True, issubclass(AbstractChild, AbstractSuper))
self.assertEqual(False, issubclass(AbstractChild, Super))
self.assertEqual(False, issubclass(AbstractChild, Child))
def test_subclass_tuple(self):
        # test with a tuple of classes as the second argument
self.assertEqual(True, issubclass(Child, (Child,)))
self.assertEqual(True, issubclass(Child, (Super,)))
self.assertEqual(False, issubclass(Super, (Child,)))
self.assertEqual(True, issubclass(Super, (Child, Super)))
self.assertEqual(False, issubclass(Child, ()))
self.assertEqual(True, issubclass(Super, (Child, (Super,))))
self.assertEqual(True, issubclass(NewChild, (NewChild,)))
self.assertEqual(True, issubclass(NewChild, (NewSuper,)))
self.assertEqual(False, issubclass(NewSuper, (NewChild,)))
self.assertEqual(True, issubclass(NewSuper, (NewChild, NewSuper)))
self.assertEqual(False, issubclass(NewChild, ()))
self.assertEqual(True, issubclass(NewSuper, (NewChild, (NewSuper,))))
self.assertEqual(True, issubclass(int, (int, (float, int))))
self.assertEqual(True, issubclass(str, (str, (Child, NewChild, str))))
def test_subclass_recursion_limit(self):
# make sure that issubclass raises RuntimeError before the C stack is
# blown
self.assertRaises(RuntimeError, blowstack, issubclass, str, str)
def test_isinstance_recursion_limit(self):
        # make sure that isinstance raises RuntimeError before the C stack is
# blown
self.assertRaises(RuntimeError, blowstack, isinstance, '', str)
def blowstack(fxn, arg, compare_to):
    # Make sure that calling the given function with a deeply nested tuple
    # for its second argument will raise RuntimeError eventually.
tuple_arg = (compare_to,)
for cnt in range(sys.getrecursionlimit()+5):
tuple_arg = (tuple_arg,)
fxn(arg, tuple_arg)
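    # The call above receives a tuple nested a few levels past the recursion
    # limit, so the recursive tuple walk inside fxn (isinstance or issubclass
    # here) is expected to raise RuntimeError rather than overflow the C stack.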
def test_main():
support.run_unittest(
TestIsInstanceExceptions,
TestIsSubclassExceptions,
TestIsInstanceIsSubclass
)
if __name__ == '__main__':
test_main()
| lgpl-3.0 |
valkyriesavage/gasustainability | django/db/backends/sqlite3/introspection.py | 160 | 5815 | import re
from django.db.backends import BaseDatabaseIntrospection
# This light wrapper "fakes" a dictionary interface, because some SQLite data
# types include variables in them -- e.g. "varchar(30)" -- and can't be matched
# as a simple dictionary lookup.
class FlexibleFieldLookupDict(object):
# Maps SQL types to Django Field types. Some of the SQL types have multiple
# entries here because SQLite allows for anything and doesn't normalize the
# field type; it uses whatever was given.
base_data_types_reverse = {
'bool': 'BooleanField',
'boolean': 'BooleanField',
'smallint': 'SmallIntegerField',
'smallint unsigned': 'PositiveSmallIntegerField',
'smallinteger': 'SmallIntegerField',
'int': 'IntegerField',
'integer': 'IntegerField',
'bigint': 'BigIntegerField',
'integer unsigned': 'PositiveIntegerField',
'decimal': 'DecimalField',
'real': 'FloatField',
'text': 'TextField',
'char': 'CharField',
'date': 'DateField',
'datetime': 'DateTimeField',
'time': 'TimeField',
}
def __getitem__(self, key):
key = key.lower()
try:
return self.base_data_types_reverse[key]
except KeyError:
m = re.search(r'^\s*(?:var)?char\s*\(\s*(\d+)\s*\)\s*$', key)
if m:
return ('CharField', {'max_length': int(m.group(1))})
raise KeyError
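# Illustrative lookups (a sketch; results follow the mapping and regex above):
#   FlexibleFieldLookupDict()['INTEGER']     -> 'IntegerField'
#   FlexibleFieldLookupDict()['varchar(30)'] -> ('CharField', {'max_length': 30})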
class DatabaseIntrospection(BaseDatabaseIntrospection):
data_types_reverse = FlexibleFieldLookupDict()
def get_table_list(self, cursor):
"Returns a list of table names in the current database."
# Skip the sqlite_sequence system table used for autoincrement key
# generation.
cursor.execute("""
SELECT name FROM sqlite_master
WHERE type='table' AND NOT name='sqlite_sequence'
ORDER BY name""")
return [row[0] for row in cursor.fetchall()]
def get_table_description(self, cursor, table_name):
"Returns a description of the table, with the DB-API cursor.description interface."
return [(info['name'], info['type'], None, None, None, None,
info['null_ok']) for info in self._table_info(cursor, table_name)]
def get_relations(self, cursor, table_name):
"""
Returns a dictionary of {field_index: (field_index_other_table, other_table)}
representing all relationships to the given table. Indexes are 0-based.
"""
# Dictionary of relations to return
relations = {}
# Schema for this table
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s AND type = %s", [table_name, "table"])
results = cursor.fetchone()[0].strip()
results = results[results.index('(')+1:results.rindex(')')]
# Walk through and look for references to other tables. SQLite doesn't
# really have enforced references, but since it echoes out the SQL used
# to create the table we can look for REFERENCES statements used there.
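        # e.g. a column declared as  "user_id" integer REFERENCES "auth_user" ("id")
        # matches below and, assuming "id" is column 0 of auth_user, yields
        # relations[field_index] = (0, 'auth_user').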
for field_index, field_desc in enumerate(results.split(',')):
field_desc = field_desc.strip()
if field_desc.startswith("UNIQUE"):
continue
m = re.search('references (.*) \(["|](.*)["|]\)', field_desc, re.I)
if not m:
continue
table, column = [s.strip('"') for s in m.groups()]
cursor.execute("SELECT sql FROM sqlite_master WHERE tbl_name = %s", [table])
result = cursor.fetchall()[0]
other_table_results = result[0].strip()
li, ri = other_table_results.index('('), other_table_results.rindex(')')
other_table_results = other_table_results[li+1:ri]
for other_index, other_desc in enumerate(other_table_results.split(',')):
other_desc = other_desc.strip()
if other_desc.startswith('UNIQUE'):
continue
name = other_desc.split(' ', 1)[0].strip('"')
if name == column:
relations[field_index] = (other_index, table)
break
return relations
def get_indexes(self, cursor, table_name):
"""
Returns a dictionary of fieldname -> infodict for the given table,
where each infodict is in the format:
{'primary_key': boolean representing whether it's the primary key,
'unique': boolean representing whether it's a unique index}
"""
indexes = {}
for info in self._table_info(cursor, table_name):
indexes[info['name']] = {'primary_key': info['pk'] != 0,
'unique': False}
cursor.execute('PRAGMA index_list(%s)' % self.connection.ops.quote_name(table_name))
# seq, name, unique
for index, unique in [(field[1], field[2]) for field in cursor.fetchall()]:
if not unique:
continue
cursor.execute('PRAGMA index_info(%s)' % self.connection.ops.quote_name(index))
info = cursor.fetchall()
# Skip indexes across multiple fields
if len(info) != 1:
continue
name = info[0][2] # seqno, cid, name
indexes[name]['unique'] = True
return indexes
def _table_info(self, cursor, name):
cursor.execute('PRAGMA table_info(%s)' % self.connection.ops.quote_name(name))
# cid, name, type, notnull, dflt_value, pk
return [{'name': field[1],
'type': field[2],
'null_ok': not field[3],
'pk': field[5] # undocumented
} for field in cursor.fetchall()]
| bsd-3-clause |
steveklabnik/servo | tests/wpt/web-platform-tests/2dcontext/tools/specextract.py | 132 | 2100 | import html5lib
import html5lib.treebuilders.dom
# Expected use:
# curl --compressed http://www.whatwg.org/specs/web-apps/current-work/ >current-work
# python specextract.py
#
# Generates current-work-canvas.xhtml, for use by gentest.py to create the annotated spec document
def extract():
parser = html5lib.html5parser.HTMLParser(tree=html5lib.treebuilders.dom.TreeBuilder)
doc = parser.parse(open('current-work'), encoding='utf-8')
head = doc.getElementsByTagName('head')[0]
for n in head.childNodes:
if n.tagName == 'script':
head.removeChild(n)
header = doc.getElementsByTagName('header')[0]
#thecanvas = doc.getElementById('the-canvas') # doesn't work (?!)
thecanvas = [ n for n in doc.getElementsByTagName('h4') if n.getAttribute('id') == 'the-canvas-element' ][0]
keep = [header, thecanvas]
node = thecanvas.nextSibling
while node.nodeName != 'h4':
keep.append(node)
node = node.nextSibling
p = thecanvas.parentNode
for n in p.childNodes[:]:
if n not in keep:
p.removeChild(n)
for n in header.childNodes[3:-4]:
header.removeChild(n)
def make_absolute(uri):
if uri.startswith('data:'):
return uri
elif uri[0] == '/':
return 'http://www.whatwg.org' + uri
else:
return 'http://www.whatwg.org/specs/web-apps/current-work/' + uri
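    # e.g. (sketch, hypothetical filenames): make_absolute('/images/logo.png')
    #   -> 'http://www.whatwg.org/images/logo.png', while a bare 'style.css'
    #   -> 'http://www.whatwg.org/specs/web-apps/current-work/style.css'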
# Fix the stylesheet, icon and image references
for e in doc.getElementsByTagName('link'):
e.setAttribute('href', make_absolute(e.getAttribute('href')))
for img in doc.getElementsByTagName('img'):
img.setAttribute('src', make_absolute(img.getAttribute('src')))
# Convert to XHTML, because it's quicker to re-parse than HTML5
doc.documentElement.setAttribute('xmlns', 'http://www.w3.org/1999/xhtml')
doc.documentElement.setAttribute('xml:lang', doc.documentElement.getAttribute('lang'))
doc.removeChild(doc.firstChild) # remove the DOCTYPE
open('current-work-canvas.xhtml', 'w').write(doc.toxml(encoding = 'UTF-8'))
extract()
| mpl-2.0 |
Jeff-Lewis/namebench | libnamebench/addr_util.py | 173 | 4999 | # Copyright 2009 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions related to IP Addresses & Hostnames."""
# TODO(tstromberg): Investigate replacement with ipaddr library
__author__ = 'tstromberg@google.com (Thomas Stromberg)'
import re
import zlib
import util
# TODO(tstromberg): Find a way to combine the following two regexps.
# Used to decide whether or not to benchmark a name
INTERNAL_RE = re.compile('^0|\.pro[md]z*\.|\.corp|\.bor|\.hot$|internal|dmz|'
'\._[ut][dc]p\.|intra|\.\w$|\.\w{5,}$', re.IGNORECASE)
# Used to decide if a hostname should be censored later.
PRIVATE_RE = re.compile('^\w+dc\.|^\w+ds\.|^\w+sv\.|^\w+nt\.|\.corp|internal|'
'intranet|\.local', re.IGNORECASE)
# ^.*[\w-]+\.[\w-]+\.[\w-]+\.[a-zA-Z]+\.$|^[\w-]+\.[\w-]{3,}\.[a-zA-Z]+\.$
FQDN_RE = re.compile('^.*\..*\..*\..*\.$|^.*\.[\w-]*\.\w{3,4}\.$|^[\w-]+\.[\w-]{4,}\.\w+\.')
IP_RE = re.compile('^[0-9.]+$')
KNOWN_SECOND_DOMAINS = [x.rstrip() for x in open(util.FindDataFile('data/second_level_domains.txt')).readlines()]
def ExtractIPsFromString(ip_string):
"""Return a tuple of ip addressed held in a string."""
ips = []
# IPV6 If this regexp is too loose, see Regexp-IPv6 in CPAN for inspiration.
ips.extend(re.findall('[\dabcdef:]+:[\dabcdef:]+', ip_string, re.IGNORECASE))
for ip in re.findall('\d+\.\d+\.\d+\.+\d+', ip_string):
# Remove any leading zeros
ips.append(re.sub('\.0(\d+)', '.\\1', ip))
return ips
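# Example of the expected behavior (a sketch):
#   ExtractIPsFromString('fe80::1 via 10.01.02.3') -> ['fe80::1', '10.1.2.3']
# (IPv6 matches are collected first; leading zeros in octets are stripped.)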
def IsPrivateHostname(hostname):
"""Basic matching to determine if the hostname is likely to be 'internal'."""
if PRIVATE_RE.search(hostname):
return True
else:
return False
def GetNetworkForIp(ip, reverse=False):
"""Turn into a class C."""
if '.' in ip:
bits = ip.split('.')[0:3]
if reverse:
bits.reverse()
return '.'.join(bits)
elif ':' in ip:
print "GetNetworkForIp() does not yet support IPv6"
return None
def GetDomainFromHostname(hostname):
"""Get the domain part of a hostname."""
for second_level in KNOWN_SECOND_DOMAINS:
if hostname.lower().endswith(second_level):
custom_part = hostname.replace(second_level, '').split('.')[-1]
return '%s%s' % (custom_part, second_level)
return '.'.join(hostname.split('.')[-2:]).lower()
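# Sketch (assuming '.co.uk' is listed in data/second_level_domains.txt):
#   GetDomainFromHostname('ns1.example.co.uk') -> 'example.co.uk'
#   GetDomainFromHostname('ns1.example.com')   -> 'example.com'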
def GetProviderPartOfHostname(hostname):
"""Get the custom patr of a hostname"""
return GetDomainFromHostname(hostname).split('.')[0]
def IsLoopbackIP(ip):
"""Boolean check to see if an IP is private or not.
Args:
ip: str
Returns:
Boolean
"""
if ip.startswith('127.') or ip == '::1':
return True
else:
return False
def IsPrivateIP(ip):
"""Boolean check to see if an IP is private or not.
Args:
ip: str
Returns:
Number of bits that should be preserved (int, or None)
"""
if re.match('^10\.', ip):
return 1
elif re.match('^192\.168', ip):
return 2
elif re.match('^172\.(1[6-9]|2[0-9]|3[0-1])\.', ip):
return 1
else:
return None
def MaskStringWithIPs(string):
"""Mask all private IP addresses listed in a string."""
ips = ExtractIPsFromString(string)
for ip in ips:
use_bits = IsPrivateIP(ip)
if use_bits:
masked_ip = MaskIPBits(ip, use_bits)
string = string.replace(ip, masked_ip)
return string
def MaskIPBits(ip, use_bits):
"""Mask an IP, but still keep a meaningful checksum."""
ip_parts = ip.split('.')
checksum = zlib.crc32(''.join(ip_parts[use_bits:]))
masked_ip = '.'.join(ip_parts[0:use_bits])
return masked_ip + '.x-' + str(checksum)[-4:]
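# Sketch: MaskIPBits('10.11.12.13', 1) keeps one octet and appends a short
# crc32-derived suffix, e.g. '10.x-NNNN' (the four digits vary with the
# masked-off octets, so equal networks keep distinguishable checksums).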
def MaskPrivateHost(ip, hostname, name):
"""Mask unnamed private IP's."""
# If we have a name not listed as SYS-x.x.x.x, then we're clear.
if name and ip not in name:
# Even if we are listed (Internal 192-0-1 for instance), data can leak via hostname.
if IsPrivateIP(ip):
hostname = 'internal.ip'
return (ip, hostname, name)
use_bits = IsPrivateIP(ip)
if use_bits:
masked_ip = MaskIPBits(ip, use_bits)
masked_hostname = 'internal.ip'
elif IsPrivateHostname(hostname):
masked_ip = MaskIPBits(ip, 2)
masked_hostname = 'internal.name'
else:
masked_ip = ip
masked_hostname = hostname
if 'SYS-' in name:
masked_name = 'SYS-%s' % masked_ip
else:
masked_name = ''
return (masked_ip, masked_hostname, masked_name)
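# Sketch of the masking (checksum digits NNNN vary with the input):
#   MaskPrivateHost('192.168.1.5', 'dc01.corp.example.com', 'SYS-192.168.1.5')
#     -> ('192.168.x-NNNN', 'internal.ip', 'SYS-192.168.x-NNNN')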
if __name__ == '__main__':
print MaskStringWithIPs('10.0.0.1 has a sharing relationship with 192.168.0.1 and 8.8.8.8')
| apache-2.0 |
warscain/deploy_juice | aaa.py | 1 | 1590 | #!/usr/bin/python
#coding=utf-8
#
#code style:
#Dir_Working = 'aaaa'
#def PrjControl(hostname)
#Class HostControl
#
from common import project
# def DeployCreate(dplname, osver, dplloc, dpldep=None):
aaa = project.DeployPrj()
# print aaa._DeployCreate_Judge('1',['Ubuntu-12.04-x86_64','centos-5.3-x86_64'],'/source/pycharm/deploy_juice/testdirdpl1',[2])
# print aaa._DeployCreate_Judge('2',['Ubuntu-12.04-x86_64','centos-5.3-x86_64'],'/source/pycharm/deploy_juice/testdirdpl2',[2])
# print aaa._DeployCreate_Judge('12311',['Ubuntu-12.04-x86_64','centos-5.3-x86_64'],'/source/pycharm/deploy_juice/testdirdplnew',[1,2,3])
aaa.DeployCreate('1',['Ubuntu-12.04-x86_64','centos-5.3-x86_64'],'/source/pycharm/deploy_juice/testdirdpl/1')
aaa.DeployCreate('2',['Ubuntu-12.04-x86_64','centos-5.3-x86_64'],'/source/pycharm/deploy_juice/testdirdpl/2')
aaa.DeployCreate('3',['Ubuntu-12.04-x86_64','centos-5.3-x86_64'],'/source/pycharm/deploy_juice/testdirdpl/3')
aaa.DeployCreate('12311',['Ubuntu-12.04-x86_64','centos-5.3-x86_64'],'/source/pycharm/deploy_juice/testdirdpl/12311',["1",2,3])
aaa.DeployCreate('4444',['Ubuntu-12.04-x86_64'],'/source/pycharm/deploy_juice/testdirdpl/4444',[12311])
aaa.DeployCreate('555',['Ubuntu-12.04-x86_64'],'/source/pycharm/deploy_juice/testdirdpl/555',[12311,2])
print 'DPL LIST', aaa.DeployList()
print aaa.DeployRead(555)
print aaa.DeployRead('1')
print aaa.DeployRead(12311)
aaa.DeployDelete(1)
aaa.DeployDelete(2)
aaa.DeployDelete(3)
aaa.DeployDelete(12311)
aaa.DeployDelete(4444)
aaa.DeployDelete(555)
aaa._MkDeepDir('/tmp/a/b/c/d/s/12', 0755)
| gpl-3.0 |
GREO/GNU-Radio | gr-wxgui/src/python/scopesink_gl.py | 1 | 7774 | #
# Copyright 2008,2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
##################################################
# Imports
##################################################
import scope_window
import common
from gnuradio import gr
from pubsub import pubsub
from constants import *
import math
class ac_couple_block(gr.hier_block2):
"""
AC couple the incoming stream by subtracting out the low pass signal.
Mute the low pass filter to disable ac coupling.
"""
def __init__(self, controller, ac_couple_key, sample_rate_key):
gr.hier_block2.__init__(
self,
"ac_couple",
gr.io_signature(1, 1, gr.sizeof_float),
gr.io_signature(1, 1, gr.sizeof_float),
)
#blocks
lpf = gr.single_pole_iir_filter_ff(0.0)
sub = gr.sub_ff()
mute = gr.mute_ff()
#connect
self.connect(self, sub, self)
self.connect(self, lpf, mute, (sub, 1))
#subscribe
controller.subscribe(ac_couple_key, lambda x: mute.set_mute(not x))
controller.subscribe(sample_rate_key, lambda x: lpf.set_taps(0.05))
#initialize
controller[ac_couple_key] = controller[ac_couple_key]
controller[sample_rate_key] = controller[sample_rate_key]
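# Flow sketch (as wired above): out = in - mute(lpf(in)). Enabling AC
# coupling releases the mute, so the single-pole IIR tracks the DC/low-pass
# component and it is subtracted out; disabling it mutes that branch and the
# input passes through unchanged.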
##################################################
# Scope sink block (wrapper for old wxgui)
##################################################
class _scope_sink_base(gr.hier_block2, common.wxgui_hb):
"""
A scope block with a gui window.
"""
def __init__(
self,
parent,
title='',
sample_rate=1,
size=scope_window.DEFAULT_WIN_SIZE,
v_scale=0,
t_scale=0,
v_offset=0,
xy_mode=False,
ac_couple=False,
num_inputs=1,
frame_rate=scope_window.DEFAULT_FRAME_RATE,
use_persistence=False,
persist_alpha=None,
**kwargs #do not end with a comma
):
#ensure analog alpha
if persist_alpha is None:
actual_frame_rate=float(frame_rate)
analog_cutoff_freq=0.5 # Hertz
#calculate alpha from wanted cutoff freq
persist_alpha = 1.0 - math.exp(-2.0*math.pi*analog_cutoff_freq/actual_frame_rate)
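			#e.g. if the default frame rate is 30 fps, with the 0.5 Hz cutoff above:
			#persist_alpha = 1 - exp(-2*pi*0.5/30) ~= 0.099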
if not t_scale: t_scale = 10.0/sample_rate
#init
gr.hier_block2.__init__(
self,
"scope_sink",
gr.io_signature(num_inputs, num_inputs, self._item_size),
gr.io_signature(0, 0, 0),
)
#scope
msgq = gr.msg_queue(2)
scope = gr.oscope_sink_f(sample_rate, msgq)
#controller
self.controller = pubsub()
self.controller.subscribe(SAMPLE_RATE_KEY, scope.set_sample_rate)
self.controller.publish(SAMPLE_RATE_KEY, scope.sample_rate)
self.controller.subscribe(DECIMATION_KEY, scope.set_decimation_count)
self.controller.publish(DECIMATION_KEY, scope.get_decimation_count)
self.controller.subscribe(TRIGGER_LEVEL_KEY, scope.set_trigger_level)
self.controller.publish(TRIGGER_LEVEL_KEY, scope.get_trigger_level)
self.controller.subscribe(TRIGGER_MODE_KEY, scope.set_trigger_mode)
self.controller.publish(TRIGGER_MODE_KEY, scope.get_trigger_mode)
self.controller.subscribe(TRIGGER_SLOPE_KEY, scope.set_trigger_slope)
self.controller.publish(TRIGGER_SLOPE_KEY, scope.get_trigger_slope)
self.controller.subscribe(TRIGGER_CHANNEL_KEY, scope.set_trigger_channel)
self.controller.publish(TRIGGER_CHANNEL_KEY, scope.get_trigger_channel)
actual_num_inputs = self._real and num_inputs or num_inputs*2
#init ac couple
for i in range(actual_num_inputs):
self.controller[common.index_key(AC_COUPLE_KEY, i)] = ac_couple
#start input watcher
common.input_watcher(msgq, self.controller, MSG_KEY)
#create window
self.win = scope_window.scope_window(
parent=parent,
controller=self.controller,
size=size,
title=title,
frame_rate=frame_rate,
num_inputs=actual_num_inputs,
sample_rate_key=SAMPLE_RATE_KEY,
t_scale=t_scale,
v_scale=v_scale,
v_offset=v_offset,
xy_mode=xy_mode,
ac_couple_key=AC_COUPLE_KEY,
trigger_level_key=TRIGGER_LEVEL_KEY,
trigger_mode_key=TRIGGER_MODE_KEY,
trigger_slope_key=TRIGGER_SLOPE_KEY,
trigger_channel_key=TRIGGER_CHANNEL_KEY,
decimation_key=DECIMATION_KEY,
msg_key=MSG_KEY,
use_persistence=use_persistence,
persist_alpha=persist_alpha,
)
common.register_access_methods(self, self.win)
#connect
if self._real:
for i in range(num_inputs):
self.wxgui_connect(
(self, i),
ac_couple_block(self.controller, common.index_key(AC_COUPLE_KEY, i), SAMPLE_RATE_KEY),
(scope, i),
)
else:
for i in range(num_inputs):
c2f = gr.complex_to_float()
self.wxgui_connect((self, i), c2f)
for j in range(2):
self.connect(
(c2f, j),
ac_couple_block(self.controller, common.index_key(AC_COUPLE_KEY, 2*i+j), SAMPLE_RATE_KEY),
(scope, 2*i+j),
)
class scope_sink_f(_scope_sink_base):
_item_size = gr.sizeof_float
_real = True
class scope_sink_c(_scope_sink_base):
_item_size = gr.sizeof_gr_complex
_real = False
# ----------------------------------------------------------------
# Stand-alone test application
# ----------------------------------------------------------------
import wx
from gnuradio.wxgui import stdgui2
class test_top_block (stdgui2.std_top_block):
def __init__(self, frame, panel, vbox, argv):
stdgui2.std_top_block.__init__ (self, frame, panel, vbox, argv)
default_input_rate = 1e6
if len(argv) > 1:
input_rate = int(argv[1])
else:
input_rate = default_input_rate
if len(argv) > 2:
v_scale = float(argv[2]) # start up at this v_scale value
else:
v_scale = None # start up in autorange mode, default
if len(argv) > 3:
t_scale = float(argv[3]) # start up at this t_scale value
else:
t_scale = .00003*default_input_rate/input_rate # old behavior
print "input rate %s v_scale %s t_scale %s" % (input_rate,v_scale,t_scale)
# Generate a complex sinusoid
ampl=1.0e3
self.src0 = gr.sig_source_c (input_rate, gr.GR_SIN_WAVE, 25.1e3*input_rate/default_input_rate, ampl)
self.noise =gr.sig_source_c (input_rate, gr.GR_SIN_WAVE, 11.1*25.1e3*input_rate/default_input_rate, ampl/10)
#self.noise =gr.noise_source_c(gr.GR_GAUSSIAN, ampl/10)
self.combine=gr.add_cc()
# We add this throttle block so that this demo doesn't suck down
# all the CPU available. You normally wouldn't use it...
self.thr = gr.throttle(gr.sizeof_gr_complex, input_rate)
scope = scope_sink_c (panel,"Secret Data",sample_rate=input_rate,
v_scale=v_scale, t_scale=t_scale)
vbox.Add (scope.win, 1, wx.EXPAND)
# Ultimately this will be
# self.connect("src0 throttle scope")
self.connect(self.src0,(self.combine,0))
self.connect(self.noise,(self.combine,1))
self.connect(self.combine, self.thr, scope)
def main ():
app = stdgui2.stdapp (test_top_block, "O'Scope Test App")
app.MainLoop ()
if __name__ == '__main__':
main ()
| gpl-3.0 |
rjschof/gem5 | tests/configs/realview-o3-checker.py | 5 | 2435 | # Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.objects import *
from arm_generic import *
from common.O3_ARM_v7a import O3_ARM_v7a_3
root = LinuxArmFSSystemUniprocessor(mem_mode='timing',
mem_class=DDR3_1600_x64,
cpu_class=O3_ARM_v7a_3,
checker=True).create_root()
| bsd-3-clause |