repo_name: string (length 5-100)
| path: string (length 4-375)
| copies: stringclasses (991 values)
| size: string (length 4-7)
| content: string (length 666-1M)
| license: stringclasses (15 values)
|---|---|---|---|---|---|
fgesora/odoo
|
addons/note/__openerp__.py
|
260
|
2182
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Notes',
'version': '1.0',
'category': 'Tools',
'description': """
This module allows users to create their own notes inside OpenERP
=================================================================
Use notes to write meeting minutes, organize ideas, keep personal to-do
lists, etc. Each user manages their own personal notes. Notes are visible to
their authors only, but a note can be shared with other users so that several
people can work on it in real time, which makes sharing meeting minutes
very efficient.
Notes can be found in the 'Home' menu.
""",
'author': 'OpenERP SA',
'website': 'https://www.odoo.com/page/notes',
'summary': 'Sticky notes, Collaborative, Memos',
'sequence': 9,
'depends': [
'mail',
],
'data': [
'security/note_security.xml',
'security/ir.rule.xml',
'security/ir.model.access.csv',
'note_data.xml',
'note_view.xml',
'views/note.xml',
],
'demo': [
'note_demo.xml',
],
'test': [
],
'installable': True,
'application': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
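# Usage note: a module with this manifest is installed from the web client
# (Settings > Modules) or from the command line with the server's --init
# flag, e.g. `openerp-server -d mydb --init=note` (invocation assumed for
# the OpenERP 7/8 era this manifest targets).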
|
agpl-3.0
|
patrick91/pycon
|
backend/pages/migrations/0001_initial.py
|
1
|
1619
|
# Generated by Django 2.2.4 on 2019-08-27 22:02
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
('conferences', '0007_auto_20190811_1953'),
]
operations = [
migrations.CreateModel(
name='Page',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, editable=False, verbose_name='created')),
('modified', model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, editable=False, verbose_name='modified')),
('title', models.CharField(max_length=200, verbose_name='title')),
('slug', models.SlugField(blank=True, max_length=200, verbose_name='slug')),
('content', models.TextField(verbose_name='content')),
('published', models.BooleanField(default=False, verbose_name='published')),
('image', models.ImageField(blank=True, null=True, upload_to='pages', verbose_name='image')),
('conference', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='pages', to='conferences.Conference', verbose_name='conference')),
],
options={
'ordering': ['-published'],
'unique_together': {('slug', 'conference')},
},
),
]
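# For reference, a hypothetical sketch of the model this migration creates.
# Field definitions mirror the operations above; the created/modified fields
# imply django-model-utils' TimeStampedModel base. Kept as a comment, since
# model classes do not belong in migration modules.
#
#   from django.db import models
#   from model_utils.models import TimeStampedModel
#
#   class Page(TimeStampedModel):
#       title = models.CharField('title', max_length=200)
#       slug = models.SlugField('slug', max_length=200, blank=True)
#       content = models.TextField('content')
#       published = models.BooleanField('published', default=False)
#       image = models.ImageField('image', upload_to='pages',
#                                 blank=True, null=True)
#       conference = models.ForeignKey('conferences.Conference',
#                                      on_delete=models.CASCADE,
#                                      related_name='pages')
#
#       class Meta:
#           ordering = ['-published']
#           unique_together = (('slug', 'conference'),)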
|
mit
|
claneys/shinken
|
test/test_satellites.py
|
18
|
2982
|
#!/usr/bin/env python
# Copyright (C) 2009-2014:
# Gabes Jean, naparuba@gmail.com
# Gerhard Lausser, Gerhard.Lausser@consol.de
#
# This file is part of Shinken.
#
# Shinken is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Shinken is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Shinken. If not, see <http://www.gnu.org/licenses/>.
#
# This file is used to test reading and processing of config files
#
from shinken_test import *
class TestConfig(ShinkenTest):
# setUp is inherited from ShinkenTest
def test_satellite_failed_check(self):
print "Create a Scheduler dummy"
r = self.conf.realms.find_by_name('Default')
creation_tab = {'scheduler_name': 'scheduler-1', 'address': '0.0.0.0', 'spare': '0',
'port': '9999', 'check_interval': '1', 'realm': 'Default', 'use_ssl': '0', 'hard_ssl_name_check': '0'}
s = SchedulerLink(creation_tab)
s.last_check = time.time() - 100
s.timeout = 3
s.check_interval = 1
s.data_timeout = 120
s.port = 9999
s.max_check_attempts = 4
s.realm = r
# Lie: we start at true here
s.alive = True
print s.__dict__
# Should be attempt = 0
self.assertEqual(0, s.attempt)
# Now make a bad ping: should be unreachable (but not dead yet)
s.ping()
self.assertEqual(1, s.attempt)
self.assertEqual(True, s.alive)
self.assertEqual(False, s.reachable)
# Now make a bad ping: should be unreachable (but not dead yet)
s.last_check = time.time() - 100
s.ping()
self.assertEqual(2, s.attempt)
self.assertEqual(True, s.alive)
self.assertEqual(False, s.reachable)
# Now make a bad ping: should be unreachable (but not dead yet)
s.last_check = time.time() - 100
s.ping()
self.assertEqual(3, s.attempt)
self.assertEqual(True, s.alive)
self.assertEqual(False, s.reachable)
# Ok, this time we go DEAD!
s.last_check = time.time() - 100
s.ping()
self.assertEqual(4, s.attempt)
self.assertEqual(False, s.alive)
self.assertEqual(False, s.reachable)
# Now fake an OK ping via set_alive() (we won't actually open the port here...)
s.last_check = time.time() - 100
s.set_alive()
self.assertEqual(0, s.attempt)
self.assertEqual(True, s.alive)
self.assertEqual(True, s.reachable)
if __name__ == '__main__':
unittest.main()
|
agpl-3.0
|
bob-the-hamster/kivy
|
kivy/uix/floatlayout.py
|
26
|
3807
|
'''
Float Layout
============
:class:`FloatLayout` honors the :attr:`~kivy.uix.widget.Widget.pos_hint`
and the :attr:`~kivy.uix.widget.Widget.size_hint` properties of its children.
.. only:: html
.. image:: images/floatlayout.gif
:align: right
.. only:: latex
.. image:: images/floatlayout.png
:align: right
For example, a FloatLayout with a size of (300, 300) is created::
layout = FloatLayout(size=(300, 300))
By default, all widgets have their size_hint=(1, 1), so this button will adopt
the same size as the layout::
button = Button(text='Hello world')
layout.add_widget(button)
To create a button 50% of the width and 25% of the height of the layout and
positioned at (20, 20), you can do::
button = Button(
text='Hello world',
size_hint=(.5, .25),
pos=(20, 20))
If you want to create a button that will always be the size of the layout
minus 20% on each side::
button = Button(text='Hello world', size_hint=(.6, .6),
pos_hint={'x':.2, 'y':.2})
.. note::
This layout can be used for an application. Most of the time, you will
use the size of the Window.
.. warning::
If you are not using pos_hint, you must handle the positioning of the
children: if the float layout is moving, you must handle moving the
children too.
'''
__all__ = ('FloatLayout', )
from kivy.uix.layout import Layout
class FloatLayout(Layout):
'''Float layout class. See module documentation for more information.
'''
def __init__(self, **kwargs):
kwargs.setdefault('size', (1, 1))
super(FloatLayout, self).__init__(**kwargs)
fbind = self.fbind
update = self._trigger_layout
fbind('children', update)
fbind('pos', update)
fbind('pos_hint', update)
fbind('size_hint', update)
fbind('size', update)
def do_layout(self, *largs, **kwargs):
# Optimization: while the size is still the default (1, 1), don't lay out
if self.size == [1, 1]:
return
# optimize layout by preventing looking at the same attribute in a loop
w, h = kwargs.get('size', self.size)
x, y = kwargs.get('pos', self.pos)
for c in self.children:
# size
shw, shh = c.size_hint
if shw and shh:
c.size = w * shw, h * shh
elif shw:
c.width = w * shw
elif shh:
c.height = h * shh
# pos
for key, value in c.pos_hint.items():
if key == 'x':
c.x = x + value * w
elif key == 'right':
c.right = x + value * w
elif key == 'pos':
c.pos = x + value[0] * w, y + value[1] * h
elif key == 'y':
c.y = y + value * h
elif key == 'top':
c.top = y + value * h
elif key == 'center':
c.center = x + value[0] * w, y + value[1] * h
elif key == 'center_x':
c.center_x = x + value * w
elif key == 'center_y':
c.center_y = y + value * h
def add_widget(self, widget, index=0):
widget.bind(
#size=self._trigger_layout,
#size_hint=self._trigger_layout,
pos=self._trigger_layout,
pos_hint=self._trigger_layout)
return super(FloatLayout, self).add_widget(widget, index)
def remove_widget(self, widget):
widget.unbind(
#size=self._trigger_layout,
#size_hint=self._trigger_layout,
pos=self._trigger_layout,
pos_hint=self._trigger_layout)
return super(FloatLayout, self).remove_widget(widget)
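# A minimal runnable sketch of the size_hint/pos_hint behaviour documented
# above, assuming a standard Kivy install. It only runs when this module is
# executed as a script, so importing the module is unaffected.
if __name__ == '__main__':
    from kivy.app import App
    from kivy.uix.button import Button

    class _FloatDemoApp(App):
        def build(self):
            layout = FloatLayout()
            # 50% wide, 25% tall, horizontally centered near the bottom.
            layout.add_widget(Button(text='Hello world',
                                     size_hint=(.5, .25),
                                     pos_hint={'center_x': .5, 'y': .1}))
            return layout

    _FloatDemoApp().run()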
|
mit
|
e-gob/plataforma-kioscos-autoatencion
|
scripts/ansible-play/.venv/lib/python2.7/site-packages/ansible/plugins/connection/zone.py
|
21
|
7937
|
# Based on local.py (c) 2012, Michael DeHaan <michael.dehaan@gmail.com>
# and chroot.py (c) 2013, Maykel Moya <mmoya@speedyrails.com>
# and jail.py (c) 2013, Michael Scherer <misc@zarb.org>
# (c) 2015, Dagobert Michelsen <dam@baltic-online.de>
# (c) 2015, Toshio Kuratomi <tkuratomi@ansible.com>
# Copyright (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
author: Ansible Core Team
connection: zone
short_description: Run tasks in a zone instance
description:
- Run commands or put/fetch files to an existing zone
version_added: "2.0"
options:
remote_addr:
description:
- Zone identifier
default: inventory_hostname
vars:
- name: ansible_host
- name: ansible_zone_host
"""
import distutils.spawn
import os
import os.path
import subprocess
import traceback
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.module_utils.six.moves import shlex_quote
from ansible.module_utils._text import to_bytes
from ansible.plugins.connection import ConnectionBase, BUFSIZE
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class Connection(ConnectionBase):
''' Local zone based connections '''
transport = 'zone'
has_pipelining = True
become_methods = frozenset(C.BECOME_METHODS).difference(('su',))
def __init__(self, play_context, new_stdin, *args, **kwargs):
super(Connection, self).__init__(play_context, new_stdin, *args, **kwargs)
self.zone = self._play_context.remote_addr
if os.geteuid() != 0:
raise AnsibleError("zone connection requires running as root")
self.zoneadm_cmd = to_bytes(self._search_executable('zoneadm'))
self.zlogin_cmd = to_bytes(self._search_executable('zlogin'))
if self.zone not in self.list_zones():
raise AnsibleError("incorrect zone name %s" % self.zone)
@staticmethod
def _search_executable(executable):
cmd = distutils.spawn.find_executable(executable)
if not cmd:
raise AnsibleError("%s command not found in PATH" % executable)
return cmd
def list_zones(self):
process = subprocess.Popen([self.zoneadm_cmd, 'list', '-ip'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
zones = []
for l in process.stdout.readlines():
# 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared
s = l.split(':')
if s[1] != 'global':
zones.append(s[1])
return zones
def get_zone_path(self):
# solaris10vm# zoneadm -z cswbuild list -p
# -:cswbuild:installed:/zones/cswbuild:479f3c4b-d0c6-e97b-cd04-fd58f2c0238e:native:shared
process = subprocess.Popen([self.zoneadm_cmd, '-z', to_bytes(self.zone), 'list', '-p'],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
# stdout, stderr = p.communicate()
path = process.stdout.readlines()[0].split(':')[3]
return path + '/root'
def _connect(self):
''' connect to the zone; nothing to do here '''
super(Connection, self)._connect()
if not self._connected:
display.vvv("THIS IS A LOCAL ZONE DIR", host=self.zone)
self._connected = True
def _buffered_exec_command(self, cmd, stdin=subprocess.PIPE):
''' run a command on the zone. This is only needed for implementing
put_file() and fetch_file() so that we don't have to read the whole file
into memory.
compared to exec_command() it loses some niceties like being able to
return the process's exit code immediately.
'''
# NOTE: zlogin invokes a shell (just like ssh does) so we do not pass
# this through /bin/sh -c here. Instead it goes through the shell
# that zlogin selects.
local_cmd = [self.zlogin_cmd, self.zone, cmd]
local_cmd = map(to_bytes, local_cmd)
display.vvv("EXEC %s" % (local_cmd), host=self.zone)
p = subprocess.Popen(local_cmd, shell=False, stdin=stdin,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
return p
def exec_command(self, cmd, in_data=None, sudoable=False):
''' run a command on the zone '''
super(Connection, self).exec_command(cmd, in_data=in_data, sudoable=sudoable)
p = self._buffered_exec_command(cmd)
stdout, stderr = p.communicate(in_data)
return (p.returncode, stdout, stderr)
def _prefix_login_path(self, remote_path):
''' Make sure that we put files into a standard path
If a path is relative, then we need to choose where to put it.
ssh chooses $HOME but we aren't guaranteed that a home dir will
exist in any given chroot. So for now we're choosing "/" instead.
This also happens to be the former default.
Can revisit using $HOME instead if it's a problem
'''
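# Example: 'etc/motd' becomes '/etc/motd'; absolute paths are returned
# unchanged apart from normalization.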
if not remote_path.startswith(os.path.sep):
remote_path = os.path.join(os.path.sep, remote_path)
return os.path.normpath(remote_path)
def put_file(self, in_path, out_path):
''' transfer a file from local to zone '''
super(Connection, self).put_file(in_path, out_path)
display.vvv("PUT %s TO %s" % (in_path, out_path), host=self.zone)
out_path = shlex_quote(self._prefix_login_path(out_path))
try:
with open(in_path, 'rb') as in_file:
try:
p = self._buffered_exec_command('dd of=%s bs=%s' % (out_path, BUFSIZE), stdin=in_file)
except OSError:
raise AnsibleError("jail connection requires dd command in the jail")
try:
stdout, stderr = p.communicate()
except:
traceback.print_exc()
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
if p.returncode != 0:
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
except IOError:
raise AnsibleError("file or module does not exist at: %s" % in_path)
def fetch_file(self, in_path, out_path):
''' fetch a file from zone to local '''
super(Connection, self).fetch_file(in_path, out_path)
display.vvv("FETCH %s TO %s" % (in_path, out_path), host=self.zone)
in_path = shlex_quote(self._prefix_login_path(in_path))
try:
p = self._buffered_exec_command('dd if=%s bs=%s' % (in_path, BUFSIZE))
except OSError:
raise AnsibleError("zone connection requires dd command in the zone")
with open(out_path, 'wb+') as out_file:
try:
chunk = p.stdout.read(BUFSIZE)
while chunk:
out_file.write(chunk)
chunk = p.stdout.read(BUFSIZE)
except:
traceback.print_exc()
raise AnsibleError("failed to transfer file %s to %s" % (in_path, out_path))
stdout, stderr = p.communicate()
if p.returncode != 0:
raise AnsibleError("failed to transfer file %s to %s:\n%s\n%s" % (in_path, out_path, stdout, stderr))
def close(self):
''' terminate the connection; nothing to do here '''
super(Connection, self).close()
self._connected = False
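# Usage sketch (inventory form assumed, not taken from this file): on a
# Solaris global zone, a non-global zone can be targeted with an inventory
# entry such as
#
#   myzone ansible_connection=zone ansible_host=myzone
#
# after which tasks run inside the zone via zlogin and files are copied
# with dd, as implemented above.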
|
bsd-3-clause
|
jblackburne/scikit-learn
|
sklearn/tree/export.py
|
12
|
16020
|
"""
This module defines export functions for decision trees.
"""
# Authors: Gilles Louppe <g.louppe@gmail.com>
# Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Brian Holt <bdholt1@gmail.com>
# Noel Dawe <noel@dawe.me>
# Satrajit Gosh <satrajit.ghosh@gmail.com>
# Trevor Stephens <trev.stephens@gmail.com>
# License: BSD 3 clause
import numpy as np
from ..externals import six
from . import _criterion
from . import _tree
def _color_brew(n):
"""Generate n colors with equally spaced hues.
Parameters
----------
n : int
The number of colors required.
Returns
-------
color_list : list, length n
List of n tuples of form (R, G, B) being the components of each color.
"""
color_list = []
# Initialize saturation & value; calculate chroma & value shift
s, v = 0.75, 0.9
c = s * v
m = v - c
for h in np.arange(25, 385, 360. / n).astype(int):
# Calculate some intermediate values
h_bar = h / 60.
x = c * (1 - abs((h_bar % 2) - 1))
# Initialize RGB with same hue & chroma as our color
rgb = [(c, x, 0),
(x, c, 0),
(0, c, x),
(0, x, c),
(x, 0, c),
(c, 0, x),
(c, x, 0)]
r, g, b = rgb[int(h_bar)]
# Shift the initial RGB values to match value and store
rgb = [(int(255 * (r + m))),
(int(255 * (g + m))),
(int(255 * (b + m)))]
color_list.append(rgb)
return color_list
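# Example: _color_brew(3) returns three [R, G, B] lists whose hues are
# spaced 360 / 3 = 120 degrees apart, one per class.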
def export_graphviz(decision_tree, out_file="tree.dot", max_depth=None,
feature_names=None, class_names=None, label='all',
filled=False, leaves_parallel=False, impurity=True,
node_ids=False, proportion=False, rotate=False,
rounded=False, special_characters=False):
"""Export a decision tree in DOT format.
This function generates a GraphViz representation of the decision tree,
which is then written into `out_file`. Once exported, graphical renderings
can be generated using, for example::
$ dot -Tps tree.dot -o tree.ps (PostScript format)
$ dot -Tpng tree.dot -o tree.png (PNG format)
The sample counts that are shown are weighted with any sample_weights that
might be present.
Read more in the :ref:`User Guide <tree>`.
Parameters
----------
decision_tree : decision tree classifier
The decision tree to be exported to GraphViz.
out_file : file object or string, optional (default="tree.dot")
Handle or name of the output file.
max_depth : int, optional (default=None)
The maximum depth of the representation. If None, the tree is fully
generated.
feature_names : list of strings, optional (default=None)
Names of each of the features.
class_names : list of strings, bool or None, optional (default=None)
Names of each of the target classes in ascending numerical order.
Only relevant for classification and not supported for multi-output.
If ``True``, shows a symbolic representation of the class name.
label : {'all', 'root', 'none'}, optional (default='all')
Whether to show informative labels for impurity, etc.
Options include 'all' to show at every node, 'root' to show only at
the top root node, or 'none' to not show at any node.
filled : bool, optional (default=False)
When set to ``True``, paint nodes to indicate majority class for
classification, extremity of values for regression, or purity of node
for multi-output.
leaves_parallel : bool, optional (default=False)
When set to ``True``, draw all leaf nodes at the bottom of the tree.
impurity : bool, optional (default=True)
When set to ``True``, show the impurity at each node.
node_ids : bool, optional (default=False)
When set to ``True``, show the ID number on each node.
proportion : bool, optional (default=False)
When set to ``True``, change the display of 'values' and/or 'samples'
to be proportions and percentages respectively.
rotate : bool, optional (default=False)
When set to ``True``, orient tree left to right rather than top-down.
rounded : bool, optional (default=False)
When set to ``True``, draw node boxes with rounded corners and use
Helvetica fonts instead of Times-Roman.
special_characters : bool, optional (default=False)
When set to ``False``, ignore special characters for PostScript
compatibility.
Examples
--------
>>> from sklearn.datasets import load_iris
>>> from sklearn import tree
>>> clf = tree.DecisionTreeClassifier()
>>> iris = load_iris()
>>> clf = clf.fit(iris.data, iris.target)
>>> tree.export_graphviz(clf,
... out_file='tree.dot') # doctest: +SKIP
"""
def get_color(value):
# Find the appropriate color & intensity for a node
if colors['bounds'] is None:
# Classification tree
color = list(colors['rgb'][np.argmax(value)])
sorted_values = sorted(value, reverse=True)
if len(sorted_values) == 1:
alpha = 0
else:
alpha = int(np.round(255 * (sorted_values[0] - sorted_values[1]) /
(1 - sorted_values[1]), 0))
else:
# Regression tree or multi-output
color = list(colors['rgb'][0])
alpha = int(np.round(255 * ((value - colors['bounds'][0]) /
(colors['bounds'][1] -
colors['bounds'][0])), 0))
# Return html color code in #RRGGBBAA format
color.append(alpha)
hex_codes = [str(i) for i in range(10)]
hex_codes.extend(['a', 'b', 'c', 'd', 'e', 'f'])
color = [hex_codes[c // 16] + hex_codes[c % 16] for c in color]
return '#' + ''.join(color)
def node_to_str(tree, node_id, criterion):
# Generate the node content string
if tree.n_outputs == 1:
value = tree.value[node_id][0, :]
else:
value = tree.value[node_id]
# Should labels be shown?
labels = (label == 'root' and node_id == 0) or label == 'all'
# PostScript compatibility for special characters
if special_characters:
characters = ['#', '<SUB>', '</SUB>', '≤', '<br/>', '>']
node_string = '<'
else:
characters = ['#', '[', ']', '<=', '\\n', '"']
node_string = '"'
# Write node ID
if node_ids:
if labels:
node_string += 'node '
node_string += characters[0] + str(node_id) + characters[4]
# Write decision criteria
if tree.children_left[node_id] != _tree.TREE_LEAF:
# Always write node decision criteria, except for leaves
if feature_names is not None:
feature = feature_names[tree.feature[node_id]]
else:
feature = "X%s%s%s" % (characters[1],
tree.feature[node_id],
characters[2])
node_string += '%s %s %s%s' % (feature,
characters[3],
round(tree.threshold[node_id], 4),
characters[4])
# Write impurity
if impurity:
if isinstance(criterion, _criterion.FriedmanMSE):
criterion = "friedman_mse"
elif not isinstance(criterion, six.string_types):
criterion = "impurity"
if labels:
node_string += '%s = ' % criterion
node_string += (str(round(tree.impurity[node_id], 4)) +
characters[4])
# Write node sample count
if labels:
node_string += 'samples = '
if proportion:
percent = (100. * tree.n_node_samples[node_id] /
float(tree.n_node_samples[0]))
node_string += (str(round(percent, 1)) + '%' +
characters[4])
else:
node_string += (str(tree.n_node_samples[node_id]) +
characters[4])
# Write node class distribution / regression value
if proportion and tree.n_classes[0] != 1:
# For classification this will show the proportion of samples
value = value / tree.weighted_n_node_samples[node_id]
if labels:
node_string += 'value = '
if tree.n_classes[0] == 1:
# Regression
value_text = np.around(value, 4)
elif proportion:
# Classification
value_text = np.around(value, 2)
elif np.all(np.equal(np.mod(value, 1), 0)):
# Classification without floating-point weights
value_text = value.astype(int)
else:
# Classification with floating-point weights
value_text = np.around(value, 4)
# Strip whitespace
value_text = str(value_text.astype('S32')).replace("b'", "'")
value_text = value_text.replace("' '", ", ").replace("'", "")
if tree.n_classes[0] == 1 and tree.n_outputs == 1:
value_text = value_text.replace("[", "").replace("]", "")
value_text = value_text.replace("\n ", characters[4])
node_string += value_text + characters[4]
# Write node majority class
if (class_names is not None and
tree.n_classes[0] != 1 and
tree.n_outputs == 1):
# Only done for single-output classification trees
if labels:
node_string += 'class = '
if class_names is not True:
class_name = class_names[np.argmax(value)]
else:
class_name = "y%s%s%s" % (characters[1],
np.argmax(value),
characters[2])
node_string += class_name
# Clean up any trailing newlines
if node_string[-2:] == '\\n':
node_string = node_string[:-2]
if node_string[-5:] == '<br/>':
node_string = node_string[:-5]
return node_string + characters[5]
def recurse(tree, node_id, criterion, parent=None, depth=0):
if node_id == _tree.TREE_LEAF:
raise ValueError("Invalid node_id %s" % _tree.TREE_LEAF)
left_child = tree.children_left[node_id]
right_child = tree.children_right[node_id]
# Add node with description
if max_depth is None or depth <= max_depth:
# Collect ranks for 'leaf' option in plot_options
if left_child == _tree.TREE_LEAF:
ranks['leaves'].append(str(node_id))
elif str(depth) not in ranks:
ranks[str(depth)] = [str(node_id)]
else:
ranks[str(depth)].append(str(node_id))
out_file.write('%d [label=%s'
% (node_id,
node_to_str(tree, node_id, criterion)))
if filled:
# Fetch appropriate color for node
if 'rgb' not in colors:
# Initialize colors and bounds if required
colors['rgb'] = _color_brew(tree.n_classes[0])
if tree.n_outputs != 1:
# Find max and min impurities for multi-output
colors['bounds'] = (np.min(-tree.impurity),
np.max(-tree.impurity))
elif tree.n_classes[0] == 1 and len(np.unique(tree.value)) != 1:
# Find max and min values in leaf nodes for regression
colors['bounds'] = (np.min(tree.value),
np.max(tree.value))
if tree.n_outputs == 1:
node_val = (tree.value[node_id][0, :] /
tree.weighted_n_node_samples[node_id])
if tree.n_classes[0] == 1:
# Regression
node_val = tree.value[node_id][0, :]
else:
# If multi-output color node by impurity
node_val = -tree.impurity[node_id]
out_file.write(', fillcolor="%s"' % get_color(node_val))
out_file.write('] ;\n')
if parent is not None:
# Add edge to parent
out_file.write('%d -> %d' % (parent, node_id))
if parent == 0:
# Draw True/False labels if parent is root node
angles = np.array([45, -45]) * ((rotate - .5) * -2)
out_file.write(' [labeldistance=2.5, labelangle=')
if node_id == 1:
out_file.write('%d, headlabel="True"]' % angles[0])
else:
out_file.write('%d, headlabel="False"]' % angles[1])
out_file.write(' ;\n')
if left_child != _tree.TREE_LEAF:
recurse(tree, left_child, criterion=criterion, parent=node_id,
depth=depth + 1)
recurse(tree, right_child, criterion=criterion, parent=node_id,
depth=depth + 1)
else:
ranks['leaves'].append(str(node_id))
out_file.write('%d [label="(...)"' % node_id)
if filled:
# color cropped nodes grey
out_file.write(', fillcolor="#C0C0C0"')
out_file.write('] ;\n')
if parent is not None:
# Add edge to parent
out_file.write('%d -> %d ;\n' % (parent, node_id))
own_file = False
try:
if isinstance(out_file, six.string_types):
if six.PY3:
out_file = open(out_file, "w", encoding="utf-8")
else:
out_file = open(out_file, "wb")
own_file = True
# The depth of each node for plotting with 'leaf' option
ranks = {'leaves': []}
# The colors to render each node with
colors = {'bounds': None}
out_file.write('digraph Tree {\n')
# Specify node aesthetics
out_file.write('node [shape=box')
rounded_filled = []
if filled:
rounded_filled.append('filled')
if rounded:
rounded_filled.append('rounded')
if len(rounded_filled) > 0:
out_file.write(', style="%s", color="black"'
% ", ".join(rounded_filled))
if rounded:
out_file.write(', fontname=helvetica')
out_file.write('] ;\n')
# Specify graph & edge aesthetics
if leaves_parallel:
out_file.write('graph [ranksep=equally, splines=polyline] ;\n')
if rounded:
out_file.write('edge [fontname=helvetica] ;\n')
if rotate:
out_file.write('rankdir=LR ;\n')
# Now recurse the tree and add node & edge attributes
if isinstance(decision_tree, _tree.Tree):
recurse(decision_tree, 0, criterion="impurity")
else:
recurse(decision_tree.tree_, 0, criterion=decision_tree.criterion)
# If required, draw leaf nodes at same depth as each other
if leaves_parallel:
for rank in sorted(ranks):
out_file.write("{rank=same ; " +
"; ".join(r for r in ranks[rank]) + "} ;\n")
out_file.write("}")
finally:
if own_file:
out_file.close()
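# A minimal usage sketch, assuming scikit-learn and its bundled datasets are
# available: train a small tree and export it; the resulting .dot file can
# then be rendered with the external Graphviz tool, e.g.
# `dot -Tpng iris_tree.dot -o iris_tree.png`.
if __name__ == '__main__':
    from sklearn.datasets import load_iris
    from sklearn.tree import DecisionTreeClassifier

    iris = load_iris()
    clf = DecisionTreeClassifier(max_depth=3).fit(iris.data, iris.target)
    export_graphviz(clf, out_file='iris_tree.dot',
                    feature_names=iris.feature_names,
                    class_names=list(iris.target_names),
                    filled=True, rounded=True)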
|
bsd-3-clause
|
lmazuel/azure-sdk-for-python
|
azure-mgmt-sql/azure/mgmt/sql/operations/sync_members_operations.py
|
1
|
37014
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
import uuid
from msrest.pipeline import ClientRawResponse
from msrestazure.azure_exceptions import CloudError
from msrest.exceptions import DeserializationError
from msrestazure.azure_operation import AzureOperationPoller
from .. import models
class SyncMembersOperations(object):
"""SyncMembersOperations operations.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
:ivar api_version: The API version to use for the request. Constant value: "2015-05-01-preview".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self.api_version = "2015-05-01-preview"
self.config = config
def get(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, custom_headers=None, raw=False, **operation_config):
"""Gets a sync member.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param database_name: The name of the database on which the sync group
is hosted.
:type database_name: str
:param sync_group_name: The name of the sync group on which the sync
member is hosted.
:type sync_group_name: str
:param sync_member_name: The name of the sync member.
:type sync_member_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: SyncMember or ClientRawResponse if raw=true
:rtype: ~azure.mgmt.sql.models.SyncMember or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
# Construct URL
url = self.get.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'),
'syncMemberName': self._serialize.url("sync_member_name", sync_member_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SyncMember', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/syncMembers/{syncMemberName}'}
def _create_or_update_initial(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.create_or_update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'),
'syncMemberName': self._serialize.url("sync_member_name", sync_member_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'SyncMember')
# Construct and send request
request = self._client.put(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 201, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SyncMember', response)
if response.status_code == 201:
deserialized = self._deserialize('SyncMember', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def create_or_update(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Creates or updates a sync member.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param database_name: The name of the database on which the sync group
is hosted.
:type database_name: str
:param sync_group_name: The name of the sync group on which the sync
member is hosted.
:type sync_group_name: str
:param sync_member_name: The name of the sync member.
:type sync_member_name: str
:param parameters: The requested sync member resource state.
:type parameters: ~azure.mgmt.sql.models.SyncMember
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns SyncMember
or ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.SyncMember]
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
server_name=server_name,
database_name=database_name,
sync_group_name=sync_group_name,
sync_member_name=sync_member_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 201, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = self._deserialize('SyncMember', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/syncMembers/{syncMemberName}'}
def _delete_initial(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.delete.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'),
'syncMemberName': self._serialize.url("sync_member_name", sync_member_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def delete(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, custom_headers=None, raw=False, **operation_config):
"""Deletes a sync member.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param database_name: The name of the database on which the sync group
is hosted.
:type database_name: str
:param sync_group_name: The name of the sync group on which the sync
member is hosted.
:type sync_group_name: str
:param sync_member_name: The name of the sync member.
:type sync_member_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns None or
ClientRawResponse if raw=true
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
server_name=server_name,
database_name=database_name,
sync_group_name=sync_group_name,
sync_member_name=sync_member_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202, 204]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/syncMembers/{syncMemberName}'}
def _update_initial(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, parameters, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.update.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'),
'syncMemberName': self._serialize.url("sync_member_name", sync_member_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct body
body_content = self._serialize.body(parameters, 'SyncMember')
# Construct and send request
request = self._client.patch(url, query_parameters)
response = self._client.send(
request, header_parameters, body_content, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('SyncMember', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
def update(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, parameters, custom_headers=None, raw=False, **operation_config):
"""Updates an existing sync member.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param database_name: The name of the database on which the sync group
is hosted.
:type database_name: str
:param sync_group_name: The name of the sync group on which the sync
member is hosted.
:type sync_group_name: str
:param sync_member_name: The name of the sync member.
:type sync_member_name: str
:param parameters: The requested sync member resource state.
:type parameters: ~azure.mgmt.sql.models.SyncMember
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns SyncMember
or ClientRawResponse if raw=true
:rtype:
~msrestazure.azure_operation.AzureOperationPoller[~azure.mgmt.sql.models.SyncMember]
or ~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._update_initial(
resource_group_name=resource_group_name,
server_name=server_name,
database_name=database_name,
sync_group_name=sync_group_name,
sync_member_name=sync_member_name,
parameters=parameters,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
deserialized = self._deserialize('SyncMember', response)
if raw:
client_raw_response = ClientRawResponse(deserialized, response)
return client_raw_response
return deserialized
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/syncMembers/{syncMemberName}'}
def list_by_sync_group(
self, resource_group_name, server_name, database_name, sync_group_name, custom_headers=None, raw=False, **operation_config):
"""Lists sync members in the given sync group.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param database_name: The name of the database on which the sync group
is hosted.
:type database_name: str
:param sync_group_name: The name of the sync group.
:type sync_group_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of SyncMember
:rtype:
~azure.mgmt.sql.models.SyncMemberPaged[~azure.mgmt.sql.models.SyncMember]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_by_sync_group.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.SyncMemberPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.SyncMemberPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_by_sync_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/syncMembers'}
def list_member_schemas(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, custom_headers=None, raw=False, **operation_config):
"""Gets a sync member database schema.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param database_name: The name of the database on which the sync group
is hosted.
:type database_name: str
:param sync_group_name: The name of the sync group on which the sync
member is hosted.
:type sync_group_name: str
:param sync_member_name: The name of the sync member.
:type sync_member_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:param operation_config: :ref:`Operation configuration
overrides<msrest:optionsforoperations>`.
:return: An iterator like instance of SyncFullSchemaProperties
:rtype:
~azure.mgmt.sql.models.SyncFullSchemaPropertiesPaged[~azure.mgmt.sql.models.SyncFullSchemaProperties]
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
def internal_paging(next_link=None, raw=False):
if not next_link:
# Construct URL
url = self.list_member_schemas.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'),
'syncMemberName': self._serialize.url("sync_member_name", sync_member_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
else:
url = next_link
query_parameters = {}
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.get(url, query_parameters)
response = self._client.send(
request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
return response
# Deserialize response
deserialized = models.SyncFullSchemaPropertiesPaged(internal_paging, self._deserialize.dependencies)
if raw:
header_dict = {}
client_raw_response = models.SyncFullSchemaPropertiesPaged(internal_paging, self._deserialize.dependencies, header_dict)
return client_raw_response
return deserialized
list_member_schemas.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/syncMembers/{syncMemberName}/schemas'}
def _refresh_member_schema_initial(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, custom_headers=None, raw=False, **operation_config):
# Construct URL
url = self.refresh_member_schema.metadata['url']
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'serverName': self._serialize.url("server_name", server_name, 'str'),
'databaseName': self._serialize.url("database_name", database_name, 'str'),
'syncGroupName': self._serialize.url("sync_group_name", sync_group_name, 'str'),
'syncMemberName': self._serialize.url("sync_member_name", sync_member_name, 'str'),
'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str')
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
query_parameters['api-version'] = self._serialize.query("self.api_version", self.api_version, 'str')
# Construct headers
header_parameters = {}
header_parameters['Content-Type'] = 'application/json; charset=utf-8'
if self.config.generate_client_request_id:
header_parameters['x-ms-client-request-id'] = str(uuid.uuid1())
if custom_headers:
header_parameters.update(custom_headers)
if self.config.accept_language is not None:
header_parameters['accept-language'] = self._serialize.header("self.config.accept_language", self.config.accept_language, 'str')
# Construct and send request
request = self._client.post(url, query_parameters)
response = self._client.send(request, header_parameters, stream=False, **operation_config)
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
def refresh_member_schema(
self, resource_group_name, server_name, database_name, sync_group_name, sync_member_name, custom_headers=None, raw=False, **operation_config):
"""Refreshes a sync member database schema.
:param resource_group_name: The name of the resource group that
contains the resource. You can obtain this value from the Azure
Resource Manager API or the portal.
:type resource_group_name: str
:param server_name: The name of the server.
:type server_name: str
:param database_name: The name of the database on which the sync group
is hosted.
:type database_name: str
:param sync_group_name: The name of the sync group on which the sync
member is hosted.
:type sync_group_name: str
:param sync_member_name: The name of the sync member.
:type sync_member_name: str
:param dict custom_headers: headers that will be added to the request
:param bool raw: returns the direct response alongside the
deserialized response
:return: An instance of AzureOperationPoller that returns None or
ClientRawResponse if raw=true
:rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or
~msrest.pipeline.ClientRawResponse
:raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>`
"""
raw_result = self._refresh_member_schema_initial(
resource_group_name=resource_group_name,
server_name=server_name,
database_name=database_name,
sync_group_name=sync_group_name,
sync_member_name=sync_member_name,
custom_headers=custom_headers,
raw=True,
**operation_config
)
if raw:
return raw_result
# Construct and send request
def long_running_send():
return raw_result.response
def get_long_running_status(status_link, headers=None):
request = self._client.get(status_link)
if headers:
request.headers.update(headers)
header_parameters = {}
header_parameters['x-ms-client-request-id'] = raw_result.response.request.headers['x-ms-client-request-id']
return self._client.send(
request, header_parameters, stream=False, **operation_config)
def get_long_running_output(response):
if response.status_code not in [200, 202]:
exp = CloudError(response)
exp.request_id = response.headers.get('x-ms-request-id')
raise exp
if raw:
client_raw_response = ClientRawResponse(None, response)
return client_raw_response
long_running_operation_timeout = operation_config.get(
'long_running_operation_timeout',
self.config.long_running_operation_timeout)
return AzureOperationPoller(
long_running_send, get_long_running_output,
get_long_running_status, long_running_operation_timeout)
refresh_member_schema.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/databases/{databaseName}/syncGroups/{syncGroupName}/syncMembers/{syncMemberName}/refreshSchema'}
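# Usage sketch (all names below are hypothetical placeholders, not values from
# this module): the AzureOperationPoller returned by refresh_member_schema
# drives the long-running POST defined above.
#   poller = sync_members_client.refresh_member_schema(
#       resource_group_name='my-rg', server_name='my-server',
#       database_name='my-db', sync_group_name='my-sync-group',
#       sync_member_name='my-member')
#   poller.wait()      # blocks until the refresh operation completes
#   poller.result()    # returns None on success, as documented above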
|
mit
|
cwq/ijkplayer
|
tools/copyrighter/CRFile.py
|
12
|
4633
|
#!/usr/bin/env python
#
# Copyright (C) 2013-2017 Bilibili
# Copyright (C) 2013-2017 Zhang Rui <bbcallen@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
from copyrighter.CRCopyright import CRCopyright
class CRFile:
def __init__(self, context, file):
self.dirty = False
self.context = context
self.abs_path = context.get_path_of_file(file)
self.file_ext = os.path.splitext(self.abs_path)[1][1:]
self.copyright_names = {}
self.copyright_urls = {}
self.need_insert_bilibili_copyright = True
def update(self):
if not self.dirty:
self.context.log_file('~ remain', self.abs_path)
return
tmp_path = self.abs_path + '.tmp'
if self.context.dryrun:
src = open(self.abs_path, 'r')
else:
shutil.copy2(self.abs_path, tmp_path)
src = open(tmp_path, 'r')
tmp = open(self.abs_path, 'w')
did_insert_bilibili_copyright = False
for line in src:
if self.need_insert_bilibili_copyright and not did_insert_bilibili_copyright:
copyright = CRCopyright.scan_line(self.context, line)
if copyright:
copyright.name = 'Bilibili'
copyright.url = None
if not self.context.dryrun:
tmp.write(copyright.get_line())
tmp.write("\n")
# print ' insert %s' % copyright.get_line()
did_insert_bilibili_copyright = True
if not self.context.dryrun:
tmp.write(line)
src.close()
if not self.context.dryrun:
tmp.close()
os.remove(tmp_path)
if self.need_insert_bilibili_copyright and did_insert_bilibili_copyright:
self.context.log_file('+ update', self.abs_path)
else:
self.context.log_file('~ missing', self.abs_path)
    # Renamed from copyright_names()/copyright_urls(): the dict attributes set
    # in __init__ shadow methods of the same name, so the old names could never
    # be called on an instance.
    def get_copyright_names(self):
        return self.copyright_names.keys()
    def get_copyright_urls(self):
        return self.copyright_urls.keys()
def __parse_line(self, line):
copyright = CRCopyright.scan_line(self.context, line)
if copyright:
# print "match %s" % copyright.name
self.copyright_names[copyright.name.lower()] = copyright
            if copyright.url:
                # scan_line may yield entries without a URL (cf. update(),
                # which sets url=None); guard against AttributeError here.
                self.copyright_urls[copyright.url.lower()] = copyright
return True
@staticmethod
def load_from_file(context, file):
        parsed_lines = 0
crf = CRFile(context = context, file = file)
f = open(crf.abs_path, 'r')
for line in f:
if parsed_lines > 20:
break
parsed_lines += 1
crf.__parse_line(line)
f.close()
# TODO: use a visitor
if 'bilibili' not in crf.copyright_names:
if 'zhang rui' in crf.copyright_names or 'bbcallen@gmail.com' in crf.copyright_urls:
crf.need_insert_bilibili_copyright = True
crf.dirty = True
return crf
@staticmethod
def update_path(context, file):
base_name = os.path.basename(file)
abs_path = context.get_path_of_file(file)
if base_name.startswith('.'):
context.log_file('- hidden', abs_path)
return
elif context.is_black_path(abs_path):
context.log_file('- black', abs_path)
return
elif os.path.islink(abs_path):
context.log_file('- link', abs_path)
return
elif os.path.isdir(abs_path):
for sub_file in os.listdir(abs_path):
sub_path = os.path.realpath(os.path.join(abs_path, sub_file))
CRFile.update_path(context, sub_path)
elif not context.need_copyright(abs_path):
context.log_file('- nohead', abs_path)
return
elif os.path.isfile(abs_path):
src_file = CRFile.load_from_file(context, abs_path)
src_file.update()
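# A minimal standalone sketch (name hypothetical) of the header-scan pattern
# used by load_from_file above: only the first lines of a file are examined,
# so large files stay cheap. The 'Copyright' substring test is a simplified
# stand-in for CRCopyright.scan_line, whose real matching rules live elsewhere.
def _scan_header_demo(path, max_lines=20):
    matches = []
    f = open(path, 'r')
    parsed = 0
    for line in f:
        if parsed > max_lines:
            break
        parsed += 1
        if 'Copyright' in line:
            matches.append(line.strip())
    f.close()
    return matches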
|
gpl-2.0
|
awatts/boto
|
boto/ec2/instancetype.py
|
152
|
2273
|
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
from boto.ec2.ec2object import EC2Object
class InstanceType(EC2Object):
"""
Represents an EC2 VM Type
:ivar name: The name of the vm type
:ivar cores: The number of cpu cores for this vm type
:ivar memory: The amount of memory in megabytes for this vm type
:ivar disk: The amount of disk space in gigabytes for this vm type
"""
def __init__(self, connection=None, name=None, cores=None,
memory=None, disk=None):
super(InstanceType, self).__init__(connection)
self.connection = connection
self.name = name
self.cores = cores
self.memory = memory
self.disk = disk
def __repr__(self):
return 'InstanceType:%s-%s,%s,%s' % (self.name, self.cores,
self.memory, self.disk)
def endElement(self, name, value, connection):
if name == 'name':
self.name = value
elif name == 'cpu':
self.cores = value
elif name == 'disk':
self.disk = value
elif name == 'memory':
self.memory = value
else:
setattr(self, name, value)
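# Minimal illustration (values hypothetical): instances are normally populated
# by boto's XML parser through endElement(), but direct construction works too.
if __name__ == '__main__':
    it = InstanceType(name='m1.small', cores='1', memory='1740', disk='160')
    print(it)  # -> InstanceType:m1.small-1,1740,160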
|
mit
|
toastedcornflakes/scikit-learn
|
sklearn/utils/tests/test_class_weight.py
|
50
|
13151
|
import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import make_blobs
from sklearn.utils.class_weight import compute_class_weight
from sklearn.utils.class_weight import compute_sample_weight
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_raise_message
from sklearn.utils.testing import assert_true
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_warns
def test_compute_class_weight():
# Test (and demo) compute_class_weight.
y = np.asarray([2, 2, 2, 3, 3, 4])
classes = np.unique(y)
cw = assert_warns(DeprecationWarning,
compute_class_weight, "auto", classes, y)
assert_almost_equal(cw.sum(), classes.shape)
assert_true(cw[0] < cw[1] < cw[2])
cw = compute_class_weight("balanced", classes, y)
# total effect of samples is preserved
class_counts = np.bincount(y)[2:]
assert_almost_equal(np.dot(cw, class_counts), y.shape[0])
assert_true(cw[0] < cw[1] < cw[2])
def test_compute_class_weight_not_present():
# Raise error when y does not contain all class labels
classes = np.arange(4)
y = np.asarray([0, 0, 0, 1, 1, 2])
assert_raises(ValueError, compute_class_weight, "auto", classes, y)
assert_raises(ValueError, compute_class_weight, "balanced", classes, y)
# Raise error when y has items not in classes
classes = np.arange(2)
assert_raises(ValueError, compute_class_weight, "auto", classes, y)
assert_raises(ValueError, compute_class_weight, "balanced", classes, y)
assert_raises(ValueError, compute_class_weight, {0: 1., 1: 2.}, classes, y)
def test_compute_class_weight_dict():
classes = np.arange(3)
class_weights = {0: 1.0, 1: 2.0, 2: 3.0}
y = np.asarray([0, 0, 1, 2])
cw = compute_class_weight(class_weights, classes, y)
# When the user specifies class weights, compute_class_weights should just
# return them.
assert_array_almost_equal(np.asarray([1.0, 2.0, 3.0]), cw)
# When a class weight is specified that isn't in classes, a ValueError
# should get raised
msg = 'Class label 4 not present.'
class_weights = {0: 1.0, 1: 2.0, 2: 3.0, 4: 1.5}
assert_raise_message(ValueError, msg, compute_class_weight, class_weights,
classes, y)
msg = 'Class label -1 not present.'
class_weights = {-1: 5.0, 0: 1.0, 1: 2.0, 2: 3.0}
assert_raise_message(ValueError, msg, compute_class_weight, class_weights,
classes, y)
def test_compute_class_weight_invariance():
# Test that results with class_weight="balanced" is invariant wrt
# class imbalance if the number of samples is identical.
# The test uses a balanced two class dataset with 100 datapoints.
# It creates three versions, one where class 1 is duplicated
# resulting in 150 points of class 1 and 50 of class 0,
# one where there are 50 points in class 1 and 150 in class 0,
# and one where there are 100 points of each class (this one is balanced
# again).
# With balancing class weights, all three should give the same model.
X, y = make_blobs(centers=2, random_state=0)
# create dataset where class 1 is duplicated twice
X_1 = np.vstack([X] + [X[y == 1]] * 2)
y_1 = np.hstack([y] + [y[y == 1]] * 2)
# create dataset where class 0 is duplicated twice
X_0 = np.vstack([X] + [X[y == 0]] * 2)
y_0 = np.hstack([y] + [y[y == 0]] * 2)
# duplicate everything
X_ = np.vstack([X] * 2)
y_ = np.hstack([y] * 2)
# results should be identical
logreg1 = LogisticRegression(class_weight="balanced").fit(X_1, y_1)
logreg0 = LogisticRegression(class_weight="balanced").fit(X_0, y_0)
logreg = LogisticRegression(class_weight="balanced").fit(X_, y_)
assert_array_almost_equal(logreg1.coef_, logreg0.coef_)
assert_array_almost_equal(logreg.coef_, logreg0.coef_)
def test_compute_class_weight_auto_negative():
# Test compute_class_weight when labels are negative
# Test with balanced class labels.
classes = np.array([-2, -1, 0])
y = np.asarray([-1, -1, 0, 0, -2, -2])
cw = assert_warns(DeprecationWarning, compute_class_weight, "auto",
classes, y)
assert_almost_equal(cw.sum(), classes.shape)
assert_equal(len(cw), len(classes))
assert_array_almost_equal(cw, np.array([1., 1., 1.]))
cw = compute_class_weight("balanced", classes, y)
assert_equal(len(cw), len(classes))
assert_array_almost_equal(cw, np.array([1., 1., 1.]))
# Test with unbalanced class labels.
y = np.asarray([-1, 0, 0, -2, -2, -2])
cw = assert_warns(DeprecationWarning, compute_class_weight, "auto",
classes, y)
assert_almost_equal(cw.sum(), classes.shape)
assert_equal(len(cw), len(classes))
assert_array_almost_equal(cw, np.array([0.545, 1.636, 0.818]), decimal=3)
cw = compute_class_weight("balanced", classes, y)
assert_equal(len(cw), len(classes))
class_counts = np.bincount(y + 2)
assert_almost_equal(np.dot(cw, class_counts), y.shape[0])
assert_array_almost_equal(cw, [2. / 3, 2., 1.])
def test_compute_class_weight_auto_unordered():
# Test compute_class_weight when classes are unordered
classes = np.array([1, 0, 3])
y = np.asarray([1, 0, 0, 3, 3, 3])
cw = assert_warns(DeprecationWarning, compute_class_weight, "auto",
classes, y)
assert_almost_equal(cw.sum(), classes.shape)
assert_equal(len(cw), len(classes))
assert_array_almost_equal(cw, np.array([1.636, 0.818, 0.545]), decimal=3)
cw = compute_class_weight("balanced", classes, y)
class_counts = np.bincount(y)[classes]
assert_almost_equal(np.dot(cw, class_counts), y.shape[0])
assert_array_almost_equal(cw, [2., 1., 2. / 3])
def test_compute_sample_weight():
# Test (and demo) compute_sample_weight.
# Test with balanced classes
y = np.asarray([1, 1, 1, 2, 2, 2])
sample_weight = assert_warns(DeprecationWarning,
compute_sample_weight, "auto", y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
sample_weight = compute_sample_weight("balanced", y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
# Test with user-defined weights
sample_weight = compute_sample_weight({1: 2, 2: 1}, y)
assert_array_almost_equal(sample_weight, [2., 2., 2., 1., 1., 1.])
# Test with column vector of balanced classes
y = np.asarray([[1], [1], [1], [2], [2], [2]])
sample_weight = assert_warns(DeprecationWarning,
compute_sample_weight, "auto", y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
sample_weight = compute_sample_weight("balanced", y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
# Test with unbalanced classes
y = np.asarray([1, 1, 1, 2, 2, 2, 3])
sample_weight = assert_warns(DeprecationWarning,
compute_sample_weight, "auto", y)
expected_auto = np.asarray([.6, .6, .6, .6, .6, .6, 1.8])
assert_array_almost_equal(sample_weight, expected_auto)
sample_weight = compute_sample_weight("balanced", y)
expected_balanced = np.array([0.7777, 0.7777, 0.7777, 0.7777, 0.7777, 0.7777, 2.3333])
assert_array_almost_equal(sample_weight, expected_balanced, decimal=4)
# Test with `None` weights
sample_weight = compute_sample_weight(None, y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1., 1.])
# Test with multi-output of balanced classes
y = np.asarray([[1, 0], [1, 0], [1, 0], [2, 1], [2, 1], [2, 1]])
sample_weight = assert_warns(DeprecationWarning,
compute_sample_weight, "auto", y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
sample_weight = compute_sample_weight("balanced", y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
# Test with multi-output with user-defined weights
y = np.asarray([[1, 0], [1, 0], [1, 0], [2, 1], [2, 1], [2, 1]])
sample_weight = compute_sample_weight([{1: 2, 2: 1}, {0: 1, 1: 2}], y)
assert_array_almost_equal(sample_weight, [2., 2., 2., 2., 2., 2.])
# Test with multi-output of unbalanced classes
y = np.asarray([[1, 0], [1, 0], [1, 0], [2, 1], [2, 1], [2, 1], [3, -1]])
sample_weight = assert_warns(DeprecationWarning,
compute_sample_weight, "auto", y)
assert_array_almost_equal(sample_weight, expected_auto ** 2)
sample_weight = compute_sample_weight("balanced", y)
assert_array_almost_equal(sample_weight, expected_balanced ** 2, decimal=3)
def test_compute_sample_weight_with_subsample():
# Test compute_sample_weight with subsamples specified.
# Test with balanced classes and all samples present
y = np.asarray([1, 1, 1, 2, 2, 2])
sample_weight = assert_warns(DeprecationWarning,
compute_sample_weight, "auto", y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
sample_weight = compute_sample_weight("balanced", y, range(6))
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
# Test with column vector of balanced classes and all samples present
y = np.asarray([[1], [1], [1], [2], [2], [2]])
sample_weight = assert_warns(DeprecationWarning,
compute_sample_weight, "auto", y)
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
sample_weight = compute_sample_weight("balanced", y, range(6))
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1.])
# Test with a subsample
y = np.asarray([1, 1, 1, 2, 2, 2])
sample_weight = assert_warns(DeprecationWarning,
compute_sample_weight, "auto", y, range(4))
assert_array_almost_equal(sample_weight, [.5, .5, .5, 1.5, 1.5, 1.5])
sample_weight = compute_sample_weight("balanced", y, range(4))
assert_array_almost_equal(sample_weight, [2. / 3, 2. / 3,
2. / 3, 2., 2., 2.])
# Test with a bootstrap subsample
y = np.asarray([1, 1, 1, 2, 2, 2])
sample_weight = assert_warns(DeprecationWarning, compute_sample_weight,
"auto", y, [0, 1, 1, 2, 2, 3])
expected_auto = np.asarray([1 / 3., 1 / 3., 1 / 3., 5 / 3., 5 / 3., 5 / 3.])
assert_array_almost_equal(sample_weight, expected_auto)
sample_weight = compute_sample_weight("balanced", y, [0, 1, 1, 2, 2, 3])
expected_balanced = np.asarray([0.6, 0.6, 0.6, 3., 3., 3.])
assert_array_almost_equal(sample_weight, expected_balanced)
# Test with a bootstrap subsample for multi-output
y = np.asarray([[1, 0], [1, 0], [1, 0], [2, 1], [2, 1], [2, 1]])
sample_weight = assert_warns(DeprecationWarning, compute_sample_weight,
"auto", y, [0, 1, 1, 2, 2, 3])
assert_array_almost_equal(sample_weight, expected_auto ** 2)
sample_weight = compute_sample_weight("balanced", y, [0, 1, 1, 2, 2, 3])
assert_array_almost_equal(sample_weight, expected_balanced ** 2)
# Test with a missing class
y = np.asarray([1, 1, 1, 2, 2, 2, 3])
sample_weight = assert_warns(DeprecationWarning, compute_sample_weight,
"auto", y, range(6))
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1., 0.])
sample_weight = compute_sample_weight("balanced", y, range(6))
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1., 0.])
# Test with a missing class for multi-output
y = np.asarray([[1, 0], [1, 0], [1, 0], [2, 1], [2, 1], [2, 1], [2, 2]])
sample_weight = assert_warns(DeprecationWarning, compute_sample_weight,
"auto", y, range(6))
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1., 0.])
sample_weight = compute_sample_weight("balanced", y, range(6))
assert_array_almost_equal(sample_weight, [1., 1., 1., 1., 1., 1., 0.])
def test_compute_sample_weight_errors():
# Test compute_sample_weight raises errors expected.
# Invalid preset string
y = np.asarray([1, 1, 1, 2, 2, 2])
y_ = np.asarray([[1, 0], [1, 0], [1, 0], [2, 1], [2, 1], [2, 1]])
assert_raises(ValueError, compute_sample_weight, "ni", y)
assert_raises(ValueError, compute_sample_weight, "ni", y, range(4))
assert_raises(ValueError, compute_sample_weight, "ni", y_)
assert_raises(ValueError, compute_sample_weight, "ni", y_, range(4))
# Not "auto" for subsample
assert_raises(ValueError,
compute_sample_weight, {1: 2, 2: 1}, y, range(4))
# Not a list or preset for multi-output
assert_raises(ValueError, compute_sample_weight, {1: 2, 2: 1}, y_)
# Incorrect length list for multi-output
assert_raises(ValueError, compute_sample_weight, [{1: 2, 2: 1}], y_)
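def _balanced_weight_demo():
    # Illustrative helper (not part of the test suite, name hypothetical):
    # re-derives the 'balanced' heuristic the assertions above rely on,
    #     weight[c] = n_samples / (n_classes * count(c)).
    # For y = [2, 2, 2, 3, 3, 4] the counts are (3, 2, 1), giving weights
    # 6/9 ~= 0.667, 6/6 = 1.0 and 6/3 = 2.0, hence cw[0] < cw[1] < cw[2].
    y = np.asarray([2, 2, 2, 3, 3, 4])
    classes = np.unique(y)
    counts = np.bincount(y)[classes]
    return len(y) / (len(classes) * counts.astype(float))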
|
bsd-3-clause
|
lenw/ansible-modules-core
|
cloud/amazon/ec2_snapshot.py
|
67
|
10167
|
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_snapshot
short_description: creates a snapshot from an existing volume
description:
- creates an EC2 snapshot from an existing EBS volume
version_added: "1.5"
options:
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
volume_id:
description:
- volume from which to take the snapshot
required: false
description:
description:
- description to be applied to the snapshot
required: false
instance_id:
description:
- instance that has the required volume to snapshot mounted
required: false
device_name:
description:
- device name of a mounted volume to be snapshotted
required: false
snapshot_tags:
description:
- a hash/dictionary of tags to add to the snapshot
required: false
version_added: "1.6"
wait:
description:
- wait for the snapshot to be ready
choices: ['yes', 'no']
required: false
default: yes
version_added: "1.5.1"
wait_timeout:
description:
- how long before wait gives up, in seconds
- specify 0 to wait forever
required: false
default: 0
version_added: "1.5.1"
state:
description:
      - whether to create (present) or delete (absent) the snapshot
required: false
default: present
choices: ['absent', 'present']
version_added: "1.9"
snapshot_id:
description:
- snapshot id to remove
required: false
version_added: "1.9"
last_snapshot_min_age:
description:
      - If the volume's most recent snapshot has started less than 'last_snapshot_min_age' minutes ago, a new snapshot will not be created.
required: false
default: 0
version_added: "1.9"
author: "Will Thames (@willthames)"
extends_documentation_fragment: aws
'''
EXAMPLES = '''
# Simple snapshot of volume using volume_id
- ec2_snapshot:
volume_id: vol-abcdef12
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume mounted on device_name attached to instance_id
- ec2_snapshot:
instance_id: i-12345678
device_name: /dev/sdb1
description: snapshot of /data from DB123 taken 2013/11/28 12:18:32
# Snapshot of volume with tagging
- ec2_snapshot:
instance_id: i-12345678
device_name: /dev/sdb1
snapshot_tags:
frequency: hourly
source: /data
# Remove a snapshot
- local_action:
module: ec2_snapshot
snapshot_id: snap-abcd1234
state: absent
# Create a snapshot only if the most recent one is older than 1 hour
- local_action:
module: ec2_snapshot
volume_id: vol-abcdef12
last_snapshot_min_age: 60
'''
import time
import datetime
try:
import boto.ec2
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
# Find the most recent snapshot
def _get_snapshot_starttime(snap):
return datetime.datetime.strptime(snap.start_time, '%Y-%m-%dT%H:%M:%S.000Z')
def _get_most_recent_snapshot(snapshots, max_snapshot_age_secs=None, now=None):
"""
Gets the most recently created snapshot and optionally filters the result
if the snapshot is too old
:param snapshots: list of snapshots to search
:param max_snapshot_age_secs: filter the result if its older than this
:param now: simulate time -- used for unit testing
:return:
"""
if len(snapshots) == 0:
return None
if not now:
now = datetime.datetime.utcnow()
    youngest_snapshot = max(snapshots, key=_get_snapshot_starttime)
    # See if the snapshot is younger than the given max age
snapshot_start = datetime.datetime.strptime(youngest_snapshot.start_time, '%Y-%m-%dT%H:%M:%S.000Z')
snapshot_age = now - snapshot_start
if max_snapshot_age_secs is not None:
if snapshot_age.total_seconds() > max_snapshot_age_secs:
return None
return youngest_snapshot
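# Sketch of exercising _get_most_recent_snapshot in isolation; FakeSnapshot is
# a hypothetical stand-in for a boto snapshot object:
#   class FakeSnapshot(object):
#       def __init__(self, start_time):
#           self.start_time = start_time
#   snaps = [FakeSnapshot('2014-01-01T00:00:00.000Z'),
#            FakeSnapshot('2014-01-02T00:00:00.000Z')]
#   now = datetime.datetime(2014, 1, 2, 0, 30)
#   _get_most_recent_snapshot(snaps, max_snapshot_age_secs=3600, now=now)
#   # -> the Jan 2 snapshot (30 minutes old); with max_snapshot_age_secs=60
#   # it is considered too old and None is returned.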
def _create_with_wait(snapshot, wait_timeout_secs, sleep_func=time.sleep):
"""
Wait for the snapshot to be created
:param snapshot:
:param wait_timeout_secs: fail this step after this many seconds
:param sleep_func:
:return:
"""
time_waited = 0
snapshot.update()
while snapshot.status != 'completed':
sleep_func(3)
snapshot.update()
time_waited += 3
if wait_timeout_secs and time_waited > wait_timeout_secs:
return False
return True
def create_snapshot(module, ec2, state=None, description=None, wait=None,
wait_timeout=None, volume_id=None, instance_id=None,
snapshot_id=None, device_name=None, snapshot_tags=None,
last_snapshot_min_age=None):
snapshot = None
changed = False
required = [volume_id, snapshot_id, instance_id]
if required.count(None) != len(required) - 1: # only 1 must be set
module.fail_json(msg='One and only one of volume_id or instance_id or snapshot_id must be specified')
if instance_id and not device_name or device_name and not instance_id:
module.fail_json(msg='Instance ID and device name must both be specified')
if instance_id:
try:
volumes = ec2.get_all_volumes(filters={'attachment.instance-id': instance_id, 'attachment.device': device_name})
except boto.exception.BotoServerError, e:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
if not volumes:
module.fail_json(msg="Could not find volume with name %s attached to instance %s" % (device_name, instance_id))
volume_id = volumes[0].id
if state == 'absent':
if not snapshot_id:
module.fail_json(msg = 'snapshot_id must be set when state is absent')
try:
ec2.delete_snapshot(snapshot_id)
except boto.exception.BotoServerError, e:
# exception is raised if snapshot does not exist
if e.error_code == 'InvalidSnapshot.NotFound':
module.exit_json(changed=False)
else:
module.fail_json(msg = "%s: %s" % (e.error_code, e.error_message))
# successful delete
module.exit_json(changed=True)
if last_snapshot_min_age > 0:
try:
current_snapshots = ec2.get_all_snapshots(filters={'volume_id': volume_id})
except boto.exception.BotoServerError, e:
module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))
last_snapshot_min_age = last_snapshot_min_age * 60 # Convert to seconds
snapshot = _get_most_recent_snapshot(current_snapshots,
max_snapshot_age_secs=last_snapshot_min_age)
try:
# Create a new snapshot if we didn't find an existing one to use
if snapshot is None:
snapshot = ec2.create_snapshot(volume_id, description=description)
changed = True
if wait:
if not _create_with_wait(snapshot, wait_timeout):
module.fail_json(msg='Timed out while creating snapshot.')
if snapshot_tags:
for k, v in snapshot_tags.items():
snapshot.add_tag(k, v)
except boto.exception.BotoServerError, e:
module.fail_json(msg="%s: %s" % (e.error_code, e.error_message))
module.exit_json(changed=changed,
snapshot_id=snapshot.id,
volume_id=snapshot.volume_id,
volume_size=snapshot.volume_size,
tags=snapshot.tags.copy())
def create_snapshot_ansible_module():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
volume_id = dict(),
description = dict(),
instance_id = dict(),
snapshot_id = dict(),
device_name = dict(),
wait = dict(type='bool', default=True),
wait_timeout = dict(type='int', default=0),
last_snapshot_min_age = dict(type='int', default=0),
snapshot_tags = dict(type='dict', default=dict()),
state = dict(choices=['absent','present'], default='present'),
)
)
module = AnsibleModule(argument_spec=argument_spec)
return module
def main():
module = create_snapshot_ansible_module()
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
volume_id = module.params.get('volume_id')
snapshot_id = module.params.get('snapshot_id')
description = module.params.get('description')
instance_id = module.params.get('instance_id')
device_name = module.params.get('device_name')
wait = module.params.get('wait')
wait_timeout = module.params.get('wait_timeout')
last_snapshot_min_age = module.params.get('last_snapshot_min_age')
snapshot_tags = module.params.get('snapshot_tags')
state = module.params.get('state')
ec2 = ec2_connect(module)
create_snapshot(
module=module,
state=state,
description=description,
wait=wait,
wait_timeout=wait_timeout,
ec2=ec2,
volume_id=volume_id,
instance_id=instance_id,
snapshot_id=snapshot_id,
device_name=device_name,
snapshot_tags=snapshot_tags,
last_snapshot_min_age=last_snapshot_min_age
)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
FCP-INDI/nipype
|
nipype/interfaces/mipav/tests/test_auto_MedicAlgorithmThresholdToBinaryMask.py
|
12
|
1499
|
# AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from ....testing import assert_equal
from ..developer import MedicAlgorithmThresholdToBinaryMask
def test_MedicAlgorithmThresholdToBinaryMask_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
inLabel=dict(argstr='--inLabel %s',
sep=';',
),
inMaximum=dict(argstr='--inMaximum %f',
),
inMinimum=dict(argstr='--inMinimum %f',
),
inUse=dict(argstr='--inUse %s',
),
null=dict(argstr='--null %s',
),
outBinary=dict(argstr='--outBinary %s',
sep=';',
),
terminal_output=dict(nohash=True,
),
xDefaultMem=dict(argstr='-xDefaultMem %d',
),
xMaxProcess=dict(argstr='-xMaxProcess %d',
usedefault=True,
),
xPrefExt=dict(argstr='--xPrefExt %s',
),
)
inputs = MedicAlgorithmThresholdToBinaryMask.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_MedicAlgorithmThresholdToBinaryMask_outputs():
output_map = dict()
outputs = MedicAlgorithmThresholdToBinaryMask.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
yield assert_equal, getattr(outputs.traits()[key], metakey), value
|
bsd-3-clause
|
proximate/proximate
|
scheduler.py
|
1
|
6380
|
#
# Proximate - Peer-to-peer social networking
#
# Copyright (c) 2008-2011 Nokia Corporation
#
# All rights reserved.
#
# This software is licensed under The Clear BSD license.
# See the LICENSE file for more details.
#
from gobject import timeout_add_seconds
from datetime import datetime, timedelta
from os import listdir, remove
from os.path import join
from tempfile import mkstemp
from time import time
from ossupport import xclose
from plugins import Plugin, get_plugin_by_type
from support import die, warning
from proximateprotocol import PLUGIN_TYPE_SCHEDULER, PLUGIN_TYPE_COMMUNITY
from utils import str_to_int
# seconds
SCHEDULE_PERIOD = 5
class Periodic_Timer:
def __init__(self, period, callback, ctx):
assert((period % SCHEDULE_PERIOD) == 0)
self.div = period // SCHEDULE_PERIOD
self.callback = callback
self.ctx = ctx
self.step = 0
def call(self, t):
return self.callback(t, self.ctx)
def test(self, t):
self.step += 1
if self.step != self.div:
return True
self.step = 0
return self.call(t)
class Scheduler_Plugin(Plugin):
# These constants can be used to represent time deltas (relative times)
DAY = timedelta(1)
SECOND = timedelta(0, 1)
EXPIRE_PREFIX = 'expiringfile'
def __init__(self):
self.register_plugin(PLUGIN_TYPE_SCHEDULER)
self.community = None
self.callbacks = []
self.periodic = []
timeout_add_seconds(SCHEDULE_PERIOD, self.schedule)
def call_at(self, dt, callback, ctx=None):
""" Call callback(ctx) at dt, where dt is datetime.datetime object """
self.callbacks.append((dt, callback, ctx))
def call_in(self, rel, callback, ctx=None):
""" Call callback(ctx) is a datetime.timedelta object """
dt = datetime.now() + rel
self.call_at(dt, callback, ctx)
def call_periodic(self, period, callback, ctx=None, callnow=False):
""" Install a periodic timer. Returns the timer iff it is
installed, otherwise None.
Period is a datetime.timedelta object. The period must be a
multiple of SCHEDULE_PERIOD.
The callback should return False or True. The timer is removed
iff False is returned from the callback. The timer calls
callback(t, ctx), where t is a time value returned by time.time().
The timer is not installed if callnow == True and the first
callback returns False. """
secs = 3600 * 24 * period.days + period.seconds
timer = Periodic_Timer(secs, callback, ctx)
success = True
if callnow:
success = timer.call(time())
if success:
self.periodic.append(timer)
return timer
return None
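    # Hypothetical usage of call_periodic(): run a heartbeat every 10 seconds
    # (a multiple of SCHEDULE_PERIOD) and remove the timer after five calls by
    # returning False. `scheduler` stands for this plugin instance.
    #   def heartbeat(t, ctx):
    #       ctx['calls'] += 1
    #       return ctx['calls'] < 5
    #   scheduler.call_periodic(10 * Scheduler_Plugin.SECOND, heartbeat,
    #                           ctx={'calls': 0})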
def parse_filename_datetime(self, name):
fields = name.split('-')
if len(fields) < 5:
return None
try:
year = int(fields[1])
month = int(fields[2])
day = int(fields[3])
secs = int(fields[4])
except ValueError:
return None
hour = secs // 3600
if hour >= 24:
return None
secs = secs % 3600
minute = secs // 60
second = secs % 60
return datetime(year, month, day, hour, minute, second)
def remove_garbage(self, t, ctx):
now = datetime.now()
dname = self.community.get_user_dir()
for fname in listdir(dname):
if not fname.startswith(self.EXPIRE_PREFIX):
continue
path = join(dname, fname)
dt = self.parse_filename_datetime(fname)
if dt == None:
warning('Bad expiring file name, just remove it: %s\n' % path)
if dt == None or dt <= now:
try:
remove(path)
warning('Garbage collected %s\n' % path)
except OSError:
warning('Could not delete %s\n' % path)
return True
def get_expiring_file(self, dt=None, rel=None):
""" Create a temp file, which expires at a given time. The temp file
is stored under user's proximate directory. The file will expire
(be deleted) after the given time. The actual deletion time is
not very accurate.
dt is a point in time, which is an instance of datetime.datetime.
If dt == None, it is assumed to be now. If rel == None,
it is assumed to be zero. Otherwise it is assumed to be a
relative delay with respect to dt.
rel is an instance of datetime.timedelta.
Hint: Use scheduler.DAY and scheduler.SECOND to specify relative
times """
assert(dt == None or isinstance(dt, datetime))
assert(rel == None or isinstance(rel, timedelta))
if dt == None:
dt = datetime.now()
if rel != None:
dt = dt + rel
# ISO date: YYYY-MM-DD-s, where s is a number of seconds in the day
isodate = str(dt.date())
seconds = str(dt.hour * 3600 + dt.minute * 60 + dt.second)
prefix = '%s-%s-%s-' % (self.EXPIRE_PREFIX, isodate, seconds)
directory = self.community.get_user_dir()
try:
(fd, fname) = mkstemp(prefix=prefix, dir=directory)
except OSError:
warning('expiring_file: mkstemp() failed\n')
return None
xclose(fd)
return fname
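    # Naming round trip (suffix hypothetical): a file expiring at
    # 2011-06-01 12:00:00 is created as 'expiringfile-2011-06-01-43200-XXXXXX'
    # (43200 = 12 * 3600 seconds into the day), and
    # parse_filename_datetime('expiringfile-2011-06-01-43200-ab12cd') recovers
    # datetime(2011, 6, 1, 12, 0, 0), which remove_garbage() compares to now.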
def ready(self):
self.community = get_plugin_by_type(PLUGIN_TYPE_COMMUNITY)
# Cleanup garbage files every 5 mins
self.call_periodic(300 * self.SECOND, self.remove_garbage)
def remove_periodic(self, timer):
self.periodic.remove(timer)
def schedule(self):
now = datetime.now()
i = 0
while i < len(self.callbacks):
(t, callback, ctx) = self.callbacks[i]
if now >= t:
callback(ctx)
self.callbacks.pop(i)
else:
i += 1
t = time()
i = 0
while i < len(self.periodic):
if not self.periodic[i].test(t):
self.periodic.pop(i)
continue
i += 1
return True
def init(options):
Scheduler_Plugin()
|
bsd-3-clause
|
dimroc/tensorflow-mnist-tutorial
|
lib/python3.6/site-packages/tensorflow/python/ops/rnn_cell_impl.py
|
31
|
5806
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Module implementing RNN Cells."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.util import nest
def _state_size_with_prefix(state_size, prefix=None):
"""Helper function that enables int or TensorShape shape specification.
This function takes a size specification, which can be an integer or a
TensorShape, and converts it into a list of integers. One may specify any
additional dimensions that precede the final state size specification.
Args:
state_size: TensorShape or int that specifies the size of a tensor.
prefix: optional additional list of dimensions to prepend.
Returns:
    result_state_size: list of integers representing the resulting tensor's dimensions.
"""
result_state_size = tensor_shape.as_shape(state_size).as_list()
if prefix is not None:
if not isinstance(prefix, list):
raise TypeError("prefix of _state_size_with_prefix should be a list.")
result_state_size = prefix + result_state_size
return result_state_size
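# Illustrative calls (values hypothetical):
#   _state_size_with_prefix(128)                 -> [128]
#   _state_size_with_prefix(128, prefix=[None])  -> [None, 128]
#   _state_size_with_prefix(tensor_shape.TensorShape([4, 8]), prefix=[32])
#                                                -> [32, 4, 8]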
class _RNNCell(object):
"""Abstract object representing an RNN cell.
The definition of cell in this package differs from the definition used in the
literature. In the literature, cell refers to an object with a single scalar
output. The definition in this package refers to a horizontal array of such
units.
An RNN cell, in the most abstract setting, is anything that has
a state and performs some operation that takes a matrix of inputs.
This operation results in an output matrix with `self.output_size` columns.
If `self.state_size` is an integer, this operation also results in a new
state matrix with `self.state_size` columns. If `self.state_size` is a
tuple of integers, then it results in a tuple of `len(state_size)` state
matrices, each with a column size corresponding to values in `state_size`.
This module provides a number of basic commonly used RNN cells, such as
LSTM (Long Short Term Memory) or GRU (Gated Recurrent Unit), and a number
  of operators that allow adding dropouts, projections, or embeddings for inputs.
Constructing multi-layer cells is supported by the class `MultiRNNCell`,
or by calling the `rnn` ops several times. Every `RNNCell` must have the
properties below and implement `__call__` with the following signature.
"""
def __call__(self, inputs, state, scope=None):
"""Run this RNN cell on inputs, starting from the given state.
Args:
inputs: `2-D` tensor with shape `[batch_size x input_size]`.
state: if `self.state_size` is an integer, this should be a `2-D Tensor`
with shape `[batch_size x self.state_size]`. Otherwise, if
`self.state_size` is a tuple of integers, this should be a tuple
with shapes `[batch_size x s] for s in self.state_size`.
scope: VariableScope for the created subgraph; defaults to class name.
Returns:
A pair containing:
- Output: A `2-D` tensor with shape `[batch_size x self.output_size]`.
- New state: Either a single `2-D` tensor, or a tuple of tensors matching
the arity and shapes of `state`.
"""
raise NotImplementedError("Abstract method")
@property
def state_size(self):
"""size(s) of state(s) used by this cell.
It can be represented by an Integer, a TensorShape or a tuple of Integers
or TensorShapes.
"""
raise NotImplementedError("Abstract method")
@property
def output_size(self):
"""Integer or TensorShape: size of outputs produced by this cell."""
raise NotImplementedError("Abstract method")
def zero_state(self, batch_size, dtype):
"""Return zero-filled state tensor(s).
Args:
batch_size: int, float, or unit Tensor representing the batch size.
dtype: the data type to use for the state.
Returns:
If `state_size` is an int or TensorShape, then the return value is a
`N-D` tensor of shape `[batch_size x state_size]` filled with zeros.
If `state_size` is a nested list or tuple, then the return value is
a nested list or tuple (of the same structure) of `2-D` tensors with
the shapes `[batch_size x s]` for each s in `state_size`.
"""
state_size = self.state_size
if nest.is_sequence(state_size):
state_size_flat = nest.flatten(state_size)
zeros_flat = [
array_ops.zeros(
array_ops.stack(_state_size_with_prefix(
s, prefix=[batch_size])),
dtype=dtype) for s in state_size_flat
]
for s, z in zip(state_size_flat, zeros_flat):
z.set_shape(_state_size_with_prefix(s, prefix=[None]))
zeros = nest.pack_sequence_as(structure=state_size,
flat_sequence=zeros_flat)
else:
zeros_size = _state_size_with_prefix(state_size, prefix=[batch_size])
zeros = array_ops.zeros(array_ops.stack(zeros_size), dtype=dtype)
zeros.set_shape(_state_size_with_prefix(state_size, prefix=[None]))
return zeros
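# Shape sketch for zero_state() above (cell hypothetical): with
# state_size = (64, (32, 32)) and batch_size = 16, the returned structure is a
# nested tuple of zero tensors with shapes (16, 64) and ((16, 32), (16, 32)),
# mirroring the nesting of state_size.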
|
apache-2.0
|
aazen/DayOne
|
slyd/slyd/resource.py
|
8
|
1299
|
import json, errno
from twisted.web.resource import Resource, NoResource, ErrorPage
class SlydJsonResource(Resource):
"""Base Resource for Slyd Resources
This sets the content type to JSON and handles errors
"""
def render(self, request):
request.setResponseCode(200)
request.setHeader('Content-Type', 'application/json')
try:
return Resource.render(self, request)
except IOError as ex:
if ex.errno == errno.ENOENT:
return NoResource().render(request)
else:
raise
except ErrorPage as ex:
return ex.render(request)
    def error(self, status, brief, why):
        # twisted's ErrorPage takes (status, brief, detail); the previous
        # signature declared a request parameter that bad_request() never passed.
        raise ErrorPage(status, brief, why)
def bad_request(self, why):
self.error(400, "Bad Request", why)
def read_json(self, request):
try:
return json.load(request.content)
except ValueError as ex:
self.bad_request("Error parsing json. %s" % ex.message)
class SlydJsonObjectResource(SlydJsonResource):
"""Extends SlydJsonResource, converting
the returned data to JSON
"""
def render(self, request):
resp = SlydJsonResource.render(self, request)
if resp is not None:
return json.dumps(resp)
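# Hypothetical subclass sketch: render_GET returns a plain dict, and the
# SlydJsonObjectResource.render() wrapper serializes it to JSON.
#   class PingResource(SlydJsonObjectResource):
#       isLeaf = True
#       def render_GET(self, request):
#           return {'status': 'ok'}   # client receives '{"status": "ok"}'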
|
bsd-3-clause
|
mihailignatenko/erp
|
openerp/modules/registry.py
|
220
|
19731
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" Models registries.
"""
from collections import Mapping, defaultdict
import logging
import os
import threading
import openerp
from .. import SUPERUSER_ID
from openerp.tools import assertion_report, lazy_property, classproperty, config
from openerp.tools.lru import LRU
_logger = logging.getLogger(__name__)
class Registry(Mapping):
""" Model registry for a particular database.
The registry is essentially a mapping between model names and model
instances. There is one registry instance per database.
"""
def __init__(self, db_name):
super(Registry, self).__init__()
self.models = {} # model name/model instance mapping
self._sql_error = {}
self._store_function = {}
self._pure_function_fields = {} # {model: [field, ...], ...}
self._init = True
self._init_parent = {}
self._assertion_report = assertion_report.assertion_report()
self._fields_by_model = None
# modules fully loaded (maintained during init phase by `loading` module)
self._init_modules = set()
self.db_name = db_name
self._db = openerp.sql_db.db_connect(db_name)
# special cursor for test mode; None means "normal" mode
self.test_cr = None
        # Indicates that the registry is fully loaded and ready for use.
self.ready = False
# Inter-process signaling (used only when openerp.multi_process is True):
# The `base_registry_signaling` sequence indicates the whole registry
# must be reloaded.
        # The `base_cache_signaling` sequence indicates all caches must be
# invalidated (i.e. cleared).
self.base_registry_signaling_sequence = None
self.base_cache_signaling_sequence = None
self.cache = LRU(8192)
# Flag indicating if at least one model cache has been cleared.
# Useful only in a multi-process context.
self._any_cache_cleared = False
cr = self.cursor()
has_unaccent = openerp.modules.db.has_unaccent(cr)
if openerp.tools.config['unaccent'] and not has_unaccent:
_logger.warning("The option --unaccent was given but no unaccent() function was found in database.")
self.has_unaccent = openerp.tools.config['unaccent'] and has_unaccent
cr.close()
#
# Mapping abstract methods implementation
# => mixin provides methods keys, items, values, get, __eq__, and __ne__
#
def __len__(self):
""" Return the size of the registry. """
return len(self.models)
def __iter__(self):
""" Return an iterator over all model names. """
return iter(self.models)
def __getitem__(self, model_name):
""" Return the model with the given name or raise KeyError if it doesn't exist."""
return self.models[model_name]
def __call__(self, model_name):
""" Same as ``self[model_name]``. """
return self.models[model_name]
@lazy_property
def pure_function_fields(self):
""" Return the list of pure function fields (field objects) """
fields = []
for mname, fnames in self._pure_function_fields.iteritems():
model_fields = self[mname]._fields
for fname in fnames:
fields.append(model_fields[fname])
return fields
def clear_manual_fields(self):
""" Invalidate the cache for manual fields. """
self._fields_by_model = None
def get_manual_fields(self, cr, model_name):
""" Return the manual fields (as a dict) for the given model. """
if self._fields_by_model is None:
# Query manual fields for all models at once
self._fields_by_model = dic = defaultdict(dict)
cr.execute('SELECT * FROM ir_model_fields WHERE state=%s', ('manual',))
for field in cr.dictfetchall():
dic[field['model']][field['name']] = field
return self._fields_by_model[model_name]
def do_parent_store(self, cr):
for o in self._init_parent:
self.get(o)._parent_store_compute(cr)
self._init = False
def obj_list(self):
""" Return the list of model names in this registry."""
return self.keys()
def add(self, model_name, model):
""" Add or replace a model in the registry."""
self.models[model_name] = model
def load(self, cr, module):
""" Load a given module in the registry.
At the Python level, the modules are already loaded, but not yet on a
        per-registry level. This method populates the registry with the given
        module, i.e. it instantiates all the classes of the given module
        and registers them in the registry.
"""
from .. import models
models_to_load = [] # need to preserve loading order
lazy_property.reset_all(self)
# Instantiate registered classes (via the MetaModel automatic discovery
# or via explicit constructor call), and add them to the pool.
for cls in models.MetaModel.module_to_models.get(module.name, []):
# models register themselves in self.models
model = cls._build_model(self, cr)
if model._name not in models_to_load:
# avoid double-loading models whose declaration is split
models_to_load.append(model._name)
return [self.models[m] for m in models_to_load]
def setup_models(self, cr, partial=False):
""" Complete the setup of models.
This must be called after loading modules and before using the ORM.
        :param partial: ``True`` if not all models have been loaded yet.
"""
lazy_property.reset_all(self)
# load custom models
ir_model = self['ir.model']
cr.execute('select model from ir_model where state=%s', ('manual',))
for (model_name,) in cr.fetchall():
ir_model.instanciate(cr, SUPERUSER_ID, model_name, {})
# prepare the setup on all models
for model in self.models.itervalues():
model._prepare_setup(cr, SUPERUSER_ID)
# do the actual setup from a clean state
self._m2m = {}
for model in self.models.itervalues():
model._setup_base(cr, SUPERUSER_ID, partial)
for model in self.models.itervalues():
model._setup_fields(cr, SUPERUSER_ID)
for model in self.models.itervalues():
model._setup_complete(cr, SUPERUSER_ID)
def clear_caches(self):
""" Clear the caches
This clears the caches associated to methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi`` for all the models.
"""
for model in self.models.itervalues():
model.clear_caches()
# Special case for ir_ui_menu which does not use openerp.tools.ormcache.
ir_ui_menu = self.models.get('ir.ui.menu')
if ir_ui_menu is not None:
ir_ui_menu.clear_cache()
# Useful only in a multi-process context.
def reset_any_cache_cleared(self):
self._any_cache_cleared = False
# Useful only in a multi-process context.
def any_cache_cleared(self):
return self._any_cache_cleared
@classmethod
def setup_multi_process_signaling(cls, cr):
if not openerp.multi_process:
return None, None
# Inter-process signaling:
# The `base_registry_signaling` sequence indicates the whole registry
# must be reloaded.
        # The `base_cache_signaling` sequence indicates all caches must be
# invalidated (i.e. cleared).
cr.execute("""SELECT sequence_name FROM information_schema.sequences WHERE sequence_name='base_registry_signaling'""")
if not cr.fetchall():
cr.execute("""CREATE SEQUENCE base_registry_signaling INCREMENT BY 1 START WITH 1""")
cr.execute("""SELECT nextval('base_registry_signaling')""")
cr.execute("""CREATE SEQUENCE base_cache_signaling INCREMENT BY 1 START WITH 1""")
cr.execute("""SELECT nextval('base_cache_signaling')""")
cr.execute("""
SELECT base_registry_signaling.last_value,
base_cache_signaling.last_value
FROM base_registry_signaling, base_cache_signaling""")
r, c = cr.fetchone()
_logger.debug("Multiprocess load registry signaling: [Registry: # %s] "\
"[Cache: # %s]",
r, c)
return r, c
def enter_test_mode(self):
""" Enter the 'test' mode, where one cursor serves several requests. """
assert self.test_cr is None
self.test_cr = self._db.test_cursor()
RegistryManager.enter_test_mode()
def leave_test_mode(self):
""" Leave the test mode. """
assert self.test_cr is not None
self.test_cr.force_close()
self.test_cr = None
RegistryManager.leave_test_mode()
def cursor(self):
""" Return a new cursor for the database. The cursor itself may be used
as a context manager to commit/rollback and close automatically.
"""
cr = self.test_cr
if cr is not None:
# While in test mode, we use one special cursor across requests. The
# test cursor uses a reentrant lock to serialize accesses. The lock
# is granted here by cursor(), and automatically released by the
# cursor itself in its method close().
cr.acquire()
return cr
return self._db.cursor()
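# Typical usage sketch (database name hypothetical): the cursor doubles as a
# context manager, committing or rolling back and closing on exit.
#   registry = RegistryManager.get('my_database')
#   with registry.cursor() as cr:
#       cr.execute('SELECT count(*) FROM ir_module_module')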
class DummyRLock(object):
""" Dummy reentrant lock, to be used while running rpc and js tests """
def acquire(self):
pass
def release(self):
pass
def __enter__(self):
self.acquire()
def __exit__(self, type, value, traceback):
self.release()
class RegistryManager(object):
""" Model registries manager.
The manager is responsible for creation and deletion of model
registries (essentially database connection/model registry pairs).
"""
_registries = None
_lock = threading.RLock()
_saved_lock = None
@classproperty
def registries(cls):
if cls._registries is None:
size = config.get('registry_lru_size', None)
if not size:
                # Size the LRU depending on the memory limits
if os.name != 'posix':
                    # the soft memory limit cannot be specified on Windows...
size = 42
else:
# A registry takes 10MB of memory on average, so we reserve
# 10Mb (registry) + 5Mb (working memory) per registry
avgsz = 15 * 1024 * 1024
size = int(config['limit_memory_soft'] / avgsz)
cls._registries = LRU(size)
return cls._registries
@classmethod
def lock(cls):
""" Return the current registry lock. """
return cls._lock
@classmethod
def enter_test_mode(cls):
""" Enter the 'test' mode, where the registry is no longer locked. """
assert cls._saved_lock is None
cls._lock, cls._saved_lock = DummyRLock(), cls._lock
@classmethod
def leave_test_mode(cls):
""" Leave the 'test' mode. """
assert cls._saved_lock is not None
cls._lock, cls._saved_lock = cls._saved_lock, None
@classmethod
def get(cls, db_name, force_demo=False, status=None, update_module=False):
""" Return a registry for a given database name."""
with cls.lock():
try:
return cls.registries[db_name]
except KeyError:
return cls.new(db_name, force_demo, status,
update_module)
finally:
# set db tracker - cleaned up at the WSGI
# dispatching phase in openerp.service.wsgi_server.application
threading.current_thread().dbname = db_name
@classmethod
def new(cls, db_name, force_demo=False, status=None,
update_module=False):
""" Create and return a new registry for a given database name.
The (possibly) previous registry for that database name is discarded.
"""
import openerp.modules
with cls.lock():
with openerp.api.Environment.manage():
registry = Registry(db_name)
# Initializing a registry will call general code which will in
# turn call registries.get (this object) to obtain the registry
# being initialized. Make it available in the registries
# dictionary then remove it if an exception is raised.
cls.delete(db_name)
cls.registries[db_name] = registry
try:
with registry.cursor() as cr:
seq_registry, seq_cache = Registry.setup_multi_process_signaling(cr)
registry.base_registry_signaling_sequence = seq_registry
registry.base_cache_signaling_sequence = seq_cache
# This should be a method on Registry
openerp.modules.load_modules(registry._db, force_demo, status, update_module)
except Exception:
del cls.registries[db_name]
raise
# load_modules() above can replace the registry by calling
# indirectly new() again (when modules have to be uninstalled).
# Yeah, crazy.
registry = cls.registries[db_name]
cr = registry.cursor()
try:
registry.do_parent_store(cr)
cr.commit()
finally:
cr.close()
registry.ready = True
if update_module:
# only in case of update, otherwise we'll have an infinite reload loop!
cls.signal_registry_change(db_name)
return registry
@classmethod
def delete(cls, db_name):
"""Delete the registry linked to a given database. """
with cls.lock():
if db_name in cls.registries:
cls.registries[db_name].clear_caches()
del cls.registries[db_name]
@classmethod
def delete_all(cls):
"""Delete all the registries. """
with cls.lock():
for db_name in cls.registries.keys():
cls.delete(db_name)
@classmethod
def clear_caches(cls, db_name):
"""Clear caches
This clears the caches associated to methods decorated with
``tools.ormcache`` or ``tools.ormcache_multi`` for all the models
of the given database name.
This method is given to spare you a ``RegistryManager.get(db_name)``
        that would load the given database if it was not already loaded.
"""
with cls.lock():
if db_name in cls.registries:
cls.registries[db_name].clear_caches()
@classmethod
def check_registry_signaling(cls, db_name):
"""
        Check if the modules have changed and perform all necessary operations to update
the registry of the corresponding database.
        :returns: True if changes have been detected in the database, False otherwise.
"""
changed = False
if openerp.multi_process and db_name in cls.registries:
registry = cls.get(db_name)
cr = registry.cursor()
try:
cr.execute("""
SELECT base_registry_signaling.last_value,
base_cache_signaling.last_value
FROM base_registry_signaling, base_cache_signaling""")
r, c = cr.fetchone()
_logger.debug("Multiprocess signaling check: [Registry - old# %s new# %s] "\
"[Cache - old# %s new# %s]",
registry.base_registry_signaling_sequence, r,
registry.base_cache_signaling_sequence, c)
# Check if the model registry must be reloaded (e.g. after the
# database has been updated by another process).
if registry.base_registry_signaling_sequence is not None and registry.base_registry_signaling_sequence != r:
changed = True
_logger.info("Reloading the model registry after database signaling.")
registry = cls.new(db_name)
# Check if the model caches must be invalidated (e.g. after a write
                # occurred in another process). Don't clear right after a registry
                # has been reloaded.
elif registry.base_cache_signaling_sequence is not None and registry.base_cache_signaling_sequence != c:
changed = True
_logger.info("Invalidating all model caches after database signaling.")
registry.clear_caches()
registry.reset_any_cache_cleared()
registry.base_registry_signaling_sequence = r
registry.base_cache_signaling_sequence = c
finally:
cr.close()
return changed
@classmethod
def signal_caches_change(cls, db_name):
if openerp.multi_process and db_name in cls.registries:
# Check the registries if any cache has been cleared and signal it
# through the database to other processes.
registry = cls.get(db_name)
if registry.any_cache_cleared():
_logger.info("At least one model cache has been cleared, signaling through the database.")
cr = registry.cursor()
r = 1
try:
cr.execute("select nextval('base_cache_signaling')")
r = cr.fetchone()[0]
finally:
cr.close()
registry.base_cache_signaling_sequence = r
registry.reset_any_cache_cleared()
@classmethod
def signal_registry_change(cls, db_name):
if openerp.multi_process and db_name in cls.registries:
_logger.info("Registry changed, signaling through the database")
registry = cls.get(db_name)
cr = registry.cursor()
r = 1
try:
cr.execute("select nextval('base_registry_signaling')")
r = cr.fetchone()[0]
finally:
cr.close()
registry.base_registry_signaling_sequence = r
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
PatKayongo/patkayongo.github.io
|
node_modules/pygmentize-bundled/vendor/pygments/build-2.7/pygments/styles/manni.py
|
364
|
2374
|
# -*- coding: utf-8 -*-
"""
pygments.styles.manni
~~~~~~~~~~~~~~~~~~~~~
A colorful style, inspired by the terminal highlighting style.
This is a port of the style used in the `php port`_ of pygments
by Manni. The style is called 'default' there.
:copyright: Copyright 2006-2013 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.style import Style
from pygments.token import Keyword, Name, Comment, String, Error, \
Number, Operator, Generic, Whitespace
class ManniStyle(Style):
"""
A colorful style, inspired by the terminal highlighting style.
"""
background_color = '#f0f3f3'
styles = {
Whitespace: '#bbbbbb',
Comment: 'italic #0099FF',
Comment.Preproc: 'noitalic #009999',
Comment.Special: 'bold',
Keyword: 'bold #006699',
Keyword.Pseudo: 'nobold',
Keyword.Type: '#007788',
Operator: '#555555',
Operator.Word: 'bold #000000',
Name.Builtin: '#336666',
Name.Function: '#CC00FF',
Name.Class: 'bold #00AA88',
Name.Namespace: 'bold #00CCFF',
Name.Exception: 'bold #CC0000',
Name.Variable: '#003333',
Name.Constant: '#336600',
Name.Label: '#9999FF',
Name.Entity: 'bold #999999',
Name.Attribute: '#330099',
Name.Tag: 'bold #330099',
Name.Decorator: '#9999FF',
String: '#CC3300',
String.Doc: 'italic',
String.Interpol: '#AA0000',
String.Escape: 'bold #CC3300',
String.Regex: '#33AAAA',
String.Symbol: '#FFCC33',
String.Other: '#CC3300',
Number: '#FF6600',
Generic.Heading: 'bold #003300',
Generic.Subheading: 'bold #003300',
Generic.Deleted: 'border:#CC0000 bg:#FFCCCC',
Generic.Inserted: 'border:#00CC00 bg:#CCFFCC',
Generic.Error: '#FF0000',
Generic.Emph: 'italic',
Generic.Strong: 'bold',
Generic.Prompt: 'bold #000099',
Generic.Output: '#AAAAAA',
Generic.Traceback: '#99CC66',
Error: 'bg:#FFAAAA #AA0000'
}
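# Illustrative use via the standard pygments API (editor's sketch):
#
#   from pygments import highlight
#   from pygments.lexers import PythonLexer
#   from pygments.formatters import HtmlFormatter
#   print(highlight('print("hi")', PythonLexer(), HtmlFormatter(style='manni')))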
|
mit
|
shujingke/opencog
|
opencog/python/pln_old/examples/deduction/deduction_agent.py
|
32
|
1303
|
"""
PLN Simple Deduction Agent Example
Demonstrates the simplest possible forward inference agent that implements
a chainer with one inference rule and one link type.
For instructions, refer to the README for PLN.
"""
from opencog.cogserver import MindAgent
from opencog.atomspace import types
from pln.chainers import Chainer
from pln.rules import *
__author__ = 'Cosmo Harrigan'
__VERBOSE__ = False
class DeductionAgent(MindAgent):
def __init__(self):
self.chainer = None
def create_chainer(self, atomspace):
self.chainer = Chainer(atomspace, stimulateAtoms=False)
link_types = [types.InheritanceLink]
for link_type in link_types:
self.chainer.add_rule(DeductionRule(self.chainer, link_type))
def run(self, atomspace):
# Run is invoked on every cogserver cognitive cycle
# If it is the first time it has been invoked, then the chainer
# needs to be created
if self.chainer is None:
self.create_chainer(atomspace)
return
result = self.chainer.forward_step()
if __VERBOSE__:
print result
return result
def get_trails(self):
return self.chainer.trails
def get_history(self):
return self.chainer.history.get_history()
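# Illustrative standalone use (editor's sketch, assuming an AtomSpace
# `atomspace` already populated with InheritanceLinks):
#
#   agent = DeductionAgent()
#   agent.run(atomspace)           # first call only builds the chainer
#   result = agent.run(atomspace)  # later calls apply one forward deduction step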
|
agpl-3.0
|
wisechengyi/pants
|
tests/python/pants_test/backend/graph_info/tasks/test_dependees.py
|
1
|
9733
|
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from textwrap import dedent
from pants.backend.codegen.thrift.java.java_thrift_library import JavaThriftLibrary
from pants.backend.graph_info.tasks.dependees import ReverseDepmap
from pants.backend.jvm.targets.jar_library import JarLibrary
from pants.backend.jvm.targets.java_library import JavaLibrary
from pants.backend.jvm.targets.scala_jar_dependency import ScalaJarDependency
from pants.backend.python.targets.python_library import PythonLibrary
from pants.backend.python.targets.python_tests import PythonTests
from pants.build_graph.build_file_aliases import BuildFileAliases
from pants.build_graph.resources import Resources
from pants.build_graph.target import Target
from pants.java.jar.jar_dependency import JarDependency
from pants.testutil.task_test_base import ConsoleTaskTestBase
class BaseReverseDepmapTest(ConsoleTaskTestBase):
@classmethod
def task_type(cls):
return ReverseDepmap
@classmethod
def alias_groups(cls):
return BuildFileAliases(
targets={
"target": Target,
"jar_library": JarLibrary,
"java_library": JavaLibrary,
"java_thrift_library": JavaThriftLibrary,
"python_library": PythonLibrary,
"python_tests": PythonTests,
"resources": Resources,
},
objects={"jar": JarDependency, "scala_jar": ScalaJarDependency,},
)
def setUp(self):
super().setUp()
def add_to_build_file(path, name, alias=False, deps=()):
self.add_to_build_file(
path,
dedent(
"""
{type}(name='{name}',
dependencies=[{deps}],
{sources}
)
""".format(
type="target" if alias else "python_library",
name=name,
deps=",".join("'{0}'".format(dep) for dep in list(deps)),
sources="" if alias else "sources=[]",
),
),
)
add_to_build_file("common/a", "a", deps=["common/d"])
add_to_build_file("common/b", "b")
add_to_build_file("common/c", "c")
add_to_build_file("common/d", "d")
add_to_build_file("tests/d", "d", deps=["common/d"])
add_to_build_file("overlaps", "one", deps=["common/a", "common/b"])
add_to_build_file("overlaps", "two", deps=["common/a", "common/c"])
add_to_build_file("overlaps", "three", deps=["common/a", "overlaps:one"])
add_to_build_file("overlaps", "four", alias=True, deps=["common/b"])
add_to_build_file("overlaps", "five", deps=["overlaps:four"])
self.add_to_build_file(
"resources/a",
dedent(
"""
resources(
name='a_resources',
sources=['a.resource']
)
"""
),
)
self.add_to_build_file(
"src/java/a",
dedent(
"""
java_library(
name='a_java',
sources=[],
dependencies=['resources/a:a_resources']
)
"""
),
)
# Compile idl tests
self.add_to_build_file(
"src/thrift/example",
dedent(
"""
java_thrift_library(
name='mybird',
compiler='scrooge',
language='scala',
sources=['1.thrift']
)
"""
),
)
self.add_to_build_file(
"src/thrift/example",
dedent(
"""
target(
name='compiled_scala',
dependencies=[
':mybird',
]
)
"""
),
)
self.add_to_build_file(
"src/thrift/example",
dedent(
"""
java_library(
name='compiled_java_user',
dependencies=[
':compiled_scala'
],
sources=['1.java'],
)
"""
),
)
add_to_build_file("src/thrift/dependent", "my-example", deps=["src/thrift/example:mybird"])
self.add_to_build_file(
"src/java/example",
dedent(
"""
jar_library(
name='mybird-jars',
jars=[
jar(org='com', name='twitter')
],
)
"""
),
)
# External Dependency tests
self.add_to_build_file(
"src/java/example",
dedent(
"""
java_library(
name='mybird',
dependencies=[':mybird-jars'],
sources=['1.java'],
)
"""
),
)
self.add_to_build_file(
"src/java/example",
dedent(
"""
java_library(
name='example2',
dependencies=[
':mybird',
],
sources=['2.java']
)
"""
),
)
class ReverseDepmapTest(BaseReverseDepmapTest):
def test_empty(self):
self.assert_console_output(targets=[])
def test_empty_json(self):
self.assert_console_output("{}", targets=[], options={"output_format": "json"})
def test_roots(self):
self.assert_console_output(
"overlaps:two",
targets=[self.target("common/c")],
extra_targets=[self.target("common/a")],
)
def test_normal(self):
self.assert_console_output("overlaps:two", targets=[self.target("common/c")])
def test_closed(self):
self.assert_console_output(
"overlaps:two",
"common/c:c",
targets=[self.target("common/c")],
options={"closed": True},
)
def test_closed_output_format_json(self):
self.assert_console_output(
dedent(
"""
{
"common/c:c": [
"common/c:c",
"overlaps:two"
]
}"""
).lstrip("\n"),
targets=[self.target("common/c")],
options={"closed": True, "output_format": "json"},
)
def test_transitive(self):
self.assert_console_output(
"overlaps:one",
"overlaps:three",
"overlaps:four",
"overlaps:five",
targets=[self.target("common/b")],
options={"transitive": True},
)
def test_transitive_output_format_json(self):
self.assert_console_output(
dedent(
"""
{
"common/b:b": [
"overlaps:five",
"overlaps:four",
"overlaps:one",
"overlaps:three"
]
}"""
).lstrip("\n"),
targets=[self.target("common/b")],
options={"transitive": True, "output_format": "json"},
)
def test_nodups_dependees_output_format_json(self):
self.assert_console_output(
dedent(
"""
{
"common/a:a": [
"overlaps:one",
"overlaps:three",
"overlaps:two"
],
"overlaps:one": [
"overlaps:three"
]
}"""
).lstrip("\n"),
targets=[self.target("common/a"), self.target("overlaps:one")],
options={"output_format": "json"},
)
def test_nodups_dependees(self):
self.assert_console_output(
"overlaps:two",
"overlaps:three",
targets=[self.target("common/a"), self.target("overlaps:one")],
)
def test_nodups_roots(self):
targets = [self.target("common/c")] * 2
self.assertEqual(2, len(targets))
self.assert_console_output(
"overlaps:two", "common/c:c", targets=targets, options={"closed": True}
)
def test_aliasing(self):
self.assert_console_output("overlaps:five", targets=[self.target("overlaps:four")])
def test_compile_idls(self):
self.assert_console_output(
"src/thrift/dependent:my-example",
"src/thrift/example:compiled_scala",
targets=[self.target("src/thrift/example:mybird"),],
)
def test_external_dependency(self):
self.assert_console_output(
"src/java/example:example2", targets=[self.target("src/java/example:mybird")]
)
def test_resources_dependees(self):
self.assert_console_output(
"src/java/a:a_java", targets=[self.target("resources/a:a_resources")]
)
def test_overlaps_without_build_ignore_patterns(self):
self.assert_console_output(
"overlaps:one", "overlaps:two", "overlaps:three", targets=[self.target("common/a")]
)
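# Illustrative command-line invocation of the task under test (editor's sketch;
# flag spellings are assumed from the option names exercised above):
#
#   ./pants dependees --transitive --output-format=json common/b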
|
apache-2.0
|
playm2mboy/edx-platform
|
common/test/acceptance/tests/studio/test_studio_with_ora_component.py
|
87
|
4074
|
"""
Acceptance tests for Studio related to edit/save peer grading interface.
"""
from ...fixtures.course import XBlockFixtureDesc
from ...pages.studio.import_export import ExportCoursePage
from ...pages.studio.component_editor import ComponentEditorView
from ...pages.studio.overview import CourseOutlinePage
from base_studio_test import StudioCourseTest
from ..helpers import load_data_str
class ORAComponentTest(StudioCourseTest):
"""
    Tests that edit/save works correctly when link_to_location
    is given in the peer grading interface settings.
"""
def setUp(self):
super(ORAComponentTest, self).setUp()
self.course_outline_page = CourseOutlinePage(
self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
)
self.export_page = ExportCoursePage(
self.browser,
self.course_info['org'], self.course_info['number'], self.course_info['run']
)
def populate_course_fixture(self, course_fixture):
"""
        Populate the course fixture with an ORA (combinedopenended) component
        and a peer grading module.
"""
course_fixture.add_children(
XBlockFixtureDesc('chapter', 'Test Section').add_children(
XBlockFixtureDesc('sequential', 'Test Subsection').add_children(
XBlockFixtureDesc('vertical', 'Test Unit').add_children(
XBlockFixtureDesc(
'combinedopenended',
"Peer Problem",
data=load_data_str('ora_peer_problem.xml'),
metadata={
'graded': True,
},
),
XBlockFixtureDesc('peergrading', 'Peer Module'),
)
)
)
)
def _go_to_unit_page(self, section_name='Test Section', subsection_name='Test Subsection', unit_name='Test Unit'):
self.course_outline_page.visit()
subsection = self.course_outline_page.section(section_name).subsection(subsection_name)
return subsection.expand_subsection().unit(unit_name).go_to()
def test_edit_save_and_export(self):
"""
Ensure that edit/save is working correctly with link_to_location
in peer interface settings.
"""
self.course_outline_page.visit()
unit = self._go_to_unit_page()
peer_problem_location = unit.xblocks[1].locator
        # Problem location should contain "combinedopenended".
self.assertIn("combinedopenended", peer_problem_location)
component = unit.xblocks[2]
# Interface component name should be "Peer Module".
self.assertEqual(component.name, "Peer Module")
component.edit()
component_editor = ComponentEditorView(self.browser, component.locator)
component_editor.set_field_value_and_save('Link to Problem Location', peer_problem_location)
# Verify that we can edit component again after saving and link_to_location is present.
component.edit()
location_input_element = component_editor.get_setting_element("Link to Problem Location")
self.assertEqual(
location_input_element.get_attribute('value'),
peer_problem_location
)
def test_verify_ora1_deprecation_message(self):
"""
Scenario: Verifies the ora1 deprecation message on ora components.
Given I have a course with ora 1 components
When I go to the unit page
Then I see a deprecation error message in ora 1 components.
"""
self.course_outline_page.visit()
unit = self._go_to_unit_page()
for xblock in unit.xblocks:
self.assertTrue(xblock.has_validation_error)
self.assertEqual(
xblock.validation_error_text,
"ORA1 is no longer supported. To use this assessment, "
"replace this ORA1 component with an ORA2 component."
)
|
agpl-3.0
|
Teamxrtc/webrtc-streaming-node
|
third_party/depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/third_party/boto/boto/route53/hostedzone.py
|
136
|
1961
|
# Copyright (c) 2010 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
class HostedZone(object):
def __init__(self, id=None, name=None, owner=None, version=None,
caller_reference=None):
self.id = id
self.name = name
self.owner = owner
self.version = version
self.caller_reference = caller_reference
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
if name == 'Id':
self.id = value
elif name == 'Name':
self.name = value
elif name == 'Owner':
self.owner = value
elif name == 'Version':
self.version = value
elif name == 'CallerReference':
self.caller_reference = value
else:
setattr(self, name, value)
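# Illustrative use of the SAX-style hooks above (editor's sketch; in practice
# boto's XML response handler drives endElement for each tag):
#
#   zone = HostedZone()
#   zone.endElement('Id', '/hostedzone/Z3EXAMPLE', None)
#   zone.endElement('Name', 'example.com.', None)
#   (zone.id, zone.name)  # -> ('/hostedzone/Z3EXAMPLE', 'example.com.')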
|
mit
|
jocelynmass/nrf51
|
toolchain/deprecated/arm_cm0_4.9/arm-none-eabi/share/gdb/python/gdb/printing.py
|
12
|
10681
|
# Pretty-printer utilities.
# Copyright (C) 2010-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Utilities for working with pretty-printers."""
import gdb
import gdb.types
import re
import sys
if sys.version_info[0] > 2:
# Python 3 removed basestring and long
basestring = str
long = int
class PrettyPrinter(object):
"""A basic pretty-printer.
Attributes:
name: A unique string among all printers for the context in which
it is defined (objfile, progspace, or global(gdb)), and should
meaningfully describe what can be pretty-printed.
E.g., "StringPiece" or "protobufs".
subprinters: An iterable object with each element having a `name'
attribute, and, potentially, "enabled" attribute.
Or this is None if there are no subprinters.
enabled: A boolean indicating if the printer is enabled.
Subprinters are for situations where "one" pretty-printer is actually a
collection of several printers. E.g., The libstdc++ pretty-printer has
a pretty-printer for each of several different types, based on regexps.
"""
# While one might want to push subprinters into the subclass, it's
# present here to formalize such support to simplify
# commands/pretty_printers.py.
def __init__(self, name, subprinters=None):
self.name = name
self.subprinters = subprinters
self.enabled = True
def __call__(self, val):
# The subclass must define this.
raise NotImplementedError("PrettyPrinter __call__")
class SubPrettyPrinter(object):
"""Baseclass for sub-pretty-printers.
Sub-pretty-printers needn't use this, but it formalizes what's needed.
Attributes:
name: The name of the subprinter.
enabled: A boolean indicating if the subprinter is enabled.
"""
def __init__(self, name):
self.name = name
self.enabled = True
def register_pretty_printer(obj, printer, replace=False):
"""Register pretty-printer PRINTER with OBJ.
The printer is added to the front of the search list, thus one can override
an existing printer if one needs to. Use a different name when overriding
an existing printer, otherwise an exception will be raised; multiple
printers with the same name are disallowed.
Arguments:
obj: Either an objfile, progspace, or None (in which case the printer
is registered globally).
printer: Either a function of one argument (old way) or any object
which has attributes: name, enabled, __call__.
replace: If True replace any existing copy of the printer.
Otherwise if the printer already exists raise an exception.
Returns:
Nothing.
Raises:
TypeError: A problem with the type of the printer.
ValueError: The printer's name contains a semicolon ";".
RuntimeError: A printer with the same name is already registered.
If the caller wants the printer to be listable and disableable, it must
follow the PrettyPrinter API. This applies to the old way (functions) too.
If printer is an object, __call__ is a method of two arguments:
self, and the value to be pretty-printed. See PrettyPrinter.
"""
# Watch for both __name__ and name.
# Functions get the former for free, but we don't want to use an
# attribute named __foo__ for pretty-printers-as-objects.
# If printer has both, we use `name'.
if not hasattr(printer, "__name__") and not hasattr(printer, "name"):
raise TypeError("printer missing attribute: name")
if hasattr(printer, "name") and not hasattr(printer, "enabled"):
raise TypeError("printer missing attribute: enabled")
if not hasattr(printer, "__call__"):
raise TypeError("printer missing attribute: __call__")
    if obj is None:
        if gdb.parameter("verbose"):
            # Fall back on __name__ for printers that are plain functions.
            gdb.write("Registering global %s pretty-printer ...\n" %
                      getattr(printer, "name", getattr(printer, "__name__", "?")))
        obj = gdb
    else:
        if gdb.parameter("verbose"):
            gdb.write("Registering %s pretty-printer for %s ...\n" %
                      (getattr(printer, "name", getattr(printer, "__name__", "?")),
                       obj.filename))
if hasattr(printer, "name"):
if not isinstance(printer.name, basestring):
raise TypeError("printer name is not a string")
# If printer provides a name, make sure it doesn't contain ";".
# Semicolon is used by the info/enable/disable pretty-printer commands
# to delimit subprinters.
if printer.name.find(";") >= 0:
raise ValueError("semicolon ';' in printer name")
# Also make sure the name is unique.
# Alas, we can't do the same for functions and __name__, they could
# all have a canonical name like "lookup_function".
# PERF: gdb records printers in a list, making this inefficient.
i = 0
for p in obj.pretty_printers:
if hasattr(p, "name") and p.name == printer.name:
if replace:
del obj.pretty_printers[i]
break
else:
raise RuntimeError("pretty-printer already registered: %s" %
printer.name)
i = i + 1
obj.pretty_printers.insert(0, printer)
class RegexpCollectionPrettyPrinter(PrettyPrinter):
"""Class for implementing a collection of regular-expression based pretty-printers.
Intended usage:
pretty_printer = RegexpCollectionPrettyPrinter("my_library")
pretty_printer.add_printer("myclass1", "^myclass1$", MyClass1Printer)
...
pretty_printer.add_printer("myclassN", "^myclassN$", MyClassNPrinter)
register_pretty_printer(obj, pretty_printer)
"""
class RegexpSubprinter(SubPrettyPrinter):
def __init__(self, name, regexp, gen_printer):
super(RegexpCollectionPrettyPrinter.RegexpSubprinter, self).__init__(name)
self.regexp = regexp
self.gen_printer = gen_printer
self.compiled_re = re.compile(regexp)
def __init__(self, name):
super(RegexpCollectionPrettyPrinter, self).__init__(name, [])
def add_printer(self, name, regexp, gen_printer):
"""Add a printer to the list.
The printer is added to the end of the list.
Arguments:
name: The name of the subprinter.
regexp: The regular expression, as a string.
gen_printer: A function/method that given a value returns an
object to pretty-print it.
Returns:
Nothing.
"""
# NOTE: A previous version made the name of each printer the regexp.
# That makes it awkward to pass to the enable/disable commands (it's
# cumbersome to make a regexp of a regexp). So now the name is a
# separate parameter.
self.subprinters.append(self.RegexpSubprinter(name, regexp,
gen_printer))
def __call__(self, val):
"""Lookup the pretty-printer for the provided value."""
# Get the type name.
typename = gdb.types.get_basic_type(val.type).tag
if not typename:
typename = val.type.name
if not typename:
return None
# Iterate over table of type regexps to determine
# if a printer is registered for that type.
# Return an instantiation of the printer if found.
for printer in self.subprinters:
if printer.enabled and printer.compiled_re.search(typename):
return printer.gen_printer(val)
# Cannot find a pretty printer. Return None.
return None
# A helper class for printing enum types. This class is instantiated
# with a list of enumerators to print a particular Value.
class _EnumInstance:
def __init__(self, enumerators, val):
self.enumerators = enumerators
self.val = val
def to_string(self):
flag_list = []
v = long(self.val)
any_found = False
for (e_name, e_value) in self.enumerators:
if v & e_value != 0:
flag_list.append(e_name)
v = v & ~e_value
any_found = True
if not any_found or v != 0:
# Leftover value.
flag_list.append('<unknown: 0x%x>' % v)
return "0x%x [%s]" % (self.val, " | ".join(flag_list))
class FlagEnumerationPrinter(PrettyPrinter):
"""A pretty-printer which can be used to print a flag-style enumeration.
A flag-style enumeration is one where the enumerators are or'd
together to create values. The new printer will print these
symbolically using '|' notation. The printer must be registered
manually. This printer is most useful when an enum is flag-like,
but has some overlap. GDB's built-in printing will not handle
this case, but this printer will attempt to."""
def __init__(self, enum_type):
super(FlagEnumerationPrinter, self).__init__(enum_type)
self.initialized = False
def __call__(self, val):
if not self.initialized:
self.initialized = True
flags = gdb.lookup_type(self.name)
self.enumerators = []
for field in flags.fields():
self.enumerators.append((field.name, field.enumval))
# Sorting the enumerators by value usually does the right
# thing.
            self.enumerators.sort(key = lambda x: x[1])
if self.enabled:
return _EnumInstance(self.enumerators, val)
else:
return None
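# Illustrative registration (editor's sketch): for a C flag enum `enum flags',
# registering the printer globally lets GDB render values symbolically:
#
#   register_pretty_printer(None, FlagEnumerationPrinter("enum flags"))
#   # a value of 0x3 then prints as e.g. "0x3 [FLAG_A | FLAG_B]"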
# Builtin pretty-printers.
# The set is defined as empty, and files in printing/*.py add their printers
# to this with add_builtin_pretty_printer.
_builtin_pretty_printers = RegexpCollectionPrettyPrinter("builtin")
register_pretty_printer(None, _builtin_pretty_printers)
# Add a builtin pretty-printer.
def add_builtin_pretty_printer(name, regexp, printer):
_builtin_pretty_printers.add_printer(name, regexp, printer)
|
gpl-2.0
|
cosurgi/trunk
|
examples/HydroForceEngine/validations/DEMCoupling/Maurinetal2015/validMaurin2015.py
|
2
|
17210
|
from __future__ import print_function
#########################################################################################################################################################################
# Author: Raphael Maurin, raphael.maurin@imft.fr
# 24/11/2017
#
# Reproduce the experimental comparison of Maurin et al (2015), considering the experiments of Frey (2014)
# The experiment consists of a quasi-2D channel
# Adapt the script "sedimentTransportExample_1DRANSCoupling.py", for the quasi-2D configuration, putting lateral walls
# and defining the configuration in order to reproduce exactly the experimental configuration of Frey (2014)
# The fluid resolution also includes a sink term to account for the friction on the lateral walls
# In order to reproduce the experimental results, it is also necessary to account for the fluid velocity fluctuation through
# a discrete random walk model, as described in Maurin et al (2015). The latter is imposed by the function turbulentFluctuations()
#
# The comparison with the experiments can be plotted using the postProcess_validation.py file
#
############################################################################################################################################################################
#Import libraries
from builtins import range
from yade import pack, plot
import math
import random as rand
import numpy as np
#Choice of the experiment to reproduce. Only one should be set to 1.
sim6 = 1
sim14 = 0
sim20 = 0
##
## Main parameters of the simulation
##
#Particles
diameterPart = 6e-3 #Diameter of the particles, in m
densPart = 2500 #density of the particles, in kg/m3
phiPartMax = 0.51 #Value of the dense packing solid volume fraction, dimensionless
restitCoef = 0.5 #Restitution coefficient of the particles, dimensionless
partFrictAngle = atan(0.4) #friction angle of the particles, in radian
#Fluid
densFluidPY = 1000. #Density of the fluid, in kg/m^3
kinematicViscoFluid = 1e-6 #kinematic viscosity of the fluid, in m^2/s
dtFluid = 1e-5 #Time step for the fluid resolution, in s
fluidResolPeriod = 1e-2 #Time between two fluid resolution, in s
#Configuration: inclined channel
slope = 0.1 #Inclination angle of the channel slope in radian
lengthCell = 1000 #Streamwise length of the periodic cell, in diameter
widthCell = 6.5/6. #Spanwise length of the periodic cell, in diameter
if sim6==1:
fluidHeight = 5.3e-2 #Height of the flow from the bottom of the sample, in m
Nlayer = 7.08 #nb of layer of particle, in diameter
nbSim=6
elif sim14==1:
fluidHeight = 5.7e-2 #Height of the flow from the bottom of the sample, in m
Nlayer = 7.37 #nb of layer of particle, in diameter
nbSim=14
elif sim20==1:
fluidHeight = 5.9e-2 #Height of the flow from the bottom of the sample, in m
Nlayer = 7.30 #nb of layer of particle, in diameter
nbSim=20
else:
print('\n At least one of the option sim6, sim14 or sim20 should be equal to 1 !! Exit !\n')
exit()
saveData = 1 #If put to 1, at each execution of function measure() save the sediment transport rate, fluid velocity, solid volume fraction and velocity profiles for post-processing
endTime = 100 #Time simulated (in seconds)
##
## Secondary parameters of the simulation
##
expoDrag_PY = 3.1 # Richardson Zaki exponent for the hindrance function of the drag force applied to the particles
#Discretization of the sample in ndimz wall-normal (z) steps of size dz, between the bottom of the channel and the position of the water free-surface. Should be equal to the length of the imposed fluid profile. Mesh used for HydroForceEngine.
ndimz = 301 #Number of cells in the height
dz = fluidHeight/(1.0*(ndimz-1)) # Fluid discretization step in the wall-normal direction
# Initialization of the main vectors
vxFluidPY = np.zeros(ndimz+1) # Vertical fluid velocity profile: u^f = u_x^f(z) e_x, with x the streamwise direction and z the wall-normal
phiPartPY = np.zeros(ndimz-1) # Vertical particle volume fraction profile
vxPartPY = np.zeros(ndimz-1) # Vertical average particle velocity profile
#Geometrical configuration, define useful quantities
height = 5*fluidHeight #height of the periodic cell, in m (larger than the fluid height to account for particles jumping above it)
length = lengthCell*diameterPart #length of the stream, in m
width = widthCell*diameterPart #width of the stream, in m
groundPosition = height/4.0 #Definition of the position of the ground, in m
gravityVector = Vector3(9.81*sin(slope),0.0,-9.81*cos(slope)) #Gravity vector to consider a channel inclined with slope angle 'slope'
#Particles contact law/material parameters
maxPressure = (densPart-densFluidPY)*phiPartMax*Nlayer*diameterPart*abs(gravityVector[2]) #Estimated max particle pressure from the static load
normalStiffness = maxPressure*diameterPart*1e4 #Evaluate the minimal normal stiffness to be in the rigid particle limit (cf Roux and Combe 2002)
youngMod = normalStiffness/diameterPart #Young modulus of the particles from the stiffness wanted.
poissonRatio = 0.5 #poisson's ratio of the particles. Classical values, does not have much influence
O.materials.append(ViscElMat(en=restitCoef, et=0., young=youngMod, poisson=poissonRatio, density=densPart, frictionAngle=partFrictAngle, label='Mat'))
########################
## FRAMEWORK CREATION ##
########################
#Definition of the semi-periodic cell
O.periodic = True
# Reference walls: build two planes at the ground and free-surface to have a reference for the eyes in the 3D view
lowPlane = box(center= (length/2.0, width/2.0,groundPosition),extents=(200,200,0),fixed=True,wire=False,color = (0.,1.,0.),material = 'Mat')
WaterSurface = box(center= (length/2.0, width/2.0,groundPosition+fluidHeight),extents=(2000,width/2.0,0),fixed=True,wire=False,color = (0,0,1),material = 'Mat',mask = 0)
O.bodies.append([lowPlane,WaterSurface]) #add to simulation
#Lateral walls
O.cell.setBox(length,width+2*diameterPart,height)
#Modify accordingly the position of the center of the cell and the wall right and left position
leftLimitY = diameterPart
rightLimitY = width+diameterPart
centerLimitY = diameterPart + width/2.0
#Define the wall and add to the simulation
sidePlaneL = box(center= (length/2.0,leftLimitY,height/2.0),extents=(2000,0,height*10),fixed=True,wire = True,color = (1,0,0), material = 'Mat')
sidePlaneR = box(center= (length/2.0,rightLimitY,height/2.0),extents=(2000,0,height*10.0),fixed=True,wire=True, material = 'Mat',color = (0,0,1))
O.bodies.append([sidePlaneR,sidePlaneL])
# Regular arrangement of spheres stuck at the bottom with random height
L = list(range(0,int(length/(diameterPart)))) #The length is divided in particle diameter
W = list(range(0,int(width/(diameterPart)))) #The width is divided in particle diameter
for x in L: #loop creating a set of spheres stuck at the bottom with a (uniform) random altitude between 0.5mm (diameter/12) and 5.5mm (11diameter/12) in steps of 0.5mm. The distribution along z is centered around groundPosition.
for y in W:
n = rand.randrange(0,12,1)/12.0*diameterPart #Define a number between 0 and 11/12 diameter with steps of 1/12 diameter (0.5mm in the experiment)
O.bodies.append(sphere((x*diameterPart,centerLimitY,groundPosition - 11*diameterPart/12.0/2.0 + n),diameterPart/2.,color=(0,0,0),fixed = True,material = 'Mat'))
#Create a loose cloud of particle inside the cell
partCloud = pack.SpherePack()
partVolume = pi/6.*pow(diameterPart,3) #Volume of a particle
partNumber = int(Nlayer*lengthCell)
partCloud.makeCloud(minCorner=(0,centerLimitY,groundPosition+diameterPart),maxCorner=(length,centerLimitY,groundPosition+fluidHeight*2),rRelFuzz=0., rMean=diameterPart/2.0, num = partNumber)
partCloud.toSimulation(material='Mat') #Send this packing to simulation with material Mat
#Evaluate the deposition time considering the free-fall time of the highest particle to the ground
depoTime = sqrt(fluidHeight*2/abs(gravityVector[2]))
# Collect the ids of the spheres which are dynamic to add a fluid force through HydroForceEngines
idApplyForce = []
for b in O.bodies:
if isinstance(b.shape,Sphere) and b.dynamic:
idApplyForce+=[b.id]
#########################
#### SIMULATION LOOP#####
#########################
O.engines = [
# Reset the forces
ForceResetter(),
# Detect the potential contacts
InsertionSortCollider([Bo1_Sphere_Aabb(), Bo1_Wall_Aabb(),Bo1_Facet_Aabb(),Bo1_Box_Aabb()],label='contactDetection',allowBiggerThanPeriod = True),
# Calculate the different interactions
InteractionLoop(
[Ig2_Sphere_Sphere_ScGeom(), Ig2_Box_Sphere_ScGeom()],
[Ip2_ViscElMat_ViscElMat_ViscElPhys()],
[Law2_ScGeom_ViscElPhys_Basic()]
,label = 'interactionLoop'),
#Apply an hydrodynamic force to the particles
HydroForceEngine(densFluid = densFluidPY,viscoDyn = kinematicViscoFluid*densFluidPY,zRef = groundPosition,gravity = gravityVector,deltaZ = dz,expoRZ = expoDrag_PY,lift = False,nCell = ndimz,vCell = length*width*dz,radiusPart=diameterPart/2.,vxFluid = np.array(vxFluidPY),phiPart = phiPartPY,vxPart = vxPartPY,ids = idApplyForce, label = 'hydroEngine', dead = True,fluidWallFriction=True,channelWidth=width,phiMax = phiPartMax,iturbu = 1,ilm=2,iusl=1,irheolf=0),
#Solve the fluid volume-averaged 1D momentum balance, RANS 1D
PyRunner(command = 'fluidModel()', virtPeriod = fluidResolPeriod, label = 'fluidRes', dead = True),
#Apply fluid velocity turbulent fluctuations from a DRW random walk model
PyRunner(command = 'turbulentFluctuationPY()', virtPeriod = 0.1, label = 'turbFluct', dead = True),
#Measurement, output files
PyRunner(command = 'measure()', virtPeriod = 0.1, label = 'measurement', dead = True),
# Check if the packing is stabilized, if yes activate the hydro force on the grains and the slope.
PyRunner(command='gravityDeposition(depoTime)',virtPeriod = 0.01,label = 'gravDepo'),
#GlobalStiffnessTimeStepper, determine the time step
GlobalStiffnessTimeStepper(defaultDt = 1e-4, viscEl = False,timestepSafetyCoefficient = 0.7, label = 'GSTS'),
# Integrate the equation and calculate the new position/velocities...
NewtonIntegrator(damping=0.2, gravity=gravityVector, label='newtonIntegr')
]
#save the initial configuration to be able to recharge the simulation starting configuration easily
O.saveTmp()
#run
O.run()
####################################################################################################################################
#################################################### FUNCTION DEFINITION #########################################################
####################################################################################################################################
###### ######
### LET THE TIME FOR THE GRAVITY DEPOSITION AND ACTIVATE THE FLUID AT THE END ###
###### ######
def gravityDeposition(lim):
if O.time<lim : return
else :
print('\n Gravity deposition finished, apply fluid forces !\n')
newtonIntegr.damping = 0.0 # Set the artificial numerical damping to zero
gravDepo.dead = True # Remove the present engine for the following
hydroEngine.dead = False # Activate the HydroForceEngine
hydroEngine.vxFluid = vxFluidPY # Send the fluid velocity vector used to apply the drag fluid force on particles in HydroForceEngine (see c++ code)
hydroEngine.ReynoldStresses = np.ones(ndimz)*1e-4 # Send the simplified fluid Reynolds stresses Rxz/\rho^f used to account for the fluid velocity fluctuations in HydroForceEngine (see c++ code)
hydroEngine.turbulentFluctuation() #Initialize the fluid velocity fluctuation associated to particles to zero in HydroForceEngine, necessary to avoid segmentation fault
measurement.dead = False # Activate the measure() PyRunner
turbFluct.dead = False #Activate the turbulentFluctuationPY() PyRunner
fluidRes.dead = False # Activate the 1D fluid resolution executed in the PyRunner
hydroEngine.averageProfile() #Evaluate the solid volume fraction, velocity and drag, necessary for the fluid resolution.
hydroEngine.fluidResolution(dtFluid,dtFluid) #Initialize the fluid resolution, run the fluid resolution for 1s
return
###############
#########################################
####### ########
### FLUID RESOLUTION ###
####### ########
def fluidModel():
global vxFluidPY,taufsi
#Evaluate the average vx,vy,vz,phi,drag profiles and store it in hydroEngine, to prepare the fluid resolution
hydroEngine.averageProfile()
#Fluid resolution
hydroEngine.fluidResolution(fluidResolPeriod,dtFluid) #Solve the fluid momentum balance for a time of fluidResolPeriod s with a time step dtFluid
#update the fluid velocity for later save
vxFluidPY = np.array(hydroEngine.vxFluid)
####### ########
### TURBULENT FLUCTUATIONS MODEL ###
####### ########
def turbulentFluctuationPY():
#For stability requirement at the initialization stage
if O.time<depoTime+0.5:
print('No turbulent fluctuation in the initialization process for stability reasons!')
turbFluct.virtPeriod = 0.5
else:
# Evaluate nBed, the position of the bed which is assumed to be located around the first maximum of concentration when considering decreasing z.
nBed = 0.
bedElevation = 0.
for n in range(1,ndimz):
            # if there is a peak and its value is greater than 0.5, we consider it to be the position of the bed
if phiPartPY[ndimz - n] < phiPartPY[ndimz - n-1] and phiPartPY[ndimz - n] > 0.5 :
nBed = ndimz - n
waterDepth = (ndimz-1 - nBed)*dz
bedElevation = fluidHeight - waterDepth #Evaluate the bed elevation for the following
break
#(Re)Define the bed elevation over which fluid turbulent fluctuations will be applied.
hydroEngine.bedElevation = bedElevation
#Impose a unique constant lifetime for the turbulent fluctuation, flucTimeScale
        vMeanAboveBed = sum(vxFluidPY[nBed:])/(ndimz-nBed) # fluid velocity scale in the water depth
flucTimeScale = waterDepth/vMeanAboveBed # time scale of the fluctuation w_d/v, eddy turn over time
# New evaluation of the random fluid velocity fluctuation for each particle.
hydroEngine.turbulentFluctuation()
        turbFluct.virtPeriod = flucTimeScale #Schedule when the next fluctuations will be evaluated.
####### ########
### OUTPUT ###
####### ########
#Initialization
qsMean = 0 #Mean dimensionless sediment transport rate
zAxis = np.zeros(ndimz) #z scale, in diameter
for i in range(0,ndimz):#z scale used for the possible plot at the end
zAxis[i] = i*dz/diameterPart
# Averaging/Save
def measure():
global qsMean,vxPartPY,phiPartPY
#Evaluate the average depth profile of streamwise, spanwise and wall-normal particle velocity, particle volume fraction (and drag force for coupling with RANS fluid resolution), and store it in hydroEngine variables vxPart, phiPart, vyPart, vzPart, averageDrag.
hydroEngine.averageProfile()
#Extract the calculated vector. They can be saved and plotted afterwards.
vxPartPY = np.array(hydroEngine.vxPart)
phiPartPY = np.array(hydroEngine.phiPart)
#Evaluate the dimensionless sediment transport rate for information
qsMean = sum(phiPartPY*vxPartPY)*dz/sqrt((densPart/densFluidPY - 1)*abs(gravityVector[2])*pow(diameterPart,3))
plot.addData(SedimentRate = qsMean, time = O.time) #Plot it during the simulation
#Condition to stop the simulation after endTime seconds
if O.time>=endTime:
print('\n End of the simulation, simulated {0}s as required !\n '.format(endTime))
O.pause()
#Evaluate the Shields number from the maximum of the Reynolds stresses evaluated in the fluid resolution
shieldsNumber = max(hydroEngine.ReynoldStresses)/((densPart-densFluidPY)*diameterPart*abs(gravityVector[2]))
print('Shields number', shieldsNumber)
if saveData==1: #Save data for postprocessing
global fileNumber
nameFile = scriptPath + '/sim'+ str(nbSim) + '/data/'+ str(fileNumber)+'.py' # Name of the file that will be saved
globalParam = ['qsMean','phiPartPY','vxPartPY','vxFluidPY','zAxis'] # Variables to save
Save(nameFile, globalParam) #Save
fileNumber+=1 #Increment the file number
#Activate the fluid wall friction only at equilibrium. Not necessary for the transient.
if O.time>10:
hydroEngine.fluidWallFriction = True
#Plot the dimensionless sediment transport rate as a function of time during the simulation
plot.plots = {'time': ('SedimentRate',)}
plot.plot()
################
##########################################
#Save data details
fileNumber = 0 # Counter for the file saved
if saveData==1: #If saveData option is activated, requires a folder data
scriptPath = os.path.abspath(os.path.dirname(sys.argv[-1])) #Path where the script is stored
if os.path.exists(scriptPath + '/sim'+ str(nbSim) +'/data/')==False:
os.makedirs(scriptPath + '/sim'+ str(nbSim) +'/data/')
else:
    print('\n!! Save data: this will overwrite the files contained in the folder data/ !!\n')
#Function to save global variables in a python file which can be re-executed for post-processing
def Save(filePathName, globalVariables):
f = open(filePathName,'w')
f.write('from numpy import *\n')
for i in globalVariables:
f.write(i + ' = '+repr(globals()[i]) + '\n')
f.close()
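# Illustrative post-processing (editor's sketch): each saved file is plain
# python, so a later script can recover the profiles with e.g.
#
#   exec(open(scriptPath + '/sim6/data/0.py').read())  # defines qsMean, phiPartPY, ...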
|
gpl-2.0
|
russel1237/scikit-learn
|
sklearn/decomposition/__init__.py
|
147
|
1421
|
"""
The :mod:`sklearn.decomposition` module includes matrix decomposition
algorithms, including among others PCA, NMF or ICA. Most of the algorithms of
this module can be regarded as dimensionality reduction techniques.
"""
from .nmf import NMF, ProjectedGradientNMF
from .pca import PCA, RandomizedPCA
from .incremental_pca import IncrementalPCA
from .kernel_pca import KernelPCA
from .sparse_pca import SparsePCA, MiniBatchSparsePCA
from .truncated_svd import TruncatedSVD
from .fastica_ import FastICA, fastica
from .dict_learning import (dict_learning, dict_learning_online, sparse_encode,
DictionaryLearning, MiniBatchDictionaryLearning,
SparseCoder)
from .factor_analysis import FactorAnalysis
from ..utils.extmath import randomized_svd
from .online_lda import LatentDirichletAllocation
__all__ = ['DictionaryLearning',
'FastICA',
'IncrementalPCA',
'KernelPCA',
'MiniBatchDictionaryLearning',
'MiniBatchSparsePCA',
'NMF',
'PCA',
'ProjectedGradientNMF',
'RandomizedPCA',
'SparseCoder',
'SparsePCA',
'dict_learning',
'dict_learning_online',
'fastica',
'randomized_svd',
'sparse_encode',
'FactorAnalysis',
'TruncatedSVD',
'LatentDirichletAllocation']
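# Illustrative use (editor's sketch):
#
#   import numpy as np
#   from sklearn.decomposition import PCA
#   X = np.random.rand(100, 5)
#   X_reduced = PCA(n_components=2).fit_transform(X)  # shape (100, 2)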
|
bsd-3-clause
|
ddurando/my_pox
|
pox/lib/packet/icmp.py
|
39
|
7027
|
# Copyright 2011 James McCauley
# Copyright 2008 (C) Nicira, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This file is derived from the packet library in NOX, which was
# developed by Nicira, Inc.
#======================================================================
#
# ICMP Header Format
#
# 0 1 2 3
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Type | Code | Checksum |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Data |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
#
#======================================================================
import struct
import random
from packet_utils import *
from packet_base import packet_base
TYPE_ECHO_REPLY = 0
TYPE_DEST_UNREACH = 3
TYPE_SRC_QUENCH = 4
TYPE_REDIRECT = 5
TYPE_ECHO_REQUEST = 8
TYPE_TIME_EXCEED = 11
CODE_UNREACH_NET = 0
CODE_UNREACH_HOST = 1
CODE_UNREACH_PROTO = 2
CODE_UNREACH_PORT = 3
CODE_UNREACH_FRAG = 4
CODE_UNREACH_SRC_RTE = 5
_type_to_name = {
0 : "ECHO_REPLY",
3 : "DEST_UNREACH",
4 : "SRC_QUENCH",
5 : "REDIRECT",
8 : "ECHO_REQUEST",
11 : "TIME_EXCEED",
}
# This is such a hack; someone really needs to rewrite the
# stringizing.
def _str_rest (s, p):
if p.next is None:
return s
if isinstance(p.next, basestring):
return "[%s bytes]" % (len(p.next),)
return s+str(p.next)
#----------------------------------------------------------------------
#
# Echo Request/Reply
# 0 1 2 3
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Identifier | Sequence Number |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Data |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
#----------------------------------------------------------------------
class echo(packet_base):
"ICMP echo packet struct"
MIN_LEN = 4
def __init__(self, raw=None, prev=None, **kw):
packet_base.__init__(self)
self.prev = prev
self.id = random.randint(0, 65535)
self.seq = 0
if raw is not None:
self.parse(raw)
self._init(kw)
def __str__(self):
return "[ICMP id:%i seq:%i]" % (self.id, self.seq)
def parse(self, raw):
assert isinstance(raw, bytes)
self.raw = raw
dlen = len(raw)
if dlen < self.MIN_LEN:
self.msg('(echo parse) warning echo payload too short to '
'parse header: data len %u' % (dlen,))
return
(self.id, self.seq) = struct.unpack('!HH', raw[:self.MIN_LEN])
self.parsed = True
self.next = raw[echo.MIN_LEN:]
def hdr(self, payload):
return struct.pack('!HH', self.id, self.seq)
#----------------------------------------------------------------------
#
# Destination Unreachable
# 0 1 2 3
# 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | Unused | Next-Hop MTU |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
# | IP Header + 8 bytes of original datagram's data |
# +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
#
#----------------------------------------------------------------------
class unreach(packet_base):
"ICMP unreachable packet struct"
MIN_LEN = 4
def __init__(self, raw=None, prev=None, **kw):
packet_base.__init__(self)
self.prev = prev
self.unused = 0
self.next_mtu = 0
if raw is not None:
self.parse(raw)
self._init(kw)
def __str__(self):
s = ''.join(('[', 'm:', str(self.next_mtu), ']'))
return _str_rest(s, self)
def parse(self, raw):
assert isinstance(raw, bytes)
self.raw = raw
dlen = len(raw)
if dlen < self.MIN_LEN:
self.msg('(unreach parse) warning unreachable payload too short '
'to parse header: data len %u' % dlen)
return
(self.unused, self.next_mtu) \
= struct.unpack('!HH', raw[:self.MIN_LEN])
self.parsed = True
if dlen >= 28:
# xxx We're assuming this is IPv4!
import ipv4
self.next = ipv4.ipv4(raw=raw[unreach.MIN_LEN:],prev=self)
else:
self.next = raw[unreach.MIN_LEN:]
def hdr(self, payload):
return struct.pack('!HH', self.unused, self.next_mtu)
class icmp(packet_base):
"ICMP packet struct"
MIN_LEN = 4
def __init__(self, raw=None, prev=None, **kw):
packet_base.__init__(self)
self.prev = prev
self.type = 0
self.code = 0
self.csum = 0
if raw is not None:
self.parse(raw)
self._init(kw)
def __str__(self):
t = _type_to_name.get(self.type, str(self.type))
s = '[t:%s c:%i chk:%x]' % (t, self.code, self.csum)
return _str_rest(s, self)
def parse(self, raw):
assert isinstance(raw, bytes)
dlen = len(raw)
if dlen < self.MIN_LEN:
self.msg('(icmp parse) warning ICMP packet data too short to '
+ 'parse header: data len %u' % (dlen,))
return
(self.type, self.code, self.csum) \
= struct.unpack('!BBH', raw[:self.MIN_LEN])
self.parsed = True
if (self.type == TYPE_ECHO_REQUEST or self.type == TYPE_ECHO_REPLY):
self.next = echo(raw=raw[self.MIN_LEN:],prev=self)
elif self.type == TYPE_DEST_UNREACH:
self.next = unreach(raw=raw[self.MIN_LEN:],prev=self)
else:
self.next = raw[self.MIN_LEN:]
def hdr(self, payload):
self.csum = checksum(struct.pack('!BBH', self.type, self.code, 0) +
payload)
return struct.pack('!BBH', self.type, self.code, self.csum)
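# Illustrative round-trip (editor's sketch; keyword initialization and pack()
# are assumed to come from packet_base):
#
#   ping = icmp(type=TYPE_ECHO_REQUEST, code=0)
#   ping.next = echo(id=0x1234, seq=1)
#   raw = ping.pack()
#   parsed = icmp(raw=raw)  # parsed.next is an echo instance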
|
apache-2.0
|
Ictp/indico
|
bin/migration/CategoryDate.py
|
1
|
2445
|
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
import sys
from datetime import datetime
from indico.core.db import DBMgr
from MaKaC.common.indexes import IndexesHolder, CategoryDayIndex
def switchIndex():
if IndexesHolder()._getIdx().has_key("backupCategoryDate") and IndexesHolder()._getIdx().has_key("categoryDate"):
tmp = IndexesHolder()._getIdx()["backupCategoryDate"]
IndexesHolder()._getIdx()["backupCategoryDate"] = IndexesHolder().getIndex("categoryDate")
IndexesHolder()._getIdx()["categoryDate"] = tmp
print "Index was switched"
else:
print "Cannot switch indexes."
def migrateCategoryDateIndex():
IndexesHolder()._getIdx()["backupCategoryDate"] = IndexesHolder().getIndex("categoryDate")
newIdx = CategoryDayIndex()
newIdx.buildIndex()
IndexesHolder()._getIdx()["categoryDate"] = newIdx
print "Migration was successful"
def displayIndexes():
for idx in IndexesHolder()._getIdx():
print str(idx) + " " + str(IndexesHolder()._getIdx()[str(idx)])
def deleteBackup():
if IndexesHolder()._getIdx().has_key("backupCategoryDate"):
del IndexesHolder()._getIdx()["backupCategoryDate"]
print "Backup deleted."
else:
print "Backup not found"
def main(argv):
DBMgr.getInstance().startRequest()
print "Req start at " + str(datetime.now())
if "migrate" in argv:
migrateCategoryDateIndex()
if "switch" in argv:
switchIndex()
if "removeBackup" in argv:
deleteBackup()
if "display" in argv:
displayIndexes()
print "Req ends at " + str(datetime.now())
DBMgr.getInstance().endRequest()
if __name__ == "__main__":
sys.exit(main(sys.argv))
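# Illustrative invocation (editor's sketch), run inside an Indico environment:
#
#   python CategoryDate.py migrate display
#   # ...verify the new index, then drop the backup:
#   python CategoryDate.py removeBackup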
|
gpl-3.0
|
MisterPup/Ceilometer-Juno-Extension
|
ceilometer/network/services/fwaas.py
|
6
|
3146
|
#
# Copyright 2014 Cisco Systems,Inc.
#
# Author: Pradeep Kilambi <pkilambi@cisco.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.utils import timeutils
from ceilometer.network.services import base
from ceilometer.openstack.common.gettextutils import _
from ceilometer.openstack.common import log
from ceilometer import sample
LOG = log.getLogger(__name__)
class FirewallPollster(base.BaseServicesPollster):
"""Pollster to capture firewalls status samples."""
FIELDS = ['admin_state_up',
'description',
'name',
'status',
'firewall_policy_id',
]
@property
def default_discovery(self):
return 'fw_services'
def get_samples(self, manager, cache, resources):
resources = resources or []
for fw in resources:
LOG.debug("Firewall : %s" % fw)
status = self.get_status_id(fw['status'])
if status == -1:
# unknown status, skip this sample
                LOG.warn(_("Unknown status %(stat)s received on fw %(id)s, "
                           "skipping sample") % {'stat': fw['status'],
                                                 'id': fw['id']})
continue
yield sample.Sample(
name='network.services.firewall',
type=sample.TYPE_GAUGE,
unit='firewall',
volume=status,
user_id=None,
project_id=fw['tenant_id'],
resource_id=fw['id'],
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=self.extract_metadata(fw)
)
class FirewallPolicyPollster(base.BaseServicesPollster):
"""Pollster to capture firewall policy samples."""
    FIELDS = ['name',
              'description',
              'firewall_rules',
              'shared',
              'audited',
              ]
@property
def default_discovery(self):
return 'fw_policy'
def get_samples(self, manager, cache, resources):
resources = resources or []
for fw in resources:
LOG.debug("Firewall Policy: %s" % fw)
yield sample.Sample(
name='network.services.firewall.policy',
type=sample.TYPE_GAUGE,
unit='policy',
volume=1,
user_id=None,
project_id=fw['tenant_id'],
resource_id=fw['id'],
timestamp=timeutils.utcnow().isoformat(),
resource_metadata=self.extract_metadata(fw)
)
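# Illustrative output (editor's sketch): for a discovered firewall such as
#   {'id': 'fw1', 'tenant_id': 't1', 'status': 'ACTIVE', ...}
# FirewallPollster.get_samples() yields one gauge sample named
# 'network.services.firewall' whose volume is the numeric id of 'ACTIVE'.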
|
apache-2.0
|
raycarnes/account-financial-tools
|
account_credit_control/invoice.py
|
24
|
2923
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Vincent Renaville
# Copyright 2013 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
class AccountInvoice(models.Model):
"""Check on cancelling of an invoice"""
_inherit = 'account.invoice'
credit_policy_id = fields.Many2one(
'credit.control.policy',
string='Credit Control Policy',
help="The Credit Control Policy used for this "
"invoice. If nothing is defined, it will "
"use the account setting or the partner "
"setting.",
readonly=True,
copy=False,
groups="account_credit_control.group_account_credit_control_manager,"
"account_credit_control.group_account_credit_control_user,"
"account_credit_control.group_account_credit_control_info",
)
credit_control_line_ids = fields.One2many(
'credit.control.line', 'invoice_id',
string='Credit Lines',
readonly=True,
copy=False,
)
@api.multi
def action_cancel(self):
"""Prevent to cancel invoice related to credit line"""
# We will search if this invoice is linked with credit
cc_line_obj = self.env['credit.control.line']
for invoice in self:
nondraft_domain = [('invoice_id', '=', invoice.id),
('state', '!=', 'draft')]
cc_nondraft_lines = cc_line_obj.search(nondraft_domain)
if cc_nondraft_lines:
raise api.Warning(
_('You cannot cancel this invoice.\n'
'A payment reminder has already been '
'sent to the customer.\n'
'You must create a credit note and '
'issue a new invoice.')
)
draft_domain = [('invoice_id', '=', invoice.id),
('state', '=', 'draft')]
cc_draft_line = cc_line_obj.search(draft_domain)
cc_draft_line.unlink()
return super(AccountInvoice, self).action_cancel()
|
agpl-3.0
|
leedm777/ansible
|
lib/ansible/executor/task_executor.py
|
10
|
22095
|
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import pipes
import subprocess
import sys
import time
from ansible import constants as C
from ansible.errors import AnsibleError, AnsibleParserError
from ansible.playbook.conditional import Conditional
from ansible.playbook.task import Task
from ansible.plugins import connection_loader, action_loader
from ansible.template import Templar
from ansible.utils.listify import listify_lookup_plugin_terms
from ansible.utils.unicode import to_unicode
from ansible.utils.debug import debug
__all__ = ['TaskExecutor']
class TaskExecutor:
'''
This is the main worker class for the executor pipeline, which
handles loading an action plugin to actually dispatch the task to
a given host. This class roughly corresponds to the old Runner()
class.
'''
# Modules that we optimize by squashing loop items into a single call to
# the module
SQUASH_ACTIONS = frozenset(('apt', 'yum', 'pkgng', 'zypper', 'dnf'))
def __init__(self, host, task, job_vars, play_context, new_stdin, loader, shared_loader_obj):
self._host = host
self._task = task
self._job_vars = job_vars
self._play_context = play_context
self._new_stdin = new_stdin
self._loader = loader
self._shared_loader_obj = shared_loader_obj
try:
from __main__ import display
self._display = display
except ImportError:
from ansible.utils.display import Display
self._display = Display()
def run(self):
        '''
        The main executor entrypoint, where we determine if the specified
        task requires looping and either runs the task once per loop item
        or a single time for a non-looping task.
        '''
debug("in run()")
try:
# lookup plugins need to know if this task is executing from
# a role, so that it can properly find files/templates/etc.
roledir = None
if self._task._role:
roledir = self._task._role._role_path
self._job_vars['roledir'] = roledir
items = self._get_loop_items()
if items is not None:
if len(items) > 0:
item_results = self._run_loop(items)
# loop through the item results, and remember the changed/failed
# result flags based on any item there.
changed = False
failed = False
for item in item_results:
if 'changed' in item and item['changed']:
changed = True
if 'failed' in item and item['failed']:
failed = True
# create the overall result item, and set the changed/failed
# flags there to reflect the overall result of the loop
res = dict(results=item_results)
if changed:
res['changed'] = True
if failed:
res['failed'] = True
res['msg'] = 'One or more items failed'
else:
res['msg'] = 'All items completed'
else:
res = dict(changed=False, skipped=True, skipped_reason='No items in the list', results=[])
else:
debug("calling self._execute()")
res = self._execute()
debug("_execute() done")
# make sure changed is set in the result, if it's not present
if 'changed' not in res:
res['changed'] = False
debug("dumping result to json")
result = json.dumps(res)
debug("done dumping result, returning")
return result
except AnsibleError, e:
return dict(failed=True, msg=to_unicode(e, nonstring='simplerepr'))
finally:
try:
self._connection.close()
except AttributeError:
pass
except Exception, e:
debug("error closing connection: %s" % to_unicode(e))
def _get_loop_items(self):
'''
Loads a lookup plugin to handle the with_* portion of a task (if specified),
and returns the items result.
'''
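        # Illustrative example: a task written with "with_items: [pkg_a, pkg_b]"
        # reaches this method with self._task.loop == 'items', so the 'items'
        # lookup plugin is loaded and ['pkg_a', 'pkg_b'] is returned.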
# create a copy of the job vars here so that we can modify
# them temporarily without changing them too early for other
# parts of the code that might still need a pristine version
vars_copy = self._job_vars.copy()
# now we update them with the play context vars
self._play_context.update_vars(vars_copy)
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=vars_copy)
items = None
if self._task.loop:
if self._task.loop in self._shared_loader_obj.lookup_loader:
loop_terms = listify_lookup_plugin_terms(terms=self._task.loop_args, templar=templar, loader=self._loader, fail_on_undefined=True)
items = self._shared_loader_obj.lookup_loader.get(self._task.loop, loader=self._loader, templar=templar).run(terms=loop_terms, variables=vars_copy)
else:
raise AnsibleError("Unexpected failure in finding the lookup named '%s' in the available lookup plugins" % self._task.loop)
return items
def _run_loop(self, items):
'''
Runs the task with the loop items specified and collates the result
into an array named 'results' which is inserted into the final result
along with the item for which the loop ran.
'''
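        # Illustrative shape of the return value for items ['a', 'b']:
        #   [{'item': 'a', 'changed': True, ...}, {'item': 'b', 'changed': False, ...}]
        # run() later wraps this list as {'results': [...]} and folds the
        # per-item changed/failed flags into the overall result.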
results = []
# make copies of the job vars and task so we can add the item to
# the variables and re-validate the task with the item variable
task_vars = self._job_vars.copy()
items = self._squash_items(items, task_vars)
for item in items:
task_vars['item'] = item
try:
tmp_task = self._task.copy()
except AnsibleParserError, e:
results.append(dict(failed=True, msg=str(e)))
continue
# now we swap the internal task with the copy, execute,
# and swap them back so we can do the next iteration cleanly
(self._task, tmp_task) = (tmp_task, self._task)
res = self._execute(variables=task_vars)
(self._task, tmp_task) = (tmp_task, self._task)
# now update the result with the item info, and append the result
# to the list of results
res['item'] = item
results.append(res)
return results
def _squash_items(self, items, variables):
'''
Squash items down to a comma-separated list for certain modules which support it
(typically package management modules).
'''
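        # Illustrative example: a yum task with name={{ item }} looping over
        # ['httpd', 'mod_ssl'] is rewritten so that name='httpd,mod_ssl' and
        # the loop collapses to the single-element list ['httpd,mod_ssl'],
        # invoking the module once instead of once per item.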
if len(items) > 0 and self._task.action in self.SQUASH_ACTIONS:
final_items = []
name = self._task.args.pop('name', None) or self._task.args.pop('pkg', None)
for item in items:
variables['item'] = item
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
if self._task.evaluate_conditional(templar, variables):
if templar._contains_vars(name):
new_item = templar.template(name)
final_items.append(new_item)
else:
final_items.append(item)
joined_items = ",".join(final_items)
self._task.args['name'] = joined_items
return [joined_items]
else:
return items
def _execute(self, variables=None):
'''
The primary workhorse of the executor system, this runs the task
on the specified host (which may be the delegated_to host) and handles
the retry/until and block rescue/always execution
'''
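        # Illustrative example: with retries=3 and delay=5, a task whose result
        # keeps failing its until/failed_when checks is attempted three times,
        # sleeping five seconds between attempts, before the last result is
        # returned.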
if variables is None:
variables = self._job_vars
templar = Templar(loader=self._loader, shared_loader_obj=self._shared_loader_obj, variables=variables)
# fields set from the play/task may be based on variables, so we have to
# do the same kind of post validation step on it here before we use it.
self._play_context.post_validate(templar=templar)
# now that the play context is finalized, we can add 'magic'
# variables to the variable dictionary
self._play_context.update_vars(variables)
# Evaluate the conditional (if any) for this task, which we do before running
# the final task post-validation. We do this before the post validation due to
# the fact that the conditional may specify that the task be skipped due to a
# variable not being present which would otherwise cause validation to fail
if not self._task.evaluate_conditional(templar, variables):
debug("when evaulation failed, skipping this task")
return dict(changed=False, skipped=True, skip_reason='Conditional check failed')
# Now we do final validation on the task, which sets all fields to their final values.
# In the case of debug tasks, we save any 'var' params and restore them after validating
# so that variables are not replaced too early.
prev_var = None
if self._task.action == 'debug' and 'var' in self._task.args:
prev_var = self._task.args.pop('var')
self._task.post_validate(templar=templar)
if '_variable_params' in self._task.args:
variable_params = self._task.args.pop('_variable_params')
if isinstance(variable_params, dict):
self._display.deprecated("Using variables for task params is unsafe, especially if the variables come from an external source like facts")
variable_params.update(self._task.args)
self._task.args = variable_params
if prev_var is not None:
self._task.args['var'] = prev_var
# if this task is a TaskInclude, we just return now with a success code so the
# main thread can expand the task list for the given host
if self._task.action == 'include':
include_variables = self._task.args.copy()
include_file = include_variables.get('_raw_params')
del include_variables['_raw_params']
return dict(include=include_file, include_variables=include_variables)
# get the connection and the handler for this execution
self._connection = self._get_connection(variables)
self._connection.set_host_overrides(host=self._host)
self._handler = self._get_action_handler(connection=self._connection, templar=templar)
# And filter out any fields which were set to default(omit), and got the omit token value
omit_token = variables.get('omit')
if omit_token is not None:
self._task.args = dict(filter(lambda x: x[1] != omit_token, self._task.args.iteritems()))
# Read some values from the task, so that we can modify them if need be
retries = self._task.retries
if retries <= 0:
retries = 1
delay = self._task.delay
if delay < 0:
delay = 1
# make a copy of the job vars here, in case we need to update them
# with the registered variable value later on when testing conditions
vars_copy = variables.copy()
debug("starting attempt loop")
result = None
for attempt in range(retries):
if attempt > 0:
# FIXME: this should use the callback/message passing mechanism
print("FAILED - RETRYING: %s (%d retries left). Result was: %s" % (self._task, retries-attempt, result))
result['attempts'] = attempt + 1
debug("running the handler")
result = self._handler.run(task_vars=variables)
debug("handler run complete")
if self._task.async > 0:
# the async_wrapper module returns dumped JSON via its stdout
# response, so we parse it here and replace the result
try:
result = json.loads(result.get('stdout'))
except (TypeError, ValueError) as e:
return dict(failed=True, msg="The async task did not return valid JSON: %s" % str(e))
if self._task.poll > 0:
result = self._poll_async_result(result=result, templar=templar)
# update the local copy of vars with the registered value, if specified,
# or any facts which may have been generated by the module execution
if self._task.register:
vars_copy[self._task.register] = result
if 'ansible_facts' in result:
vars_copy.update(result['ansible_facts'])
# create a conditional object to evaluate task conditions
cond = Conditional(loader=self._loader)
# FIXME: make sure until is mutually exclusive with changed_when/failed_when
if self._task.until:
cond.when = self._task.until
if cond.evaluate_conditional(templar, vars_copy):
break
elif (self._task.changed_when or self._task.failed_when) and 'skipped' not in result:
if self._task.changed_when:
cond.when = [ self._task.changed_when ]
result['changed'] = cond.evaluate_conditional(templar, vars_copy)
if self._task.failed_when:
cond.when = [ self._task.failed_when ]
failed_when_result = cond.evaluate_conditional(templar, vars_copy)
result['failed_when_result'] = result['failed'] = failed_when_result
if failed_when_result:
break
elif 'failed' not in result:
if result.get('rc', 0) != 0:
result['failed'] = True
else:
# if the result is not failed, stop trying
break
if attempt < retries - 1:
time.sleep(delay)
# do the final update of the local variables here, for both registered
# values and any facts which may have been created
if self._task.register:
variables[self._task.register] = result
if 'ansible_facts' in result:
variables.update(result['ansible_facts'])
# save the notification target in the result, if it was specified, as
# this task may be running in a loop in which case the notification
# may be item-specific, ie. "notify: service {{item}}"
if self._task.notify is not None:
result['_ansible_notify'] = self._task.notify
# and return
debug("attempt loop complete, returning result")
return result
def _poll_async_result(self, result, templar):
'''
Polls for the specified JID to be complete
'''
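        # Illustrative example: with async=60 and poll=5, the async_status
        # module is run up to twelve times, five seconds apart, stopping as
        # soon as the job reports finished, failed or skipped.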
async_jid = result.get('ansible_job_id')
if async_jid is None:
return dict(failed=True, msg="No job id was returned by the async task")
        # Create a new pseudo-task to run the async_status module, and run
# that (with a sleep for "poll" seconds between each retry) until the
# async time limit is exceeded.
async_task = Task().load(dict(action='async_status jid=%s' % async_jid))
# Because this is an async task, the action handler is async. However,
# we need the 'normal' action handler for the status check, so get it
# now via the action_loader
normal_handler = action_loader.get(
'normal',
task=async_task,
connection=self._connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
time_left = self._task.async
while time_left > 0:
time.sleep(self._task.poll)
async_result = normal_handler.run()
if int(async_result.get('finished', 0)) == 1 or 'failed' in async_result or 'skipped' in async_result:
break
time_left -= self._task.poll
if int(async_result.get('finished', 0)) != 1:
return dict(failed=True, msg="async task did not complete within the requested time")
else:
return async_result
def _get_connection(self, variables):
'''
Reads the connection property for the host, and returns the
correct connection object from the list of connection plugins
'''
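        # Illustrative resolution: conn_type 'smart' becomes 'ssh' when the
        # local ssh binary supports ControlPersist, and falls back to
        # 'paramiko' otherwise (or when an SSH password is used on OS X).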
# FIXME: calculation of connection params/auth stuff should be done here
if not self._play_context.remote_addr:
self._play_context.remote_addr = self._host.ipv4_address
if self._task.delegate_to is not None:
self._compute_delegate(variables)
conn_type = self._play_context.connection
if conn_type == 'smart':
conn_type = 'ssh'
if sys.platform.startswith('darwin') and self._play_context.password:
# due to a current bug in sshpass on OSX, which can trigger
# a kernel panic even for non-privileged users, we revert to
# paramiko on that OS when a SSH password is specified
conn_type = "paramiko"
else:
# see if SSH can support ControlPersist if not use paramiko
try:
cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
if "Bad configuration option" in err:
conn_type = "paramiko"
except OSError:
conn_type = "paramiko"
connection = connection_loader.get(conn_type, self._play_context, self._new_stdin)
if not connection:
raise AnsibleError("the connection plugin '%s' was not found" % conn_type)
return connection
def _get_action_handler(self, connection, templar):
'''
        Returns the correct action plugin to handle the requested task action
'''
if self._task.action in action_loader:
if self._task.async != 0:
raise AnsibleError("async mode is not supported with the %s module" % module_name)
handler_name = self._task.action
elif self._task.async == 0:
handler_name = 'normal'
else:
handler_name = 'async'
handler = action_loader.get(
handler_name,
task=self._task,
connection=connection,
play_context=self._play_context,
loader=self._loader,
templar=templar,
shared_loader_obj=self._shared_loader_obj,
)
if not handler:
raise AnsibleError("the handler '%s' was not found" % handler_name)
return handler
def _compute_delegate(self, variables):
# get the vars for the delegate by its name
try:
this_info = variables['hostvars'][self._task.delegate_to]
# get the real ssh_address for the delegate and allow ansible_ssh_host to be templated
#self._play_context.remote_user = self._compute_delegate_user(self.delegate_to, delegate['inject'])
self._play_context.remote_addr = this_info.get('ansible_ssh_host', self._task.delegate_to)
self._play_context.port = this_info.get('ansible_ssh_port', self._play_context.port)
self._play_context.password = this_info.get('ansible_ssh_pass', self._play_context.password)
self._play_context.private_key_file = this_info.get('ansible_ssh_private_key_file', self._play_context.private_key_file)
self._play_context.connection = this_info.get('ansible_connection', C.DEFAULT_TRANSPORT)
self._play_context.become_pass = this_info.get('ansible_sudo_pass', self._play_context.become_pass)
except:
# make sure the inject is empty for non-inventory hosts
this_info = {}
if self._play_context.remote_addr in ('127.0.0.1', 'localhost'):
self._play_context.connection = 'local'
# Last chance to get private_key_file from global variables.
# this is useful if delegated host is not defined in the inventory
#if delegate['private_key_file'] is None:
# delegate['private_key_file'] = remote_inject.get('ansible_ssh_private_key_file', None)
#if delegate['private_key_file'] is not None:
# delegate['private_key_file'] = os.path.expanduser(delegate['private_key_file'])
for i in this_info:
if i.startswith("ansible_") and i.endswith("_interpreter"):
variables[i] = this_info[i]
|
gpl-3.0
|
camellhf/three.js
|
utils/exporters/blender/addons/io_three/exporter/_json.py
|
296
|
6976
|
import json
from .. import constants
ROUND = constants.DEFAULT_PRECISION
## THREE override function
def _json_floatstr(o):
if ROUND is not None:
o = round(o, ROUND)
return '%g' % o
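# Illustrative example (assuming ROUND == 6, the exporter's usual default):
# _json_floatstr(0.123456789) -> '0.123457' and _json_floatstr(2.0) -> '2'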
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
_key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
## HACK: hand-optimized bytecode; turn globals into locals
ValueError=ValueError,
dict=dict,
float=float,
id=id,
int=int,
isinstance=isinstance,
list=list,
str=str,
tuple=tuple,
):
'''
Overwrite json.encoder for Python 2.7 and above to not
assign each index of a list or tuple to its own row as
this is completely asinine behaviour
'''
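    # Illustrative effect of the override: with indent=4 a value such as
    # {"verts": [0, 1, 2]} is serialized with the whole list on one row
    # ("verts": [0, 1, 2]) instead of one row per element as the stock
    # encoder would produce.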
## @THREE
# Override the function
_floatstr = _json_floatstr
if _indent is not None and not isinstance(_indent, str):
_indent = ' ' * _indent
def _iterencode_list(lst, _current_indent_level):
if not lst:
yield '[]'
return
if markers is not None:
markerid = id(lst)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = lst
buf = '['
## @THREEJS
# - block the moronic functionality that puts each
# index on its own line causing insane row counts
#if _indent is not None:
# _current_indent_level += 1
# newline_indent = '\n' + _indent * _current_indent_level
# separator = _item_separator + newline_indent
# buf += newline_indent
#else:
newline_indent = None
separator = _item_separator
first = True
for value in lst:
if first:
first = False
else:
buf = separator
if isinstance(value, str):
yield buf + _encoder(value)
elif value is None:
yield buf + 'null'
elif value is True:
yield buf + 'true'
elif value is False:
yield buf + 'false'
elif isinstance(value, int):
yield buf + str(value)
elif isinstance(value, float):
yield buf + _floatstr(value)
else:
yield buf
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield ']'
if markers is not None:
del markers[markerid]
def _iterencode_dict(dct, _current_indent_level):
if not dct:
yield '{}'
return
if markers is not None:
markerid = id(dct)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = dct
yield '{'
if _indent is not None:
_current_indent_level += 1
newline_indent = '\n' + _indent * _current_indent_level
item_separator = _item_separator + newline_indent
yield newline_indent
else:
newline_indent = None
item_separator = _item_separator
first = True
if _sort_keys:
items = sorted(dct.items(), key=lambda kv: kv[0])
else:
items = dct.items()
for key, value in items:
if isinstance(key, str):
pass
# JavaScript is weakly typed for these, so it makes sense to
# also allow them. Many encoders seem to do something like this.
elif isinstance(key, float):
key = _floatstr(key)
elif key is True:
key = 'true'
elif key is False:
key = 'false'
elif key is None:
key = 'null'
elif isinstance(key, int):
key = str(key)
elif _skipkeys:
continue
else:
raise TypeError("key " + repr(key) + " is not a string")
if first:
first = False
else:
yield item_separator
yield _encoder(key)
yield _key_separator
if isinstance(value, str):
yield _encoder(value)
elif value is None:
yield 'null'
elif value is True:
yield 'true'
elif value is False:
yield 'false'
elif isinstance(value, int):
yield str(value)
elif isinstance(value, float):
yield _floatstr(value)
else:
if isinstance(value, (list, tuple)):
chunks = _iterencode_list(value, _current_indent_level)
elif isinstance(value, dict):
chunks = _iterencode_dict(value, _current_indent_level)
else:
chunks = _iterencode(value, _current_indent_level)
for chunk in chunks:
yield chunk
if newline_indent is not None:
_current_indent_level -= 1
yield '\n' + _indent * _current_indent_level
yield '}'
if markers is not None:
del markers[markerid]
def _iterencode(o, _current_indent_level):
if isinstance(o, str):
yield _encoder(o)
elif o is None:
yield 'null'
elif o is True:
yield 'true'
elif o is False:
yield 'false'
elif isinstance(o, int):
yield str(o)
elif isinstance(o, float):
yield _floatstr(o)
elif isinstance(o, (list, tuple)):
for chunk in _iterencode_list(o, _current_indent_level):
yield chunk
elif isinstance(o, dict):
for chunk in _iterencode_dict(o, _current_indent_level):
yield chunk
else:
if markers is not None:
markerid = id(o)
if markerid in markers:
raise ValueError("Circular reference detected")
markers[markerid] = o
o = _default(o)
for chunk in _iterencode(o, _current_indent_level):
yield chunk
if markers is not None:
del markers[markerid]
return _iterencode
# override the encoder
json.encoder._make_iterencode = _make_iterencode
|
mit
|
Venturi/cms
|
env/lib/python2.7/site-packages/app_data/forms.py
|
2
|
9733
|
try:
from operator import methodcaller
except ImportError:
methodcaller = lambda name: lambda o: getattr(o, name)()
from django.forms.forms import NON_FIELD_ERRORS, Form
from django.forms.formsets import formset_factory
from django.forms.models import modelform_factory, _get_foreign_key, BaseInlineFormSet, BaseModelFormSet
from django.utils.safestring import mark_safe
from django.utils import six
class AppDataForm(Form):
def __init__(self, app_container, data=None, files=None, fields=(), exclude=(), **kwargs):
self.app_container = app_container
super(AppDataForm, self).__init__(data, files, **kwargs)
if fields or exclude:
for f in self.fields.keys():
if fields and f not in fields:
del self.fields[f]
elif f in exclude:
del self.fields[f]
@property
def instance(self):
return self.app_container._instance
def save(self):
self.app_container.update(self.cleaned_data)
class BaseFieldsDescriptor(object):
" Combines the base_fiels and prefixes them properly. Descriptor because needed on class level. "
def __get__(self, instance, owner):
if not hasattr(self, '_base_fields'):
self._base_fields = bf = {}
# construct an empty model to get to the data container and thus to the form classes
app_container = getattr(owner.ModelForm._meta.model(), owner.app_data_field)
# all the fields form model_form
bf.update(owner.ModelForm.base_fields)
# go through all the app forms...
for label, opts in six.iteritems(owner.get_app_form_opts()):
Form = app_container[label].form_class
exclude = set(opts.get('exclude', ()))
fields = opts.get('fields', None)
for name, field in six.iteritems(Form.base_fields):
                    # skip fields that were not explicitly requested
if fields is not None and name not in fields:
continue
if name in exclude:
continue
# prefix the fields
bf['%s.%s' % (label, name)] = field
return self._base_fields
class AppFormOptsDescriptor(object):
def __get__(self, instance, owner):
# we cannot check hasattr because parent's app_form_opts would pick it up
if not '_app_form_opts' in owner.__dict__:
setattr(owner, '_app_form_opts', {})
return owner._app_form_opts
class MultiForm(object):
app_data_field = 'app_data'
app_form_opts = AppFormOptsDescriptor()
def __init__(self, *args, **kwargs):
# construct the main model form
self.model_form = self.ModelForm(*args, **kwargs)
if self.model_form.is_bound:
data = self.model_form.data
files = self.model_form.files
else:
data, files = None, None
# construct all the app forms
self.app_forms = {}
app_container = getattr(self.model_form.instance, self.app_data_field)
for label, label_opts in six.iteritems(self.get_app_form_opts()):
prefix = label
if self.model_form.prefix:
prefix = '%s-%s' % (self.model_form.prefix, prefix)
self.app_forms[label] = app_container[label].get_form(data, files, prefix=prefix, **label_opts)
@classmethod
def get_app_form_opts(cls):
" Utility method to combinte app_form_opts from all base classes. "
# subclass may wish to remove superclass's app_form
skip_labels = set()
form_opts = {}
# go through class hierarchy and collect form definitions
for c in cls.mro():
# not a MultiForm, skip
if not hasattr(c, 'app_form_opts'):
continue
for label, label_opts in six.iteritems(c.app_form_opts):
if label in form_opts or label in skip_labels:
# form already defined, or should be skipped
continue
elif label_opts is None:
# mark as to-be-skipped
skip_labels.add(label)
else:
# add form def
form_opts[label] = label_opts
return form_opts
@classmethod
def add_form(cls, label, form_options={}):
" Add an app_data form to the multi form after it's creation. "
cls.app_form_opts[label] = form_options.copy()
@classmethod
def remove_form(cls, label):
"""
Remove an app_data form to the multi form after it's creation.
Even if this form would be specified in a superclass it would be skipped.
"""
cls.app_form_opts[label] = None
# properties delegated to model_form
@property
def _get_validation_exclusions(self):
return self.model_form._get_validation_exclusions
@property
def cleaned_data(self):
return self.model_form.cleaned_data
@property
def _meta(self):
        # used by BaseInlineFormSet.add_fields
return self.model_form._meta
@property
def fields(self):
        # used by BaseModelFormSet.add_fields
return self.model_form.fields
@property
def _raw_value(self):
# used by FormSet._should_delete_form
return self.model_form._raw_value
@property
def instance(self):
return self.model_form.instance
@property
def media(self):
return self.model_form.media
@property
def save_m2m(self):
return self.model_form.save_m2m
@property
def is_bound(self):
return self.model_form.is_bound
# methods combining outputs from all forms
base_fields = BaseFieldsDescriptor()
def _get_all_forms(self):
yield self.model_form
for f in six.itervalues(self.app_forms):
yield f
def __unicode__(self):
return self.as_table()
def as_ul(self):
return mark_safe(u'\n'.join(map(methodcaller('as_ul'), self._get_all_forms())))
def as_table(self):
return mark_safe(u'\n'.join(map(methodcaller('as_table'), self._get_all_forms())))
def as_p(self):
return mark_safe(u'\n'.join(map(methodcaller('as_p'), self._get_all_forms())))
def is_valid(self):
return all(map(methodcaller('is_valid'), self._get_all_forms()))
def has_changed(self):
return any(map(methodcaller('has_changed'), self._get_all_forms()))
def __getitem__(self, name):
# provide access to app.field as well
app = None
if '.' in name:
app, name = name.split('.', 1)
if app is None:
form = self.model_form
else:
try:
form = self.app_forms[app]
except KeyError:
raise KeyError('AppForm %r not found in MultiForm.' % name)
try:
field = form[name]
except KeyError:
raise KeyError('Field %r not found in Form %s' % (name, form.fields))
return field
@property
def changed_data(self):
if not hasattr(self, '_changed_data'):
self._changed_data = cd = self.model_form.changed_data[:]
for label, form in six.iteritems(self.app_forms):
cd.extend(map(lambda n: '%s.%s' % (label, n), form.changed_data))
return self._changed_data
@property
def errors(self):
# combine all the errors
if not hasattr(self, '_errors'):
self._errors = self.model_form.errors.copy()
for label, form in six.iteritems(self.app_forms):
for k, v in six.iteritems(form.errors):
if k == NON_FIELD_ERRORS:
self._errors.setdefault(k, self.model_form.error_class()).extend(v)
else:
self._errors['%s.%s' % (label, k)] = v
return self._errors
def non_field_errors(self):
return self.errors.get(NON_FIELD_ERRORS, self.model_form.error_class())
def save(self, **kwargs):
# save the app_data forms first
for f in self.app_forms.values():
f.save()
# save the model itself
return self.model_form.save(**kwargs)
def multiform_factory(model, multiform=MultiForm, app_data_field='app_data', name=None, form_opts={}, **kwargs):
model_form = modelform_factory(model, **kwargs)
name = name or '%sWithAppDataForm' % model_form._meta.model.__name__
return type(
name, (multiform, ),
{'ModelForm': model_form, 'app_data_field': app_data_field, '_app_form_opts': form_opts}
)
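# Illustrative usage, assuming a hypothetical Article model that defines an
# 'app_data' field:
#   ArticleMultiForm = multiform_factory(Article, exclude=('app_data',))
#   form = ArticleMultiForm(request.POST, instance=article)
#   if form.is_valid():
#       form.save()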
def multiformset_factory(model, multiform=MultiForm, app_data_field='app_data', name=None, form_opts={},
formset=BaseModelFormSet, extra=3, can_order=False, can_delete=True, max_num=None,
**kwargs):
multiform = multiform_factory(model, multiform, app_data_field, name, form_opts, **kwargs)
FormSet = formset_factory(multiform, formset=formset, extra=extra, can_order=can_order, can_delete=can_delete, max_num=max_num)
FormSet.model = model
return FormSet
def multiinlineformset_factory(parent_model, model, multiform=MultiForm, app_data_field='app_data', name=None, form_opts={},
formset=BaseInlineFormSet, fk_name=None, **kwargs):
fk = _get_foreign_key(parent_model, model, fk_name=fk_name)
if fk.unique:
kwargs['max_num'] = 1
FormSet = multiformset_factory(model, multiform, app_data_field, name, form_opts, formset=formset, **kwargs)
FormSet.fk = fk
return FormSet
|
gpl-2.0
|
rahul67/hue
|
desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/SelfTest/PublicKey/test_ElGamal.py
|
112
|
8507
|
# -*- coding: utf-8 -*-
#
# SelfTest/PublicKey/test_ElGamal.py: Self-test for the ElGamal primitive
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.PublicKey.ElGamal"""
__revision__ = "$Id$"
import unittest
from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex
from Crypto import Random
from Crypto.PublicKey import ElGamal
from Crypto.Util.number import *
from Crypto.Util.py3compat import *
class ElGamalTest(unittest.TestCase):
#
# Test vectors
#
# There seem to be no real ElGamal test vectors available in the
# public domain. The following test vectors have been generated
# with libgcrypt 1.5.0.
#
# Encryption
tve=[
{
# 256 bits
'p' :'BA4CAEAAED8CBE952AFD2126C63EB3B345D65C2A0A73D2A3AD4138B6D09BD933',
'g' :'05',
'y' :'60D063600ECED7C7C55146020E7A31C4476E9793BEAED420FEC9E77604CAE4EF',
'x' :'1D391BA2EE3C37FE1BA175A69B2C73A11238AD77675932',
'k' :'F5893C5BAB4131264066F57AB3D8AD89E391A0B68A68A1',
'pt' :'48656C6C6F207468657265',
'ct1':'32BFD5F487966CEA9E9356715788C491EC515E4ED48B58F0F00971E93AAA5EC7',
'ct2':'7BE8FBFF317C93E82FCEF9BD515284BA506603FEA25D01C0CB874A31F315EE68'
},
{
# 512 bits
'p' :'F1B18AE9F7B4E08FDA9A04832F4E919D89462FD31BF12F92791A93519F75076D6CE3942689CDFF2F344CAFF0F82D01864F69F3AECF566C774CBACF728B81A227',
'g' :'07',
'y' :'688628C676E4F05D630E1BE39D0066178CA7AA83836B645DE5ADD359B4825A12B02EF4252E4E6FA9BEC1DB0BE90F6D7C8629CABB6E531F472B2664868156E20C',
'x' :'14E60B1BDFD33436C0DA8A22FDC14A2CCDBBED0627CE68',
'k' :'38DBF14E1F319BDA9BAB33EEEADCAF6B2EA5250577ACE7',
'pt' :'48656C6C6F207468657265',
'ct1':'290F8530C2CC312EC46178724F196F308AD4C523CEABB001FACB0506BFED676083FE0F27AC688B5C749AB3CB8A80CD6F7094DBA421FB19442F5A413E06A9772B',
'ct2':'1D69AAAD1DC50493FB1B8E8721D621D683F3BF1321BE21BC4A43E11B40C9D4D9C80DE3AAC2AB60D31782B16B61112E68220889D53C4C3136EE6F6CE61F8A23A0'
}
]
# Signature
tvs=[
{
# 256 bits
'p' :'D2F3C41EA66530838A704A48FFAC9334F4701ECE3A97CEE4C69DD01AE7129DD7',
'g' :'05',
'y' :'C3F9417DC0DAFEA6A05C1D2333B7A95E63B3F4F28CC962254B3256984D1012E7',
'x' :'165E4A39BE44D5A2D8B1332D416BC559616F536BC735BB',
'k' :'C7F0C794A7EAD726E25A47FF8928013680E73C51DD3D7D99BFDA8F492585928F',
'h' :'48656C6C6F207468657265',
'sig1':'35CA98133779E2073EF31165AFCDEB764DD54E96ADE851715495F9C635E1E7C2',
'sig2':'0135B88B1151279FE5D8078D4FC685EE81177EE9802AB123A73925FC1CB059A7',
},
{
# 512 bits
'p' :'E24CF3A4B8A6AF749DCA6D714282FE4AABEEE44A53BB6ED15FBE32B5D3C3EF9CC4124A2ECA331F3C1C1B667ACA3766825217E7B5F9856648D95F05330C6A19CF',
'g' :'0B',
'y' :'2AD3A1049CA5D4ED207B2431C79A8719BB4073D4A94E450EA6CEE8A760EB07ADB67C0D52C275EE85D7B52789061EE45F2F37D9B2AE522A51C28329766BFE68AC',
'x' :'16CBB4F46D9ECCF24FF9F7E63CAA3BD8936341555062AB',
'k' :'8A3D89A4E429FD2476D7D717251FB79BF900FFE77444E6BB8299DC3F84D0DD57ABAB50732AE158EA52F5B9E7D8813E81FD9F79470AE22F8F1CF9AEC820A78C69',
'h' :'48656C6C6F207468657265',
'sig1':'BE001AABAFFF976EC9016198FBFEA14CBEF96B000CCC0063D3324016F9E91FE80D8F9325812ED24DDB2B4D4CF4430B169880B3CE88313B53255BD4EC0378586F',
'sig2':'5E266F3F837BA204E3BBB6DBECC0611429D96F8C7CE8F4EFDF9D4CB681C2A954468A357BF4242CEC7418B51DFC081BCD21299EF5B5A0DDEF3A139A1817503DDE',
}
]
def test_generate_128(self):
self._test_random_key(128)
def test_generate_512(self):
self._test_random_key(512)
def test_encryption(self):
for tv in self.tve:
for as_longs in (0,1):
d = self.convert_tv(tv, as_longs)
key = ElGamal.construct(d['key'])
ct = key.encrypt(d['pt'], d['k'])
self.assertEquals(ct[0], d['ct1'])
self.assertEquals(ct[1], d['ct2'])
def test_decryption(self):
for tv in self.tve:
for as_longs in (0,1):
d = self.convert_tv(tv, as_longs)
key = ElGamal.construct(d['key'])
pt = key.decrypt((d['ct1'], d['ct2']))
self.assertEquals(pt, d['pt'])
def test_signing(self):
for tv in self.tvs:
for as_longs in (0,1):
d = self.convert_tv(tv, as_longs)
key = ElGamal.construct(d['key'])
sig1, sig2 = key.sign(d['h'], d['k'])
self.assertEquals(sig1, d['sig1'])
self.assertEquals(sig2, d['sig2'])
def test_verification(self):
for tv in self.tvs:
for as_longs in (0,1):
d = self.convert_tv(tv, as_longs)
key = ElGamal.construct(d['key'])
# Positive test
res = key.verify( d['h'], (d['sig1'],d['sig2']) )
self.failUnless(res)
# Negative test
res = key.verify( d['h'], (d['sig1']+1,d['sig2']) )
self.failIf(res)
def convert_tv(self, tv, as_longs=0):
"""Convert a test vector from textual form (hexadecimal ascii
to either integers or byte strings."""
key_comps = 'p','g','y','x'
tv2 = {}
for c in tv.keys():
tv2[c] = a2b_hex(tv[c])
if as_longs or c in key_comps or c in ('sig1','sig2'):
tv2[c] = bytes_to_long(tv2[c])
tv2['key']=[]
for c in key_comps:
tv2['key'] += [tv2[c]]
del tv2[c]
return tv2
def _test_random_key(self, bits):
elgObj = ElGamal.generate(bits, Random.new().read)
self._check_private_key(elgObj)
self._exercise_primitive(elgObj)
pub = elgObj.publickey()
self._check_public_key(pub)
self._exercise_public_primitive(elgObj)
def _check_private_key(self, elgObj):
# Check capabilities
self.failUnless(elgObj.has_private())
self.failUnless(elgObj.can_sign())
self.failUnless(elgObj.can_encrypt())
# Sanity check key data
self.failUnless(1<elgObj.g<(elgObj.p-1))
self.assertEquals(pow(elgObj.g, elgObj.p-1, elgObj.p), 1)
self.failUnless(1<elgObj.x<(elgObj.p-1))
self.assertEquals(pow(elgObj.g, elgObj.x, elgObj.p), elgObj.y)
def _check_public_key(self, elgObj):
# Check capabilities
self.failIf(elgObj.has_private())
self.failUnless(elgObj.can_sign())
self.failUnless(elgObj.can_encrypt())
# Sanity check key data
self.failUnless(1<elgObj.g<(elgObj.p-1))
self.assertEquals(pow(elgObj.g, elgObj.p-1, elgObj.p), 1)
def _exercise_primitive(self, elgObj):
# Test encryption/decryption
plaintext = b("Test")
ciphertext = elgObj.encrypt(plaintext, 123456789L)
plaintextP = elgObj.decrypt(ciphertext)
self.assertEquals(plaintext, plaintextP)
# Test signature/verification
signature = elgObj.sign(plaintext, 987654321L)
elgObj.verify(plaintext, signature)
def _exercise_public_primitive(self, elgObj):
plaintext = b("Test")
ciphertext = elgObj.encrypt(plaintext, 123456789L)
def get_tests(config={}):
tests = []
tests += list_test_cases(ElGamalTest)
return tests
if __name__ == '__main__':
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
|
apache-2.0
|
B-MOOC/edx-platform
|
lms/djangoapps/course_wiki/middleware.py
|
84
|
4980
|
"""Middleware for course_wiki"""
from urlparse import urlparse
from django.conf import settings
from django.http import Http404
from django.shortcuts import redirect
from django.core.exceptions import PermissionDenied
from wiki.models import reverse
from courseware.courses import get_course_with_access
from courseware.access import has_access
from student.models import CourseEnrollment
from util.request import course_id_from_url
class WikiAccessMiddleware(object):
"""
This middleware wraps calls to django-wiki in order to handle authentication and redirection
between the root wiki and the course wikis.
TODO: removing the "root wiki" would obviate the need for this middleware; it could be replaced
with a wrapper function around the wiki views. This is currently difficult or impossible to do
because there are two sets of wiki urls loaded in urls.py
"""
def _redirect_from_referrer(self, request, wiki_path):
"""
redirect to course wiki url if the referrer is from a course page
"""
course_id = course_id_from_url(request.META.get('HTTP_REFERER'))
if course_id:
# See if we are able to view the course. If we are, redirect to it
try:
_course = get_course_with_access(request.user, 'load', course_id)
return redirect("/courses/{course_id}/wiki/{path}".format(course_id=course_id.to_deprecated_string(), path=wiki_path))
except Http404:
# Even though we came from the course, we can't see it. So don't worry about it.
pass
def process_view(self, request, view_func, view_args, view_kwargs): # pylint: disable=unused-argument
"""
This function handles authentication logic for wiki urls and redirects from
the "root wiki" to the "course wiki" if the user accesses the wiki from a course url
"""
# we care only about requests to wiki urls
if not view_func.__module__.startswith('wiki.'):
return
# wiki pages are login required
if not request.user.is_authenticated():
return redirect(reverse('signin_user'), next=request.path)
course_id = course_id_from_url(request.path)
wiki_path = request.path.partition('/wiki/')[2]
if course_id:
# This is a /courses/org/name/run/wiki request
course_path = "/courses/{}".format(course_id.to_deprecated_string())
# HACK: django-wiki monkeypatches the reverse function to enable
# urls to be rewritten
reverse._transform_url = lambda url: course_path + url # pylint: disable=protected-access
# Authorization Check
# Let's see if user is enrolled or the course allows for public access
try:
course = get_course_with_access(request.user, 'load', course_id)
except Http404:
# course does not exist. redirect to root wiki.
# clearing the referrer will cause process_response not to redirect
# back to a non-existent course
request.META['HTTP_REFERER'] = ''
return redirect('/wiki/{}'.format(wiki_path))
if not course.allow_public_wiki_access:
is_enrolled = CourseEnrollment.is_enrolled(request.user, course.id)
is_staff = has_access(request.user, 'staff', course)
if not (is_enrolled or is_staff):
# if a user is logged in, but not authorized to see a page,
# we'll redirect them to the course about page
return redirect('about_course', course_id.to_deprecated_string())
# set the course onto here so that the wiki template can show the course navigation
request.course = course
else:
# this is a request for /wiki/...
# Check to see if we don't allow top-level access to the wiki via the /wiki/xxxx/yyy/zzz URLs
# this will help prevent people from writing pell-mell to the Wiki in an unstructured way
if not settings.FEATURES.get('ALLOW_WIKI_ROOT_ACCESS', False):
raise PermissionDenied()
return self._redirect_from_referrer(request, wiki_path)
def process_response(self, request, response):
"""
        Modify the redirect from /wiki/123 to /courses/org/name/run/wiki/123/
if the referrer comes from a course page
"""
if response.status_code == 302 and response['Location'].startswith('/wiki/'):
wiki_path = urlparse(response['Location']).path.split('/wiki/', 1)[1]
response = self._redirect_from_referrer(request, wiki_path) or response
# END HACK: _transform_url must be set to a no-op function after it's done its work
reverse._transform_url = lambda url: url # pylint: disable=protected-access
return response
|
agpl-3.0
|
matuu/pyafipws
|
formatos/formato_json.py
|
16
|
1324
|
#!/usr/bin/python
# -*- coding: latin-1 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTIBILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
"Módulo para manejo de archivos JSON"
__author__ = "Mariano Reingart (reingart@gmail.com)"
__copyright__ = "Copyright (C) 2011 Mariano Reingart"
__license__ = "GPL 3.0"
from decimal import Decimal
try:
    import json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        print "simplejson must be installed for JSON support"
def leer(fn="entrada.json"):
    "Parse a JSON file and return the records (trusts that the JSON is well formed)"
    jsonfile = open(fn, "rb")
    try:
        regs = json.load(jsonfile)
    finally:
        jsonfile.close()
    return regs
def escribir(filas, fn="salida.json"):
"Dado una lista de comprobantes (diccionarios), escribe JSON"
jsonfile = open(fn,"wb")
json.dump(filas, jsonfile, sort_keys=True, indent=4)
jsonfile.close()
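# Illustrative round trip: escribir([{"tipo_cbte": 1}], "salida.json") writes
# the list with sorted keys and 4-space indents; leer("salida.json") loads it
# back as a Python list.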
|
gpl-3.0
|
wbchen99/bitcoin-hnote0
|
qa/rpc-tests/test_framework/bignum.py
|
123
|
1929
|
#!/usr/bin/env python3
#
# bignum.py
#
# This file is copied from python-bitcoinlib.
#
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bignum routines"""
import struct
# generic big endian MPI format
def bn_bytes(v, have_ext=False):
ext = 0
if have_ext:
ext = 1
return ((v.bit_length()+7)//8) + ext
def bn2bin(v):
s = bytearray()
i = bn_bytes(v)
while i > 0:
s.append((v >> ((i-1) * 8)) & 0xff)
i -= 1
return s
def bin2bn(s):
l = 0
for ch in s:
l = (l << 8) | ch
return l
def bn2mpi(v):
have_ext = False
if v.bit_length() > 0:
have_ext = (v.bit_length() & 0x07) == 0
neg = False
if v < 0:
neg = True
v = -v
s = struct.pack(b">I", bn_bytes(v, have_ext))
ext = bytearray()
if have_ext:
ext.append(0)
v_bin = bn2bin(v)
if neg:
if have_ext:
ext[0] |= 0x80
else:
v_bin[0] |= 0x80
return s + ext + v_bin
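# Illustrative example: bn2mpi(0x1234) == b'\x00\x00\x00\x02\x12\x34', i.e. a
# 4-byte big-endian length prefix followed by the magnitude bytes.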
def mpi2bn(s):
if len(s) < 4:
return None
s_size = bytes(s[:4])
v_len = struct.unpack(b">I", s_size)[0]
if len(s) != (v_len + 4):
return None
if v_len == 0:
return 0
v_str = bytearray(s[4:])
neg = False
i = v_str[0]
if i & 0x80:
neg = True
i &= ~0x80
v_str[0] = i
v = bin2bn(v_str)
if neg:
return -v
return v
# bitcoin-specific little endian format, with implicit size
def mpi2vch(s):
r = s[4:] # strip size
r = r[::-1] # reverse string, converting BE->LE
return r
def bn2vch(v):
return bytes(mpi2vch(bn2mpi(v)))
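# Illustrative example: bn2vch(0x1234) == b'\x34\x12' -- the MPI magnitude with
# the length prefix stripped and the byte order reversed to little endian.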
def vch2mpi(s):
r = struct.pack(b">I", len(s)) # size
r += s[::-1] # reverse string, converting LE->BE
return r
def vch2bn(s):
return mpi2bn(vch2mpi(s))
|
mit
|
Innovahn/cybex
|
addons/crm/base_partner_merge.py
|
75
|
30912
|
#!/usr/bin/env python
from __future__ import absolute_import
from email.utils import parseaddr
import functools
import htmlentitydefs
import itertools
import logging
import operator
import psycopg2
import re
from ast import literal_eval
from openerp.tools import mute_logger
# Validation Library https://pypi.python.org/pypi/validate_email/1.1
from .validate_email import validate_email
import openerp
from openerp.osv import osv, orm
from openerp.osv import fields
from openerp.osv.orm import browse_record
from openerp.tools.translate import _
pattern = re.compile("&(\w+?);")
_logger = logging.getLogger('base.partner.merge')
# http://www.php2python.com/wiki/function.html-entity-decode/
def html_entity_decode_char(m, defs=htmlentitydefs.entitydefs):
try:
return defs[m.group(1)]
except KeyError:
return m.group(0)
def html_entity_decode(string):
return pattern.sub(html_entity_decode_char, string)
def sanitize_email(email):
assert isinstance(email, basestring) and email
result = re.subn(r';|/|:', ',',
html_entity_decode(email or ''))[0].split(',')
emails = [parseaddr(email)[1]
for item in result
for email in item.split()]
return [email.lower()
for email in emails
if validate_email(email)]
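# Illustrative example (assuming validate_email accepts both addresses):
# sanitize_email('Foo <A@B.com>; c@d.org') returns ['a@b.com', 'c@d.org'],
# after separators are normalized to commas and the addresses are parsed,
# lowercased and validated.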
def is_integer_list(ids):
return all(isinstance(i, (int, long)) for i in ids)
class ResPartner(osv.Model):
_inherit = 'res.partner'
_columns = {
'id': fields.integer('Id', readonly=True),
'create_date': fields.datetime('Create Date', readonly=True),
}
class MergePartnerLine(osv.TransientModel):
_name = 'base.partner.merge.line'
_columns = {
'wizard_id': fields.many2one('base.partner.merge.automatic.wizard',
'Wizard'),
'min_id': fields.integer('MinID'),
'aggr_ids': fields.char('Ids', required=True),
}
_order = 'min_id asc'
class MergePartnerAutomatic(osv.TransientModel):
"""
The idea behind this wizard is to create a list of potential partners to
    merge. We use two objects: the first one is the wizard for the end user,
    and the second one contains the partner list to merge.
"""
_name = 'base.partner.merge.automatic.wizard'
_columns = {
# Group by
'group_by_email': fields.boolean('Email'),
'group_by_name': fields.boolean('Name'),
'group_by_is_company': fields.boolean('Is Company'),
'group_by_vat': fields.boolean('VAT'),
'group_by_parent_id': fields.boolean('Parent Company'),
'state': fields.selection([('option', 'Option'),
('selection', 'Selection'),
('finished', 'Finished')],
'State',
readonly=True,
required=True),
'number_group': fields.integer("Group of Contacts", readonly=True),
'current_line_id': fields.many2one('base.partner.merge.line', 'Current Line'),
'line_ids': fields.one2many('base.partner.merge.line', 'wizard_id', 'Lines'),
'partner_ids': fields.many2many('res.partner', string='Contacts'),
'dst_partner_id': fields.many2one('res.partner', string='Destination Contact'),
'exclude_contact': fields.boolean('A user associated to the contact'),
'exclude_journal_item': fields.boolean('Journal Items associated to the contact'),
'maximum_group': fields.integer("Maximum of Group of Contacts"),
}
def default_get(self, cr, uid, fields, context=None):
if context is None:
context = {}
res = super(MergePartnerAutomatic, self).default_get(cr, uid, fields, context)
if context.get('active_model') == 'res.partner' and context.get('active_ids'):
partner_ids = context['active_ids']
res['state'] = 'selection'
res['partner_ids'] = partner_ids
res['dst_partner_id'] = self._get_ordered_partner(cr, uid, partner_ids, context=context)[-1].id
return res
_defaults = {
'state': 'option'
}
def get_fk_on(self, cr, table):
q = """ SELECT cl1.relname as table,
att1.attname as column
FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
pg_attribute as att1, pg_attribute as att2
WHERE con.conrelid = cl1.oid
AND con.confrelid = cl2.oid
AND array_lower(con.conkey, 1) = 1
AND con.conkey[1] = att1.attnum
AND att1.attrelid = cl1.oid
AND cl2.relname = %s
AND att2.attname = 'id'
AND array_lower(con.confkey, 1) = 1
AND con.confkey[1] = att2.attnum
AND att2.attrelid = cl2.oid
AND con.contype = 'f'
"""
return cr.execute(q, (table,))
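    # Illustrative example: get_fk_on(cr, 'res_partner') leaves rows such as
    # ('res_users', 'partner_id') in the cursor, one for every (table, column)
    # pair holding a foreign key to res_partner.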
def _update_foreign_keys(self, cr, uid, src_partners, dst_partner, context=None):
_logger.debug('_update_foreign_keys for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))
# find the many2one relation to a partner
proxy = self.pool.get('res.partner')
self.get_fk_on(cr, 'res_partner')
# ignore two tables
for table, column in cr.fetchall():
if 'base_partner_merge_' in table:
continue
partner_ids = tuple(map(int, src_partners))
query = "SELECT column_name FROM information_schema.columns WHERE table_name LIKE '%s'" % (table)
cr.execute(query, ())
columns = []
for data in cr.fetchall():
if data[0] != column:
columns.append(data[0])
query_dic = {
'table': table,
'column': column,
'value': columns[0],
}
if len(columns) <= 1:
# unique key treated
query = """
UPDATE "%(table)s" as ___tu
SET %(column)s = %%s
WHERE
%(column)s = %%s AND
NOT EXISTS (
SELECT 1
FROM "%(table)s" as ___tw
WHERE
%(column)s = %%s AND
___tu.%(value)s = ___tw.%(value)s
)""" % query_dic
for partner_id in partner_ids:
cr.execute(query, (dst_partner.id, partner_id, dst_partner.id))
else:
try:
with mute_logger('openerp.sql_db'), cr.savepoint():
query = 'UPDATE "%(table)s" SET %(column)s = %%s WHERE %(column)s IN %%s' % query_dic
cr.execute(query, (dst_partner.id, partner_ids,))
if column == proxy._parent_name and table == 'res_partner':
query = """
WITH RECURSIVE cycle(id, parent_id) AS (
SELECT id, parent_id FROM res_partner
UNION
SELECT cycle.id, res_partner.parent_id
FROM res_partner, cycle
WHERE res_partner.id = cycle.parent_id AND
cycle.id != cycle.parent_id
)
SELECT id FROM cycle WHERE id = parent_id AND id = %s
"""
cr.execute(query, (dst_partner.id,))
except psycopg2.Error:
# updating fails, most likely due to a violated unique constraint
# keeping record with nonexistent partner_id is useless, better delete it
                    query = 'DELETE FROM %(table)s WHERE %(column)s IN %%s' % query_dic
                    cr.execute(query, (partner_ids,))
def _update_reference_fields(self, cr, uid, src_partners, dst_partner, context=None):
_logger.debug('_update_reference_fields for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))
def update_records(model, src, field_model='model', field_id='res_id', context=None):
proxy = self.pool.get(model)
if proxy is None:
return
domain = [(field_model, '=', 'res.partner'), (field_id, '=', src.id)]
ids = proxy.search(cr, openerp.SUPERUSER_ID, domain, context=context)
try:
with mute_logger('openerp.sql_db'), cr.savepoint():
return proxy.write(cr, openerp.SUPERUSER_ID, ids, {field_id: dst_partner.id}, context=context)
except psycopg2.Error:
# updating fails, most likely due to a violated unique constraint
# keeping record with nonexistent partner_id is useless, better delete it
return proxy.unlink(cr, openerp.SUPERUSER_ID, ids, context=context)
update_records = functools.partial(update_records, context=context)
for partner in src_partners:
update_records('calendar', src=partner, field_model='model_id.model')
update_records('ir.attachment', src=partner, field_model='res_model')
update_records('mail.followers', src=partner, field_model='res_model')
update_records('mail.message', src=partner)
update_records('marketing.campaign.workitem', src=partner, field_model='object_id.model')
update_records('ir.model.data', src=partner)
proxy = self.pool['ir.model.fields']
domain = [('ttype', '=', 'reference')]
record_ids = proxy.search(cr, openerp.SUPERUSER_ID, domain, context=context)
for record in proxy.browse(cr, openerp.SUPERUSER_ID, record_ids, context=context):
try:
proxy_model = self.pool[record.model]
field_type = proxy_model._columns[record.name].__class__._type
except KeyError:
# unknown model or field => skip
continue
if field_type == 'function':
continue
for partner in src_partners:
domain = [
(record.name, '=', 'res.partner,%d' % partner.id)
]
model_ids = proxy_model.search(cr, openerp.SUPERUSER_ID, domain, context=context)
values = {
record.name: 'res.partner,%d' % dst_partner.id,
}
proxy_model.write(cr, openerp.SUPERUSER_ID, model_ids, values, context=context)
def _update_values(self, cr, uid, src_partners, dst_partner, context=None):
_logger.debug('_update_values for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))
columns = dst_partner._columns
def write_serializer(column, item):
if isinstance(item, browse_record):
return item.id
else:
return item
values = dict()
for column, field in columns.iteritems():
if field._type not in ('many2many', 'one2many') and not isinstance(field, fields.function):
for item in itertools.chain(src_partners, [dst_partner]):
if item[column]:
values[column] = write_serializer(column, item[column])
values.pop('id', None)
parent_id = values.pop('parent_id', None)
dst_partner.write(values)
if parent_id and parent_id != dst_partner.id:
try:
dst_partner.write({'parent_id': parent_id})
except (osv.except_osv, orm.except_orm):
_logger.info('Skip recursive partner hierarchies for parent_id %s of partner: %s', parent_id, dst_partner.id)
@mute_logger('openerp.osv.expression', 'openerp.models')
def _merge(self, cr, uid, partner_ids, dst_partner=None, context=None):
proxy = self.pool.get('res.partner')
partner_ids = proxy.exists(cr, uid, list(partner_ids), context=context)
if len(partner_ids) < 2:
return
if len(partner_ids) > 3:
raise osv.except_osv(_('Error'), _("For safety reasons, you cannot merge more than 3 contacts together. You can re-open the wizard several times if needed."))
if openerp.SUPERUSER_ID != uid and len(set(partner.email for partner in proxy.browse(cr, uid, partner_ids, context=context))) > 1:
raise osv.except_osv(_('Error'), _("All contacts must have the same email. Only the Administrator can merge contacts with different emails."))
if dst_partner and dst_partner.id in partner_ids:
src_partners = proxy.browse(cr, uid, [id for id in partner_ids if id != dst_partner.id], context=context)
else:
ordered_partners = self._get_ordered_partner(cr, uid, partner_ids, context)
dst_partner = ordered_partners[-1]
src_partners = ordered_partners[:-1]
_logger.info("dst_partner: %s", dst_partner.id)
if openerp.SUPERUSER_ID != uid and self._model_is_installed(cr, uid, 'account.move.line', context=context) and \
self.pool.get('account.move.line').search(cr, openerp.SUPERUSER_ID, [('partner_id', 'in', [partner.id for partner in src_partners])], context=context):
raise osv.except_osv(_('Error'), _("Only the destination contact may be linked to existing Journal Items. Please ask the Administrator if you need to merge several contacts linked to existing Journal Items."))
call_it = lambda function: function(cr, uid, src_partners, dst_partner,
context=context)
call_it(self._update_foreign_keys)
call_it(self._update_reference_fields)
call_it(self._update_values)
_logger.info('(uid = %s) merged the partners %r with %s', uid, list(map(operator.attrgetter('id'), src_partners)), dst_partner.id)
dst_partner.message_post(body='%s %s'%(_("Merged with the following partners:"), ", ".join('%s<%s>(ID %s)' % (p.name, p.email or 'n/a', p.id) for p in src_partners)))
for partner in src_partners:
partner.unlink()
def clean_emails(self, cr, uid, context=None):
"""
Clean the email address of the partner, if there is an email field with
a mimum of two addresses, the system will create a new partner, with the
information of the previous one and will copy the new cleaned email into
the email field.
"""
context = dict(context or {})
proxy_model = self.pool['ir.model.fields']
field_ids = proxy_model.search(cr, uid, [('model', '=', 'res.partner'),
('ttype', 'like', '%2many')],
context=context)
fields = proxy_model.read(cr, uid, field_ids, context=context)
reset_fields = dict((field['name'], []) for field in fields)
proxy_partner = self.pool['res.partner']
context['active_test'] = False
ids = proxy_partner.search(cr, uid, [], context=context)
        fields = ['name', 'var', 'partner_id', 'is_company', 'email']
partners = proxy_partner.read(cr, uid, ids, fields, context=context)
partners.sort(key=operator.itemgetter('id'))
partners_len = len(partners)
_logger.info('partner_len: %r', partners_len)
for idx, partner in enumerate(partners):
if not partner['email']:
continue
percent = (idx / float(partners_len)) * 100.0
_logger.info('idx: %r', idx)
_logger.info('percent: %r', percent)
try:
emails = sanitize_email(partner['email'])
head, tail = emails[:1], emails[1:]
email = head[0] if head else False
proxy_partner.write(cr, uid, [partner['id']],
{'email': email}, context=context)
for email in tail:
values = dict(reset_fields, email=email)
proxy_partner.copy(cr, uid, partner['id'], values,
context=context)
except Exception:
_logger.exception("There is a problem with this partner: %r", partner)
raise
return True
def close_cb(self, cr, uid, ids, context=None):
return {'type': 'ir.actions.act_window_close'}
def _generate_query(self, fields, maximum_group=100):
sql_fields = []
for field in fields:
if field in ['email', 'name']:
sql_fields.append('lower(%s)' % field)
elif field in ['vat']:
sql_fields.append("replace(%s, ' ', '')" % field)
else:
sql_fields.append(field)
group_fields = ', '.join(sql_fields)
filters = []
for field in fields:
if field in ['email', 'name', 'vat']:
filters.append((field, 'IS NOT', 'NULL'))
criteria = ' AND '.join('%s %s %s' % (field, operator, value)
for field, operator, value in filters)
text = [
"SELECT min(id), array_agg(id)",
"FROM res_partner",
]
if criteria:
text.append('WHERE %s' % criteria)
text.extend([
"GROUP BY %s" % group_fields,
"HAVING COUNT(*) >= 2",
"ORDER BY min(id)",
])
if maximum_group:
text.extend([
"LIMIT %s" % maximum_group,
])
return ' '.join(text)
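    # Illustrative example: _generate_query(['email'], 100) produces roughly
    #   SELECT min(id), array_agg(id) FROM res_partner
    #   WHERE email IS NOT NULL GROUP BY lower(email)
    #   HAVING COUNT(*) >= 2 ORDER BY min(id) LIMIT 100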
def _compute_selected_groupby(self, this):
group_by_str = 'group_by_'
group_by_len = len(group_by_str)
fields = [
key[group_by_len:]
for key in self._columns.keys()
if key.startswith(group_by_str)
]
groups = [
field
for field in fields
if getattr(this, '%s%s' % (group_by_str, field), False)
]
if not groups:
raise osv.except_osv(_('Error'),
_("You have to specify a filter for your selection"))
return groups
def next_cb(self, cr, uid, ids, context=None):
"""
        Don't compute anything; skip the current group and move to the next screen.
"""
context = dict(context or {}, active_test=False)
this = self.browse(cr, uid, ids[0], context=context)
if this.current_line_id:
this.current_line_id.unlink()
return self._next_screen(cr, uid, this, context)
def _get_ordered_partner(self, cr, uid, partner_ids, context=None):
partners = self.pool.get('res.partner').browse(cr, uid, list(partner_ids), context=context)
ordered_partners = sorted(sorted(partners,
key=operator.attrgetter('create_date'), reverse=True),
key=operator.attrgetter('active'), reverse=True)
return ordered_partners
def _next_screen(self, cr, uid, this, context=None):
this.refresh()
values = {}
if this.line_ids:
# in this case, we try to find the next record.
current_line = this.line_ids[0]
current_partner_ids = literal_eval(current_line.aggr_ids)
values.update({
'current_line_id': current_line.id,
'partner_ids': [(6, 0, current_partner_ids)],
'dst_partner_id': self._get_ordered_partner(cr, uid, current_partner_ids, context)[-1].id,
'state': 'selection',
})
else:
values.update({
'current_line_id': False,
'partner_ids': [],
'state': 'finished',
})
this.write(values)
return {
'type': 'ir.actions.act_window',
'res_model': this._name,
'res_id': this.id,
'view_mode': 'form',
'target': 'new',
}
def _model_is_installed(self, cr, uid, model, context=None):
proxy = self.pool.get('ir.model')
domain = [('model', '=', model)]
return proxy.search_count(cr, uid, domain, context=context) > 0
def _partner_use_in(self, cr, uid, aggr_ids, models, context=None):
"""
Check if there is no occurence of this group of partner in the selected
model
"""
for model, field in models.iteritems():
proxy = self.pool.get(model)
domain = [(field, 'in', aggr_ids)]
if proxy.search_count(cr, uid, domain, context=context):
return True
return False
def compute_models(self, cr, uid, ids, context=None):
"""
Compute the different models needed by the system if you want to exclude
some partners.
"""
assert is_integer_list(ids)
this = self.browse(cr, uid, ids[0], context=context)
models = {}
if this.exclude_contact:
models['res.users'] = 'partner_id'
if self._model_is_installed(cr, uid, 'account.move.line', context=context) and this.exclude_journal_item:
models['account.move.line'] = 'partner_id'
return models
def _process_query(self, cr, uid, ids, query, context=None):
"""
Execute the select request and write the result in this wizard
"""
proxy = self.pool.get('base.partner.merge.line')
this = self.browse(cr, uid, ids[0], context=context)
models = self.compute_models(cr, uid, ids, context=context)
cr.execute(query)
counter = 0
for min_id, aggr_ids in cr.fetchall():
if models and self._partner_use_in(cr, uid, aggr_ids, models, context=context):
continue
values = {
'wizard_id': this.id,
'min_id': min_id,
'aggr_ids': aggr_ids,
}
proxy.create(cr, uid, values, context=context)
counter += 1
values = {
'state': 'selection',
'number_group': counter,
}
this.write(values)
_logger.info("counter: %s", counter)
def start_process_cb(self, cr, uid, ids, context=None):
"""
Start the process.
* Compute the selected groups (with duplication)
* If the user has selected the 'exclude_XXX' fields, avoid the partners.
"""
assert is_integer_list(ids)
context = dict(context or {}, active_test=False)
this = self.browse(cr, uid, ids[0], context=context)
groups = self._compute_selected_groupby(this)
query = self._generate_query(groups, this.maximum_group)
self._process_query(cr, uid, ids, query, context=context)
return self._next_screen(cr, uid, this, context)
def automatic_process_cb(self, cr, uid, ids, context=None):
assert is_integer_list(ids)
this = self.browse(cr, uid, ids[0], context=context)
this.start_process_cb()
this.refresh()
for line in this.line_ids:
partner_ids = literal_eval(line.aggr_ids)
self._merge(cr, uid, partner_ids, context=context)
line.unlink()
cr.commit()
this.write({'state': 'finished'})
return {
'type': 'ir.actions.act_window',
'res_model': this._name,
'res_id': this.id,
'view_mode': 'form',
'target': 'new',
}
def parent_migration_process_cb(self, cr, uid, ids, context=None):
assert is_integer_list(ids)
context = dict(context or {}, active_test=False)
this = self.browse(cr, uid, ids[0], context=context)
query = """
SELECT
min(p1.id),
array_agg(DISTINCT p1.id)
FROM
res_partner as p1
INNER join
res_partner as p2
ON
p1.email = p2.email AND
p1.name = p2.name AND
(p1.parent_id = p2.id OR p1.id = p2.parent_id)
WHERE
p2.id IS NOT NULL
GROUP BY
p1.email,
p1.name,
CASE WHEN p1.parent_id = p2.id THEN p2.id
ELSE p1.id
END
HAVING COUNT(*) >= 2
ORDER BY
min(p1.id)
"""
self._process_query(cr, uid, ids, query, context=context)
for line in this.line_ids:
partner_ids = literal_eval(line.aggr_ids)
self._merge(cr, uid, partner_ids, context=context)
line.unlink()
cr.commit()
this.write({'state': 'finished'})
cr.execute("""
UPDATE
res_partner
SET
is_company = NULL,
parent_id = NULL
WHERE
parent_id = id
""")
return {
'type': 'ir.actions.act_window',
'res_model': this._name,
'res_id': this.id,
'view_mode': 'form',
'target': 'new',
}
def update_all_process_cb(self, cr, uid, ids, context=None):
assert is_integer_list(ids)
# WITH RECURSIVE cycle(id, parent_id) AS (
# SELECT id, parent_id FROM res_partner
# UNION
# SELECT cycle.id, res_partner.parent_id
# FROM res_partner, cycle
# WHERE res_partner.id = cycle.parent_id AND
# cycle.id != cycle.parent_id
# )
# UPDATE res_partner
# SET parent_id = NULL
# WHERE id in (SELECT id FROM cycle WHERE id = parent_id);
this = self.browse(cr, uid, ids[0], context=context)
        self.parent_migration_process_cb(cr, uid, ids, context=context)
list_merge = [
{'group_by_vat': True, 'group_by_email': True, 'group_by_name': True},
# {'group_by_name': True, 'group_by_is_company': True, 'group_by_parent_id': True},
# {'group_by_email': True, 'group_by_is_company': True, 'group_by_parent_id': True},
# {'group_by_name': True, 'group_by_vat': True, 'group_by_is_company': True, 'exclude_journal_item': True},
# {'group_by_email': True, 'group_by_vat': True, 'group_by_is_company': True, 'exclude_journal_item': True},
# {'group_by_email': True, 'group_by_is_company': True, 'exclude_contact': True, 'exclude_journal_item': True},
# {'group_by_name': True, 'group_by_is_company': True, 'exclude_contact': True, 'exclude_journal_item': True}
]
for merge_value in list_merge:
id = self.create(cr, uid, merge_value, context=context)
self.automatic_process_cb(cr, uid, [id], context=context)
cr.execute("""
UPDATE
res_partner
SET
is_company = NULL
WHERE
parent_id IS NOT NULL AND
is_company IS NOT NULL
""")
# cr.execute("""
# UPDATE
# res_partner as p1
# SET
# is_company = NULL,
# parent_id = (
# SELECT p2.id
# FROM res_partner as p2
# WHERE p2.email = p1.email AND
# p2.parent_id != p2.id
# LIMIT 1
# )
# WHERE
# p1.parent_id = p1.id
# """)
return self._next_screen(cr, uid, this, context)
def merge_cb(self, cr, uid, ids, context=None):
assert is_integer_list(ids)
context = dict(context or {}, active_test=False)
this = self.browse(cr, uid, ids[0], context=context)
partner_ids = set(map(int, this.partner_ids))
if not partner_ids:
this.write({'state': 'finished'})
return {
'type': 'ir.actions.act_window',
'res_model': this._name,
'res_id': this.id,
'view_mode': 'form',
'target': 'new',
}
self._merge(cr, uid, partner_ids, this.dst_partner_id, context=context)
if this.current_line_id:
this.current_line_id.unlink()
return self._next_screen(cr, uid, this, context)
def auto_set_parent_id(self, cr, uid, ids, context=None):
assert is_integer_list(ids)
        # select partners with a grade, the most invoiced (open/paid) first
partner_treated = ['@gmail.com']
cr.execute(""" SELECT p.id, p.email
FROM res_partner as p
LEFT JOIN account_invoice as a
ON p.id = a.partner_id AND a.state in ('open','paid')
WHERE p.grade_id is NOT NULL
GROUP BY p.id
ORDER BY COUNT(a.id) DESC
""")
re_email = re.compile(r".*@")
for id, email in cr.fetchall():
# check email domain
email = re_email.sub("@", email or "")
if not email or email in partner_treated:
continue
partner_treated.append(email)
            # don't update the partners if more than one partner on this email domain has invoices
cr.execute(""" SELECT *
FROM res_partner as p
WHERE p.id != %s AND p.email LIKE '%%%s' AND
EXISTS (SELECT * FROM account_invoice as a WHERE p.id = a.partner_id AND a.state in ('open','paid'))
""" % (id, email))
if len(cr.fetchall()) > 1:
_logger.info("%s MORE OF ONE COMPANY", email)
continue
# to display changed values
cr.execute(""" SELECT id,email
FROM res_partner
WHERE parent_id != %s AND id != %s AND email LIKE '%%%s'
""" % (id, id, email))
_logger.info("%r", cr.fetchall())
            # update: attach every partner on this domain to the chosen parent
cr.execute(""" UPDATE res_partner
SET parent_id = %s
WHERE id != %s AND email LIKE '%%%s'
""" % (id, id, email))
return False
|
agpl-3.0
|
dagwieers/ansible
|
test/integration/targets/module_utils/library/test.py
|
172
|
2948
|
#!/usr/bin/python
# Most of these names are only available via PluginLoader so pylint doesn't
# know they exist
# pylint: disable=no-name-in-module
results = {}
# Test import with no from
import ansible.module_utils.foo0
results['foo0'] = ansible.module_utils.foo0.data
# Test depthful import with no from
import ansible.module_utils.bar0.foo
results['bar0'] = ansible.module_utils.bar0.foo.data
# Test import of module_utils/foo1.py
from ansible.module_utils import foo1
results['foo1'] = foo1.data
# Test import of an identifier inside of module_utils/foo2.py
from ansible.module_utils.foo2 import data
results['foo2'] = data
# Test import of module_utils/bar1/__init__.py
from ansible.module_utils import bar1
results['bar1'] = bar1.data
# Test import of an identifier inside of module_utils/bar2/__init__.py
from ansible.module_utils.bar2 import data
results['bar2'] = data
# Test import of module_utils/baz1/one.py
from ansible.module_utils.baz1 import one
results['baz1'] = one.data
# Test import of an identifier inside of module_utils/baz2/one.py
from ansible.module_utils.baz2.one import data
results['baz2'] = data
# Test import of module_utils/spam1/ham/eggs/__init__.py
from ansible.module_utils.spam1.ham import eggs
results['spam1'] = eggs.data
# Test import of an identifier inside module_utils/spam2/ham/eggs/__init__.py
from ansible.module_utils.spam2.ham.eggs import data
results['spam2'] = data
# Test import of module_utils/spam3/ham/bacon.py
from ansible.module_utils.spam3.ham import bacon
results['spam3'] = bacon.data
# Test import of an identifier inside of module_utils/spam4/ham/bacon.py
from ansible.module_utils.spam4.ham.bacon import data
results['spam4'] = data
# Test import of module_utils.spam5.ham bacon and eggs (modules)
from ansible.module_utils.spam5.ham import bacon, eggs
results['spam5'] = (bacon.data, eggs.data)
# Test import of module_utils.spam6.ham bacon and eggs (identifiers)
from ansible.module_utils.spam6.ham import bacon, eggs
results['spam6'] = (bacon, eggs)
# Test import of module_utils.spam7.ham bacon and eggs (module and identifier)
from ansible.module_utils.spam7.ham import bacon, eggs
results['spam7'] = (bacon.data, eggs)
# Test import of module_utils/spam8/ham/bacon.py and module_utils/spam8/ham/eggs.py separately
from ansible.module_utils.spam8.ham import bacon
from ansible.module_utils.spam8.ham import eggs
results['spam8'] = (bacon.data, eggs)
# Test that import of module_utils/qux1/quux.py using as works
from ansible.module_utils.qux1 import quux as one
results['qux1'] = one.data
# Test that importing qux2/quux.py and qux2/quuz.py using as works
from ansible.module_utils.qux2 import quux as one, quuz as two
results['qux2'] = (one.data, two.data)
# Test depth
from ansible.module_utils.a.b.c.d.e.f.g.h import data
results['abcdefgh'] = data
from ansible.module_utils.basic import AnsibleModule
AnsibleModule(argument_spec=dict()).exit_json(**results)
|
gpl-3.0
|
anirudhjayaraman/scikit-learn
|
sklearn/semi_supervised/tests/test_label_propagation.py
|
307
|
1974
|
""" test the label propagation module """
import nose
import numpy as np
from sklearn.semi_supervised import label_propagation
from numpy.testing import assert_array_almost_equal
from numpy.testing import assert_array_equal
ESTIMATORS = [
(label_propagation.LabelPropagation, {'kernel': 'rbf'}),
(label_propagation.LabelPropagation, {'kernel': 'knn', 'n_neighbors': 2}),
(label_propagation.LabelSpreading, {'kernel': 'rbf'}),
(label_propagation.LabelSpreading, {'kernel': 'knn', 'n_neighbors': 2})
]
def test_fit_transduction():
samples = [[1., 0.], [0., 2.], [1., 3.]]
labels = [0, 1, -1]
for estimator, parameters in ESTIMATORS:
clf = estimator(**parameters).fit(samples, labels)
nose.tools.assert_equal(clf.transduction_[2], 1)
def test_distribution():
samples = [[1., 0.], [0., 1.], [1., 1.]]
labels = [0, 1, -1]
for estimator, parameters in ESTIMATORS:
clf = estimator(**parameters).fit(samples, labels)
if parameters['kernel'] == 'knn':
continue # unstable test; changes in k-NN ordering break it
assert_array_almost_equal(clf.predict_proba([[1., 0.0]]),
np.array([[1., 0.]]), 2)
else:
assert_array_almost_equal(np.asarray(clf.label_distributions_[2]),
np.array([.5, .5]), 2)
def test_predict():
samples = [[1., 0.], [0., 2.], [1., 3.]]
labels = [0, 1, -1]
for estimator, parameters in ESTIMATORS:
clf = estimator(**parameters).fit(samples, labels)
assert_array_equal(clf.predict([[0.5, 2.5]]), np.array([1]))
def test_predict_proba():
samples = [[1., 0.], [0., 1.], [1., 2.5]]
labels = [0, 1, -1]
for estimator, parameters in ESTIMATORS:
clf = estimator(**parameters).fit(samples, labels)
assert_array_almost_equal(clf.predict_proba([[1., 1.]]),
np.array([[0.5, 0.5]]))
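# Illustrative usage sketch (not part of the test suite): the estimators
# above follow the usual semi-supervised convention where -1 marks an
# unlabeled sample, e.g.:
#   clf = label_propagation.LabelSpreading(kernel='knn', n_neighbors=2)
#   clf.fit([[1., 0.], [0., 2.], [1., 3.]], [0, 1, -1])
#   clf.transduction_   # inferred labels, including the unlabeled point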
|
bsd-3-clause
|
B-MOOC/edx-platform
|
lms/envs/static.py
|
100
|
2249
|
"""
This config file runs the simplest dev environment, using sqlite and db-based
sessions. It assumes the following directory structure:
/envroot/
/db # This is where it'll write the database file
/edx-platform # The location of this repo
/log # Where we're going to write log files
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import
from .common import *
from openedx.core.lib.logsettings import get_logger_config
STATIC_GRAB = True
LOGGING = get_logger_config(ENV_ROOT / "log",
logging_env="dev",
tracking_filename="tracking.log",
debug=False)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ENV_ROOT / "db" / "edx.db",
}
}
CACHES = {
# This is the cache used for most things.
# In staging/prod envs, the sessions also live here.
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_loc_mem_cache',
'KEY_FUNCTION': 'util.memcache.safe_key',
},
# The general cache is what you get if you use our util.cache. It's used for
# things like caching the course.xml file for different A/B test groups.
# We set it to be a DummyCache to force reloading of course.xml in dev.
# In staging environments, we would grab VERSION from data uploaded by the
# push process.
'general': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
'KEY_PREFIX': 'general',
'VERSION': 4,
'KEY_FUNCTION': 'util.memcache.safe_key',
}
}
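# Illustrative (not part of the original settings): a named cache is read
# through Django's standard cache API, e.g.
#   from django.core.cache import get_cache
#   get_cache('general').set('key', 'value')   # a no-op here, since 'general' is a DummyCache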
# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
############################ FILE UPLOADS (for discussion forums) #############################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_ROOT = ENV_ROOT / "uploads"
MEDIA_URL = "/discussion/upfiles/"
FILE_UPLOAD_TEMP_DIR = ENV_ROOT / "uploads"
FILE_UPLOAD_HANDLERS = (
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
|
agpl-3.0
|
tvalacarta/tvalacarta
|
python/main-classic/core/jsontools.py
|
3
|
10591
|
# -*- coding: utf-8 -*-
# ------------------------------------------------------------
# tvalacarta 4
# Copyright 2015 tvalacarta@gmail.com
# http://blog.tvalacarta.info/plugin-xbmc/tvalacarta/
#
# Distributed under the terms of GNU General Public License v3 (GPLv3)
# http://www.gnu.org/licenses/gpl-3.0.html
# ------------------------------------------------------------
# This file is part of tvalacarta 4.
#
# tvalacarta 4 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# tvalacarta 4 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with tvalacarta 4. If not, see <http://www.gnu.org/licenses/>.
# --------------------------------------------------------------------------------
# json_tools - JSON load and parse functions with library detection
# Taken from pelisalacarta
# --------------------------------------------------------------------------------
import traceback
import logger
# loads() and dumps() helpers for json and simplejson, with fallbacks
def loads(*args, **kwargs):
    try:
        #logger.info("tvalacarta.core.jsontools loads Trying the json module bundled with the interpreter")
        import json
        return to_utf8(json.loads(*args, **kwargs))
    except ImportError:
        pass
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools loads Trying the simplejson module bundled with the interpreter")
        import simplejson as json
        return to_utf8(json.loads(*args, **kwargs))
    except ImportError:
        pass
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools loads Trying simplejson from the lib directory")
        from lib import simplejson as json
        return to_utf8(json.loads(*args, **kwargs))
    except ImportError:
        pass
    except:
        logger.info(traceback.format_exc())
def dumps(*args, **kwargs):
    try:
        #logger.info("tvalacarta.core.jsontools dumps Trying the json module bundled with the interpreter")
        import json
        return json.dumps(*args, **kwargs)
    except ImportError:
        pass
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools dumps Trying the simplejson module bundled with the interpreter")
        import simplejson as json
        return json.dumps(*args, **kwargs)
    except ImportError:
        pass
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools dumps Trying simplejson from the lib directory")
        from lib import simplejson as json
        return json.dumps(*args, **kwargs)
    except ImportError:
        pass
    except:
        logger.info(traceback.format_exc())
def to_utf8(dct):
if isinstance(dct, dict):
return dict((to_utf8(key), to_utf8(value)) for key, value in dct.iteritems())
elif isinstance(dct, list):
return [to_utf8(element) for element in dct]
elif isinstance(dct, unicode):
return dct.encode('utf-8')
else:
return dct
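# Illustrative example (Python 2 semantics): to_utf8 recursively encodes
# unicode values to UTF-8 byte strings, e.g.
#   to_utf8({u'clave': u'valor'})  ->  {'clave': 'valor'}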
##############
def load_json(data):
    #logger.info("tvalacarta.core.jsontools.load_json Trying simplejson from the lib directory")
    try:
        #logger.info("tvalacarta.core.jsontools.load_json Trying simplejson from the lib directory")
        from lib import simplejson
        json_data = simplejson.loads(data, object_hook=to_utf8)
        logger.info("tvalacarta.core.jsontools.load_json -> "+repr(json_data))
        return json_data
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools.load_json Trying the simplejson module bundled with the interpreter")
        import simplejson
        json_data = simplejson.loads(data, object_hook=to_utf8)
        logger.info("tvalacarta.core.jsontools.load_json -> "+repr(json_data))
        return json_data
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools.load_json Trying the json module bundled with the interpreter")
        import json
        json_data = json.loads(data, object_hook=to_utf8)
        logger.info("tvalacarta.core.jsontools.load_json -> "+repr(json_data))
        return json_data
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools.load_json Trying Plex's JSON")
        json_data = JSON.ObjectFromString(data, encoding="utf-8")
        logger.info("tvalacarta.core.jsontools.load_json -> "+repr(json_data))
        return json_data
    except:
        logger.info(traceback.format_exc())
    logger.info("tvalacarta.core.jsontools.load_json No valid JSON parser found")
    logger.info("tvalacarta.core.jsontools.load_json -> (nothing)")
    return ""
def dump_json(data):
    #logger.info("tvalacarta.core.jsontools.dump_json Trying simplejson from the lib directory")
    try:
        #logger.info("tvalacarta.core.jsontools.dump_json Trying simplejson from the lib directory")
        from lib import simplejson
        json_data = simplejson.dumps(data, indent=4, skipkeys=True, sort_keys=True, ensure_ascii=False)
        # json_data = byteify(json_data)
        logger.info("tvalacarta.core.jsontools.dump_json -> "+repr(json_data))
        return json_data
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools.dump_json Trying the simplejson module bundled with the interpreter")
        import simplejson
        json_data = simplejson.dumps(data, indent=4, skipkeys=True, sort_keys=True, ensure_ascii=False)
        logger.info("tvalacarta.core.jsontools.dump_json -> "+repr(json_data))
        return json_data
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools.dump_json Trying the json module bundled with the interpreter")
        import json
        json_data = json.dumps(data, indent=4, skipkeys=True, sort_keys=True, ensure_ascii=False)
        logger.info("tvalacarta.core.jsontools.dump_json -> "+repr(json_data))
        return json_data
    except:
        logger.info(traceback.format_exc())
    try:
        logger.info("tvalacarta.core.jsontools.dump_json Trying Plex's JSON")
        json_data = JSON.StringFromObject(data)  # , encoding="utf-8")
        logger.info("tvalacarta.core.jsontools.dump_json -> "+repr(json_data))
        return json_data
    except:
        logger.info(traceback.format_exc())
    logger.info("tvalacarta.core.jsontools.dump_json No valid JSON parser found")
    logger.info("tvalacarta.core.jsontools.dump_json -> (nothing)")
    return ""
def xmlTojson(path_xml):
    '''Read an XML file and return a JSON-style dictionary.
    Parameters:
    path_xml (str) -- Full path of the XML file to read.
    Returns:
    An empty dictionary if path_xml does not point to a valid XML file;
    otherwise, a dictionary built from the fields of the XML file.
    '''
import os
    ret = {}
try:
if os.path.exists(path_xml):
infile = open( path_xml , "rb" )
data = infile.read()
infile.close()
ret = Xml2Json(data).result
except:
import traceback
logger.info("tvalacarta.core.jsontools xmlTojson ERROR al leer el fichero y/o crear el json")
logger.info("tvalacarta.core.jsontools "+traceback.format_exc())
return ret
class Xml2Json:
# http://code.activestate.com/recipes/577494-convert-xml-into-json-python-dicts-and-lists-struc/
# >>> Xml2Json('<doc><tag><subtag>data</subtag><t>data1</t><t>data2</t></tag></doc>').result
# {u'doc': {u'tag': {u'subtag': u'data', u't': [u'data1', u'data2']}}}
LIST_TAGS = ['COMMANDS']
def __init__(self, data = None):
#print "################## INIT"
from xml.parsers.expat import ParserCreate
self._parser = ParserCreate()
self._parser.StartElementHandler = self.start
self._parser.EndElementHandler = self.end
self._parser.CharacterDataHandler = self.data
self.result = None
if data:
self.feed(data)
self.close()
def feed(self, data):
#print "################## FEED"
self._stack = []
self._data = ''
self._parser.Parse(data, 0)
def close(self):
self._parser.Parse("", 1)
del self._parser
#print "################## CLOSE"
self.result = to_utf8(self.result)
def start(self, tag, attrs):
assert attrs == {}
assert self._data.strip() == ''
#print "START", repr(tag)
self._stack.append([tag])
self._data = ''
def end(self, tag):
#print "END", repr(tag)
last_tag = self._stack.pop()
assert last_tag[0] == tag
if len(last_tag) == 1: #leaf
data = self._data
else:
if tag not in Xml2Json.LIST_TAGS:
# build a dict, repeating pairs get pushed into lists
data = {}
for k, v in last_tag[1:]:
if k not in data:
data[k] = v
else:
el = data[k]
if type(el) is not list:
data[k] = [el, v]
else:
el.append(v)
else: #force into a list
data = [{k:v} for k, v in last_tag[1:]]
if self._stack:
self._stack[-1].append((tag, data))
else:
self.result = {tag:data}
self._data = ''
def data(self, data):
#print "################## DATA"
self._data = data
|
gpl-3.0
|
ccomb/OpenUpgrade
|
openerp/report/render/html2html/html2html.py
|
443
|
4238
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.report.render.rml2pdf import utils
import copy
import base64
import cStringIO
import re
from reportlab.lib.utils import ImageReader
_regex = re.compile(r'\[\[(.+?)\]\]')
utils._regex = re.compile(r'\[\[\s*(.+?)\s*\]\]', re.DOTALL)
class html2html(object):
def __init__(self, html, localcontext):
self.localcontext = localcontext
self.etree = html
self._node = None
def render(self):
def process_text(node,new_node):
if new_node.tag in ['story','tr','section']:
new_node.attrib.clear()
for child in utils._child_get(node, self):
new_child = copy.deepcopy(child)
new_node.append(new_child)
if len(child):
for n in new_child:
new_child.text = utils._process_text(self, child.text)
new_child.tail = utils._process_text(self, child.tail)
new_child.remove(n)
process_text(child, new_child)
else:
if new_child.tag=='img' and new_child.get('name'):
if _regex.findall(new_child.get('name')) :
src = utils._process_text(self, new_child.get('name'))
if src :
new_child.set('src','data:image/gif;base64,%s'%src)
output = cStringIO.StringIO(base64.decodestring(src))
img = ImageReader(output)
(width,height) = img.getSize()
if not new_child.get('width'):
new_child.set('width',str(width))
if not new_child.get('height') :
new_child.set('height',str(height))
else :
new_child.getparent().remove(new_child)
new_child.text = utils._process_text(self, child.text)
new_child.tail = utils._process_text(self, child.tail)
self._node = copy.deepcopy(self.etree)
for n in self._node:
self._node.remove(n)
process_text(self.etree, self._node)
return self._node
def url_modify(self,root):
for n in root:
            if n.text and n.text.find('<a') >= 0 and n.text.find('href') >= 0 and n.text.find('style') <= 0:
                node = (n.tag=='span' and n.getparent().tag=='u') and n.getparent().getparent() or ((n.tag=='span') and n.getparent()) or n
                style = node.get('color') and "style='color:%s; text-decoration: none;'"%node.get('color') or ''
                # opening tag used to re-split and rebuild the anchor
                t = '<a '
href = n.text.split(t)[-1]
n.text = ' '.join([t,style,href])
self.url_modify(n)
return root
def parseString(node, localcontext = {}):
r = html2html(node, localcontext)
root = r.render()
root = r.url_modify(root)
return root
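# Illustrative usage sketch (the names below are hypothetical): parseString
# expects an lxml/ElementTree node whose [[ ... ]] expressions are evaluated
# against localcontext, e.g.:
#   node = lxml.etree.fromstring("<story><p>[[ partner.name ]]</p></story>")
#   rendered = parseString(node, {'partner': partner})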
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
Danielhiversen/home-assistant
|
homeassistant/components/sensor/nut.py
|
6
|
12534
|
"""
Provides a sensor to track various status aspects of a UPS.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.nut/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
import homeassistant.helpers.config_validation as cv
from homeassistant.const import (
CONF_HOST, CONF_PORT, CONF_NAME, CONF_USERNAME, CONF_PASSWORD,
TEMP_CELSIUS, CONF_RESOURCES, CONF_ALIAS, ATTR_STATE, STATE_UNKNOWN)
from homeassistant.exceptions import PlatformNotReady
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
REQUIREMENTS = ['pynut2==2.1.2']
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = 'NUT UPS'
DEFAULT_HOST = 'localhost'
DEFAULT_PORT = 3493
KEY_STATUS = 'ups.status'
KEY_STATUS_DISPLAY = 'ups.status.display'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
SENSOR_TYPES = {
'ups.status.display': ['Status', '', 'mdi:information-outline'],
'ups.status': ['Status Data', '', 'mdi:information-outline'],
'ups.alarm': ['Alarms', '', 'mdi:alarm'],
'ups.time': ['Internal Time', '', 'mdi:calendar-clock'],
'ups.date': ['Internal Date', '', 'mdi:calendar'],
'ups.model': ['Model', '', 'mdi:information-outline'],
'ups.mfr': ['Manufacturer', '', 'mdi:information-outline'],
'ups.mfr.date': ['Manufacture Date', '', 'mdi:calendar'],
'ups.serial': ['Serial Number', '', 'mdi:information-outline'],
'ups.vendorid': ['Vendor ID', '', 'mdi:information-outline'],
'ups.productid': ['Product ID', '', 'mdi:information-outline'],
'ups.firmware': ['Firmware Version', '', 'mdi:information-outline'],
'ups.firmware.aux': ['Firmware Version 2', '', 'mdi:information-outline'],
'ups.temperature': ['UPS Temperature', TEMP_CELSIUS, 'mdi:thermometer'],
'ups.load': ['Load', '%', 'mdi:gauge'],
'ups.load.high': ['Overload Setting', '%', 'mdi:gauge'],
'ups.id': ['System identifier', '', 'mdi:information-outline'],
'ups.delay.start': ['Load Restart Delay', 's', 'mdi:timer'],
'ups.delay.reboot': ['UPS Reboot Delay', 's', 'mdi:timer'],
'ups.delay.shutdown': ['UPS Shutdown Delay', 's', 'mdi:timer'],
'ups.timer.start': ['Load Start Timer', 's', 'mdi:timer'],
'ups.timer.reboot': ['Load Reboot Timer', 's', 'mdi:timer'],
'ups.timer.shutdown': ['Load Shutdown Timer', 's', 'mdi:timer'],
'ups.test.interval': ['Self-Test Interval', 's', 'mdi:timer'],
'ups.test.result': ['Self-Test Result', '', 'mdi:information-outline'],
'ups.test.date': ['Self-Test Date', '', 'mdi:calendar'],
'ups.display.language': ['Language', '', 'mdi:information-outline'],
'ups.contacts': ['External Contacts', '', 'mdi:information-outline'],
'ups.efficiency': ['Efficiency', '%', 'mdi:gauge'],
'ups.power': ['Current Apparent Power', 'VA', 'mdi:flash'],
'ups.power.nominal': ['Nominal Power', 'VA', 'mdi:flash'],
'ups.realpower': ['Current Real Power', 'W', 'mdi:flash'],
'ups.realpower.nominal': ['Nominal Real Power', 'W', 'mdi:flash'],
'ups.beeper.status': ['Beeper Status', '', 'mdi:information-outline'],
'ups.type': ['UPS Type', '', 'mdi:information-outline'],
'ups.watchdog.status': ['Watchdog Status', '', 'mdi:information-outline'],
'ups.start.auto': ['Start on AC', '', 'mdi:information-outline'],
'ups.start.battery': ['Start on Battery', '', 'mdi:information-outline'],
'ups.start.reboot': ['Reboot on Battery', '', 'mdi:information-outline'],
'ups.shutdown': ['Shutdown Ability', '', 'mdi:information-outline'],
'battery.charge': ['Battery Charge', '%', 'mdi:gauge'],
'battery.charge.low': ['Low Battery Setpoint', '%', 'mdi:gauge'],
'battery.charge.restart': ['Minimum Battery to Start', '%', 'mdi:gauge'],
'battery.charge.warning': ['Warning Battery Setpoint', '%', 'mdi:gauge'],
'battery.charger.status':
['Charging Status', '', 'mdi:information-outline'],
'battery.voltage': ['Battery Voltage', 'V', 'mdi:flash'],
'battery.voltage.nominal': ['Nominal Battery Voltage', 'V', 'mdi:flash'],
'battery.voltage.low': ['Low Battery Voltage', 'V', 'mdi:flash'],
'battery.voltage.high': ['High Battery Voltage', 'V', 'mdi:flash'],
'battery.capacity': ['Battery Capacity', 'Ah', 'mdi:flash'],
'battery.current': ['Battery Current', 'A', 'mdi:flash'],
'battery.current.total': ['Total Battery Current', 'A', 'mdi:flash'],
'battery.temperature':
['Battery Temperature', TEMP_CELSIUS, 'mdi:thermometer'],
'battery.runtime': ['Battery Runtime', 's', 'mdi:timer'],
'battery.runtime.low': ['Low Battery Runtime', 's', 'mdi:timer'],
'battery.runtime.restart':
['Minimum Battery Runtime to Start', 's', 'mdi:timer'],
'battery.alarm.threshold':
['Battery Alarm Threshold', '', 'mdi:information-outline'],
'battery.date': ['Battery Date', '', 'mdi:calendar'],
'battery.mfr.date': ['Battery Manuf. Date', '', 'mdi:calendar'],
'battery.packs': ['Number of Batteries', '', 'mdi:information-outline'],
'battery.packs.bad':
['Number of Bad Batteries', '', 'mdi:information-outline'],
'battery.type': ['Battery Chemistry', '', 'mdi:information-outline'],
'input.sensitivity':
['Input Power Sensitivity', '', 'mdi:information-outline'],
'input.transfer.low': ['Low Voltage Transfer', 'V', 'mdi:flash'],
'input.transfer.high': ['High Voltage Transfer', 'V', 'mdi:flash'],
'input.transfer.reason':
['Voltage Transfer Reason', '', 'mdi:information-outline'],
'input.voltage': ['Input Voltage', 'V', 'mdi:flash'],
'input.voltage.nominal': ['Nominal Input Voltage', 'V', 'mdi:flash'],
'input.frequency': ['Input Line Frequency', 'hz', 'mdi:flash'],
'input.frequency.nominal':
['Nominal Input Line Frequency', 'hz', 'mdi:flash'],
'input.frequency.status':
['Input Frequency Status', '', 'mdi:information-outline'],
'output.current': ['Output Current', 'A', 'mdi:flash'],
'output.current.nominal':
['Nominal Output Current', 'A', 'mdi:flash'],
'output.voltage': ['Output Voltage', 'V', 'mdi:flash'],
'output.voltage.nominal':
['Nominal Output Voltage', 'V', 'mdi:flash'],
'output.frequency': ['Output Frequency', 'hz', 'mdi:flash'],
'output.frequency.nominal':
['Nominal Output Frequency', 'hz', 'mdi:flash'],
}
STATE_TYPES = {
'OL': 'Online',
'OB': 'On Battery',
'LB': 'Low Battery',
'HB': 'High Battery',
'RB': 'Battery Needs Replaced',
'CHRG': 'Battery Charging',
'DISCHRG': 'Battery Discharging',
'BYPASS': 'Bypass Active',
'CAL': 'Runtime Calibration',
'OFF': 'Offline',
'OVER': 'Overloaded',
'TRIM': 'Trimming Voltage',
'BOOST': 'Boosting Voltage',
'FSD': 'Forced Shutdown',
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_ALIAS): cv.string,
vol.Optional(CONF_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD): cv.string,
vol.Required(CONF_RESOURCES):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})
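# Illustrative configuration.yaml entry matching the schema above
# (host and port fall back to localhost:3493 when omitted):
#   sensor:
#     - platform: nut
#       resources:
#         - ups.status.display
#         - battery.charge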
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NUT sensors."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
alias = config.get(CONF_ALIAS)
username = config.get(CONF_USERNAME)
password = config.get(CONF_PASSWORD)
data = PyNUTData(host, port, alias, username, password)
if data.status is None:
_LOGGER.error("NUT Sensor has no data, unable to set up")
raise PlatformNotReady
_LOGGER.debug('NUT Sensors Available: %s', data.status)
entities = []
for resource in config[CONF_RESOURCES]:
sensor_type = resource.lower()
# Display status is a special case that falls back to the status value
# of the UPS instead.
if sensor_type in data.status or (sensor_type == KEY_STATUS_DISPLAY
and KEY_STATUS in data.status):
entities.append(NUTSensor(name, data, sensor_type))
else:
_LOGGER.warning(
"Sensor type: %s does not appear in the NUT status "
"output, cannot add", sensor_type)
try:
data.update(no_throttle=True)
except data.pynuterror as err:
_LOGGER.error("Failure while testing NUT status retrieval. "
"Cannot continue setup: %s", err)
raise PlatformNotReady
add_entities(entities, True)
class NUTSensor(Entity):
"""Representation of a sensor entity for NUT status values."""
def __init__(self, name, data, sensor_type):
"""Initialize the sensor."""
self._data = data
self.type = sensor_type
self._name = "{} {}".format(name, SENSOR_TYPES[sensor_type][0])
self._unit = SENSOR_TYPES[sensor_type][1]
self._state = None
@property
def name(self):
"""Return the name of the UPS sensor."""
return self._name
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SENSOR_TYPES[self.type][2]
@property
def state(self):
"""Return entity state from ups."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit
@property
def device_state_attributes(self):
"""Return the sensor attributes."""
attr = dict()
attr[ATTR_STATE] = self.display_state()
return attr
def display_state(self):
"""Return UPS display state."""
if self._data.status is None:
return STATE_TYPES['OFF']
try:
return " ".join(
STATE_TYPES[state]
for state in self._data.status[KEY_STATUS].split())
except KeyError:
return STATE_UNKNOWN
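    # Illustrative example: a raw NUT status of "OL CHRG" renders as
    # "Online Battery Charging" through the STATE_TYPES lookup above.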
def update(self):
"""Get the latest status and use it to update our sensor state."""
if self._data.status is None:
self._state = None
return
# In case of the display status sensor, keep a human-readable form
# as the sensor state.
if self.type == KEY_STATUS_DISPLAY:
self._state = self.display_state()
elif self.type not in self._data.status:
self._state = None
else:
self._state = self._data.status[self.type]
class PyNUTData:
"""Stores the data retrieved from NUT.
For each entity to use, acts as the single point responsible for fetching
updates from the server.
"""
def __init__(self, host, port, alias, username, password):
"""Initialize the data object."""
from pynut2.nut2 import PyNUTClient, PyNUTError
self._host = host
self._port = port
self._alias = alias
self._username = username
self._password = password
self.pynuterror = PyNUTError
# Establish client with persistent=False to open/close connection on
# each update call. This is more reliable with async.
self._client = PyNUTClient(self._host, self._port,
self._username, self._password, 5, False)
self._status = None
@property
def status(self):
"""Get latest update if throttle allows. Return status."""
self.update()
return self._status
def _get_alias(self):
"""Get the ups alias from NUT."""
try:
return next(iter(self._client.list_ups()))
except self.pynuterror as err:
_LOGGER.error("Failure getting NUT ups alias, %s", err)
return None
def _get_status(self):
"""Get the ups status from NUT."""
if self._alias is None:
self._alias = self._get_alias()
try:
return self._client.list_vars(self._alias)
except (self.pynuterror, ConnectionResetError) as err:
_LOGGER.debug(
"Error getting NUT vars for host %s: %s", self._host, err)
return None
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self, **kwargs):
"""Fetch the latest status from NUT."""
self._status = self._get_status()
|
mit
|
mmauroy/SickRage
|
tornado/test/simple_httpclient_test.py
|
20
|
22936
|
from __future__ import absolute_import, division, print_function, with_statement
import collections
from contextlib import closing
import errno
import gzip
import logging
import os
import re
import socket
import sys
from tornado import gen
from tornado.httpclient import AsyncHTTPClient
from tornado.httputil import HTTPHeaders
from tornado.ioloop import IOLoop
from tornado.log import gen_log
from tornado.netutil import Resolver, bind_sockets
from tornado.simple_httpclient import SimpleAsyncHTTPClient, _default_ca_certs
from tornado.test.httpclient_test import ChunkHandler, CountdownHandler, HelloWorldHandler
from tornado.test import httpclient_test
from tornado.testing import AsyncHTTPTestCase, AsyncHTTPSTestCase, AsyncTestCase, ExpectLog
from tornado.test.util import skipOnTravis, skipIfNoIPv6, refusing_port
from tornado.web import RequestHandler, Application, asynchronous, url, stream_request_body
class SimpleHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase):
def get_http_client(self):
client = SimpleAsyncHTTPClient(io_loop=self.io_loop,
force_instance=True)
self.assertTrue(isinstance(client, SimpleAsyncHTTPClient))
return client
class TriggerHandler(RequestHandler):
def initialize(self, queue, wake_callback):
self.queue = queue
self.wake_callback = wake_callback
@asynchronous
def get(self):
logging.debug("queuing trigger")
self.queue.append(self.finish)
if self.get_argument("wake", "true") == "true":
self.wake_callback()
class HangHandler(RequestHandler):
@asynchronous
def get(self):
pass
class ContentLengthHandler(RequestHandler):
def get(self):
self.set_header("Content-Length", self.get_argument("value"))
self.write("ok")
class HeadHandler(RequestHandler):
def head(self):
self.set_header("Content-Length", "7")
class OptionsHandler(RequestHandler):
def options(self):
self.set_header("Access-Control-Allow-Origin", "*")
self.write("ok")
class NoContentHandler(RequestHandler):
def get(self):
if self.get_argument("error", None):
self.set_header("Content-Length", "5")
self.write("hello")
self.set_status(204)
class SeeOtherPostHandler(RequestHandler):
def post(self):
redirect_code = int(self.request.body)
assert redirect_code in (302, 303), "unexpected body %r" % self.request.body
self.set_header("Location", "/see_other_get")
self.set_status(redirect_code)
class SeeOtherGetHandler(RequestHandler):
def get(self):
if self.request.body:
raise Exception("unexpected body %r" % self.request.body)
self.write("ok")
class HostEchoHandler(RequestHandler):
def get(self):
self.write(self.request.headers["Host"])
class NoContentLengthHandler(RequestHandler):
@gen.coroutine
def get(self):
# Emulate the old HTTP/1.0 behavior of returning a body with no
# content-length. Tornado handles content-length at the framework
# level so we have to go around it.
stream = self.request.connection.stream
yield stream.write(b"HTTP/1.0 200 OK\r\n\r\n"
b"hello")
stream.close()
class EchoPostHandler(RequestHandler):
def post(self):
self.write(self.request.body)
@stream_request_body
class RespondInPrepareHandler(RequestHandler):
def prepare(self):
self.set_status(403)
self.finish("forbidden")
class SimpleHTTPClientTestMixin(object):
def get_app(self):
# callable objects to finish pending /trigger requests
self.triggers = collections.deque()
return Application([
url("/trigger", TriggerHandler, dict(queue=self.triggers,
wake_callback=self.stop)),
url("/chunk", ChunkHandler),
url("/countdown/([0-9]+)", CountdownHandler, name="countdown"),
url("/hang", HangHandler),
url("/hello", HelloWorldHandler),
url("/content_length", ContentLengthHandler),
url("/head", HeadHandler),
url("/options", OptionsHandler),
url("/no_content", NoContentHandler),
url("/see_other_post", SeeOtherPostHandler),
url("/see_other_get", SeeOtherGetHandler),
url("/host_echo", HostEchoHandler),
url("/no_content_length", NoContentLengthHandler),
url("/echo_post", EchoPostHandler),
url("/respond_in_prepare", RespondInPrepareHandler),
], gzip=True)
def test_singleton(self):
# Class "constructor" reuses objects on the same IOLoop
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is
SimpleAsyncHTTPClient(self.io_loop))
# unless force_instance is used
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
SimpleAsyncHTTPClient(self.io_loop,
force_instance=True))
# different IOLoops use different objects
with closing(IOLoop()) as io_loop2:
self.assertTrue(SimpleAsyncHTTPClient(self.io_loop) is not
SimpleAsyncHTTPClient(io_loop2))
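    # Illustrative: the pseudo-singleton behavior above means plain construction,
    #   client = SimpleAsyncHTTPClient(self.io_loop)
    # hands back one shared client per IOLoop unless force_instance=True is passed.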
def test_connection_limit(self):
with closing(self.create_client(max_clients=2)) as client:
self.assertEqual(client.max_clients, 2)
seen = []
# Send 4 requests. Two can be sent immediately, while the others
# will be queued
for i in range(4):
client.fetch(self.get_url("/trigger"),
lambda response, i=i: (seen.append(i), self.stop()))
self.wait(condition=lambda: len(self.triggers) == 2)
self.assertEqual(len(client.queue), 2)
# Finish the first two requests and let the next two through
self.triggers.popleft()()
self.triggers.popleft()()
self.wait(condition=lambda: (len(self.triggers) == 2 and
len(seen) == 2))
self.assertEqual(set(seen), set([0, 1]))
self.assertEqual(len(client.queue), 0)
# Finish all the pending requests
self.triggers.popleft()()
self.triggers.popleft()()
self.wait(condition=lambda: len(seen) == 4)
self.assertEqual(set(seen), set([0, 1, 2, 3]))
self.assertEqual(len(self.triggers), 0)
def test_redirect_connection_limit(self):
# following redirects should not consume additional connections
with closing(self.create_client(max_clients=1)) as client:
client.fetch(self.get_url('/countdown/3'), self.stop,
max_redirects=3)
response = self.wait()
response.rethrow()
def test_default_certificates_exist(self):
open(_default_ca_certs()).close()
def test_gzip(self):
# All the tests in this file should be using gzip, but this test
# ensures that it is in fact getting compressed.
# Setting Accept-Encoding manually bypasses the client's
# decompression so we can see the raw data.
response = self.fetch("/chunk", use_gzip=False,
headers={"Accept-Encoding": "gzip"})
self.assertEqual(response.headers["Content-Encoding"], "gzip")
self.assertNotEqual(response.body, b"asdfqwer")
# Our test data gets bigger when gzipped. Oops. :)
self.assertEqual(len(response.body), 34)
f = gzip.GzipFile(mode="r", fileobj=response.buffer)
self.assertEqual(f.read(), b"asdfqwer")
def test_max_redirects(self):
response = self.fetch("/countdown/5", max_redirects=3)
self.assertEqual(302, response.code)
# We requested 5, followed three redirects for 4, 3, 2, then the last
# unfollowed redirect is to 1.
self.assertTrue(response.request.url.endswith("/countdown/5"))
self.assertTrue(response.effective_url.endswith("/countdown/2"))
self.assertTrue(response.headers["Location"].endswith("/countdown/1"))
def test_header_reuse(self):
# Apps may reuse a headers object if they are only passing in constant
# headers like user-agent. The header object should not be modified.
headers = HTTPHeaders({'User-Agent': 'Foo'})
self.fetch("/hello", headers=headers)
self.assertEqual(list(headers.get_all()), [('User-Agent', 'Foo')])
def test_see_other_redirect(self):
for code in (302, 303):
response = self.fetch("/see_other_post", method="POST", body="%d" % code)
self.assertEqual(200, response.code)
self.assertTrue(response.request.url.endswith("/see_other_post"))
self.assertTrue(response.effective_url.endswith("/see_other_get"))
            # response.request is the original request, so it is still a POST
self.assertEqual("POST", response.request.method)
@skipOnTravis
def test_request_timeout(self):
timeout = 0.1
timeout_min, timeout_max = 0.099, 0.15
if os.name == 'nt':
timeout = 0.5
timeout_min, timeout_max = 0.4, 0.6
response = self.fetch('/trigger?wake=false', request_timeout=timeout)
self.assertEqual(response.code, 599)
self.assertTrue(timeout_min < response.request_time < timeout_max,
response.request_time)
self.assertEqual(str(response.error), "HTTP 599: Timeout")
# trigger the hanging request to let it clean up after itself
self.triggers.popleft()()
@skipIfNoIPv6
def test_ipv6(self):
try:
[sock] = bind_sockets(None, '::1', family=socket.AF_INET6)
port = sock.getsockname()[1]
self.http_server.add_socket(sock)
except socket.gaierror as e:
if e.args[0] == socket.EAI_ADDRFAMILY:
# python supports ipv6, but it's not configured on the network
# interface, so skip this test.
return
raise
url = '%s://[::1]:%d/hello' % (self.get_protocol(), port)
# ipv6 is currently enabled by default but can be disabled
self.http_client.fetch(url, self.stop, allow_ipv6=False)
response = self.wait()
self.assertEqual(response.code, 599)
self.http_client.fetch(url, self.stop)
response = self.wait()
self.assertEqual(response.body, b"Hello world!")
def xtest_multiple_content_length_accepted(self):
response = self.fetch("/content_length?value=2,2")
self.assertEqual(response.body, b"ok")
response = self.fetch("/content_length?value=2,%202,2")
self.assertEqual(response.body, b"ok")
response = self.fetch("/content_length?value=2,4")
self.assertEqual(response.code, 599)
response = self.fetch("/content_length?value=2,%202,3")
self.assertEqual(response.code, 599)
def test_head_request(self):
response = self.fetch("/head", method="HEAD")
self.assertEqual(response.code, 200)
self.assertEqual(response.headers["content-length"], "7")
self.assertFalse(response.body)
def test_options_request(self):
response = self.fetch("/options", method="OPTIONS")
self.assertEqual(response.code, 200)
self.assertEqual(response.headers["content-length"], "2")
self.assertEqual(response.headers["access-control-allow-origin"], "*")
self.assertEqual(response.body, b"ok")
def test_no_content(self):
response = self.fetch("/no_content")
self.assertEqual(response.code, 204)
# 204 status doesn't need a content-length, but tornado will
# add a zero content-length anyway.
#
# A test without a content-length header is included below
# in HTTP204NoContentTestCase.
self.assertEqual(response.headers["Content-length"], "0")
# 204 status with non-zero content length is malformed
with ExpectLog(gen_log, "Malformed HTTP message"):
response = self.fetch("/no_content?error=1")
self.assertEqual(response.code, 599)
def test_host_header(self):
host_re = re.compile(b"^localhost:[0-9]+$")
response = self.fetch("/host_echo")
self.assertTrue(host_re.match(response.body))
url = self.get_url("/host_echo").replace("http://", "http://me:secret@")
self.http_client.fetch(url, self.stop)
response = self.wait()
self.assertTrue(host_re.match(response.body), response.body)
def test_connection_refused(self):
cleanup_func, port = refusing_port()
self.addCleanup(cleanup_func)
with ExpectLog(gen_log, ".*", required=False):
self.http_client.fetch("http://127.0.0.1:%d/" % port, self.stop)
response = self.wait()
self.assertEqual(599, response.code)
if sys.platform != 'cygwin':
# cygwin returns EPERM instead of ECONNREFUSED here
contains_errno = str(errno.ECONNREFUSED) in str(response.error)
if not contains_errno and hasattr(errno, "WSAECONNREFUSED"):
contains_errno = str(errno.WSAECONNREFUSED) in str(response.error)
self.assertTrue(contains_errno, response.error)
# This is usually "Connection refused".
# On windows, strerror is broken and returns "Unknown error".
expected_message = os.strerror(errno.ECONNREFUSED)
self.assertTrue(expected_message in str(response.error),
response.error)
def test_queue_timeout(self):
with closing(self.create_client(max_clients=1)) as client:
client.fetch(self.get_url('/trigger'), self.stop,
request_timeout=10)
# Wait for the trigger request to block, not complete.
self.wait()
client.fetch(self.get_url('/hello'), self.stop,
connect_timeout=0.1)
response = self.wait()
self.assertEqual(response.code, 599)
self.assertTrue(response.request_time < 1, response.request_time)
self.assertEqual(str(response.error), "HTTP 599: Timeout")
self.triggers.popleft()()
self.wait()
def test_no_content_length(self):
response = self.fetch("/no_content_length")
self.assertEquals(b"hello", response.body)
def sync_body_producer(self, write):
write(b'1234')
write(b'5678')
@gen.coroutine
def async_body_producer(self, write):
yield write(b'1234')
yield gen.Task(IOLoop.current().add_callback)
yield write(b'5678')
def test_sync_body_producer_chunked(self):
response = self.fetch("/echo_post", method="POST",
body_producer=self.sync_body_producer)
response.rethrow()
self.assertEqual(response.body, b"12345678")
def test_sync_body_producer_content_length(self):
response = self.fetch("/echo_post", method="POST",
body_producer=self.sync_body_producer,
headers={'Content-Length': '8'})
response.rethrow()
self.assertEqual(response.body, b"12345678")
def test_async_body_producer_chunked(self):
response = self.fetch("/echo_post", method="POST",
body_producer=self.async_body_producer)
response.rethrow()
self.assertEqual(response.body, b"12345678")
def test_async_body_producer_content_length(self):
response = self.fetch("/echo_post", method="POST",
body_producer=self.async_body_producer,
headers={'Content-Length': '8'})
response.rethrow()
self.assertEqual(response.body, b"12345678")
def test_100_continue(self):
response = self.fetch("/echo_post", method="POST",
body=b"1234",
expect_100_continue=True)
self.assertEqual(response.body, b"1234")
def test_100_continue_early_response(self):
def body_producer(write):
raise Exception("should not be called")
response = self.fetch("/respond_in_prepare", method="POST",
body_producer=body_producer,
expect_100_continue=True)
self.assertEqual(response.code, 403)
class SimpleHTTPClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPTestCase):
def setUp(self):
super(SimpleHTTPClientTestCase, self).setUp()
self.http_client = self.create_client()
def create_client(self, **kwargs):
return SimpleAsyncHTTPClient(self.io_loop, force_instance=True,
**kwargs)
class SimpleHTTPSClientTestCase(SimpleHTTPClientTestMixin, AsyncHTTPSTestCase):
def setUp(self):
super(SimpleHTTPSClientTestCase, self).setUp()
self.http_client = self.create_client()
def create_client(self, **kwargs):
return SimpleAsyncHTTPClient(self.io_loop, force_instance=True,
defaults=dict(validate_cert=False),
**kwargs)
class CreateAsyncHTTPClientTestCase(AsyncTestCase):
def setUp(self):
super(CreateAsyncHTTPClientTestCase, self).setUp()
self.saved = AsyncHTTPClient._save_configuration()
def tearDown(self):
AsyncHTTPClient._restore_configuration(self.saved)
super(CreateAsyncHTTPClientTestCase, self).tearDown()
def test_max_clients(self):
AsyncHTTPClient.configure(SimpleAsyncHTTPClient)
with closing(AsyncHTTPClient(
self.io_loop, force_instance=True)) as client:
self.assertEqual(client.max_clients, 10)
with closing(AsyncHTTPClient(
self.io_loop, max_clients=11, force_instance=True)) as client:
self.assertEqual(client.max_clients, 11)
# Now configure max_clients statically and try overriding it
# with each way max_clients can be passed
AsyncHTTPClient.configure(SimpleAsyncHTTPClient, max_clients=12)
with closing(AsyncHTTPClient(
self.io_loop, force_instance=True)) as client:
self.assertEqual(client.max_clients, 12)
with closing(AsyncHTTPClient(
self.io_loop, max_clients=13, force_instance=True)) as client:
self.assertEqual(client.max_clients, 13)
with closing(AsyncHTTPClient(
self.io_loop, max_clients=14, force_instance=True)) as client:
self.assertEqual(client.max_clients, 14)
class HTTP100ContinueTestCase(AsyncHTTPTestCase):
def respond_100(self, request):
self.request = request
self.request.connection.stream.write(
b"HTTP/1.1 100 CONTINUE\r\n\r\n",
self.respond_200)
def respond_200(self):
self.request.connection.stream.write(
b"HTTP/1.1 200 OK\r\nContent-Length: 1\r\n\r\nA",
self.request.connection.stream.close)
def get_app(self):
# Not a full Application, but works as an HTTPServer callback
return self.respond_100
def test_100_continue(self):
res = self.fetch('/')
self.assertEqual(res.body, b'A')
class HTTP204NoContentTestCase(AsyncHTTPTestCase):
def respond_204(self, request):
        # A 204 response never has a body, even if it doesn't have a content-length
# (which would otherwise mean read-until-close). Tornado always
# sends a content-length, so we simulate here a server that sends
# no content length and does not close the connection.
#
# Tests of a 204 response with a Content-Length header are included
# in SimpleHTTPClientTestMixin.
request.connection.stream.write(
b"HTTP/1.1 204 No content\r\n\r\n")
def get_app(self):
return self.respond_204
def test_204_no_content(self):
resp = self.fetch('/')
self.assertEqual(resp.code, 204)
self.assertEqual(resp.body, b'')
class HostnameMappingTestCase(AsyncHTTPTestCase):
def setUp(self):
super(HostnameMappingTestCase, self).setUp()
self.http_client = SimpleAsyncHTTPClient(
self.io_loop,
hostname_mapping={
'www.example.com': '127.0.0.1',
('foo.example.com', 8000): ('127.0.0.1', self.get_http_port()),
})
def get_app(self):
return Application([url("/hello", HelloWorldHandler), ])
def test_hostname_mapping(self):
self.http_client.fetch(
'http://www.example.com:%d/hello' % self.get_http_port(), self.stop)
response = self.wait()
response.rethrow()
self.assertEqual(response.body, b'Hello world!')
def test_port_mapping(self):
self.http_client.fetch('http://foo.example.com:8000/hello', self.stop)
response = self.wait()
response.rethrow()
self.assertEqual(response.body, b'Hello world!')
class ResolveTimeoutTestCase(AsyncHTTPTestCase):
def setUp(self):
# Dummy Resolver subclass that never invokes its callback.
class BadResolver(Resolver):
def resolve(self, *args, **kwargs):
pass
super(ResolveTimeoutTestCase, self).setUp()
self.http_client = SimpleAsyncHTTPClient(
self.io_loop,
resolver=BadResolver())
def get_app(self):
return Application([url("/hello", HelloWorldHandler), ])
def test_resolve_timeout(self):
response = self.fetch('/hello', connect_timeout=0.1)
self.assertEqual(response.code, 599)
class MaxHeaderSizeTest(AsyncHTTPTestCase):
def get_app(self):
class SmallHeaders(RequestHandler):
def get(self):
self.set_header("X-Filler", "a" * 100)
self.write("ok")
class LargeHeaders(RequestHandler):
def get(self):
self.set_header("X-Filler", "a" * 1000)
self.write("ok")
return Application([('/small', SmallHeaders),
('/large', LargeHeaders)])
def get_http_client(self):
return SimpleAsyncHTTPClient(io_loop=self.io_loop, max_header_size=1024)
def test_small_headers(self):
response = self.fetch('/small')
response.rethrow()
self.assertEqual(response.body, b'ok')
def test_large_headers(self):
with ExpectLog(gen_log, "Unsatisfiable read"):
response = self.fetch('/large')
self.assertEqual(response.code, 599)
|
gpl-3.0
|
nlloyd/SubliminalCollaborator
|
libs/twisted/scripts/test/test_scripts.py
|
6
|
5896
|
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for the command-line scripts in the top-level I{bin/} directory.
Tests for actual functionality belong elsewhere, written in a way that doesn't
involve launching child processes.
"""
from os import devnull, getcwd, chdir
from sys import executable
from subprocess import PIPE, Popen
from twisted.trial.unittest import SkipTest, TestCase
from twisted.python.modules import getModule
from twisted.python.filepath import FilePath
from twisted.python.test.test_shellcomp import ZshScriptTestMixin
class ScriptTestsMixin:
"""
Mixin for L{TestCase} subclasses which defines a helper function for testing
a Twisted-using script.
"""
bin = getModule("twisted").pathEntry.filePath.child("bin")
def scriptTest(self, name):
"""
Verify that the given script runs and uses the version of Twisted
currently being tested.
This only works when running tests against a vcs checkout of Twisted,
since it relies on the scripts being in the place they are kept in
version control, and exercises their logic for finding the right version
of Twisted to use in that situation.
@param name: A path fragment, relative to the I{bin} directory of a
Twisted source checkout, identifying a script to test.
@type name: C{str}
@raise SkipTest: if the script is not where it is expected to be.
"""
script = self.bin.preauthChild(name)
if not script.exists():
raise SkipTest(
"Script tests do not apply to installed configuration.")
from twisted.copyright import version
scriptVersion = Popen(
[executable, script.path, '--version'],
stdout=PIPE, stderr=file(devnull)).stdout.read()
self.assertIn(str(version), scriptVersion)
class ScriptTests(TestCase, ScriptTestsMixin):
"""
Tests for the core scripts.
"""
def test_twistd(self):
self.scriptTest("twistd")
def test_twistdPathInsert(self):
"""
The twistd script adds the current working directory to sys.path so
that it's able to import modules from it.
"""
script = self.bin.child("twistd")
if not script.exists():
raise SkipTest(
"Script tests do not apply to installed configuration.")
cwd = getcwd()
self.addCleanup(chdir, cwd)
testDir = FilePath(self.mktemp())
testDir.makedirs()
chdir(testDir.path)
testDir.child("bar.tac").setContent(
"import sys\n"
"print sys.path\n")
output = Popen(
[executable, script.path, '-ny', 'bar.tac'],
stdout=PIPE, stderr=file(devnull)).stdout.read()
self.assertIn(repr(testDir.path), output)
def test_manhole(self):
self.scriptTest("manhole")
def test_trial(self):
self.scriptTest("trial")
def test_trialPathInsert(self):
"""
The trial script adds the current working directory to sys.path so that
it's able to import modules from it.
"""
script = self.bin.child("trial")
if not script.exists():
raise SkipTest(
"Script tests do not apply to installed configuration.")
cwd = getcwd()
self.addCleanup(chdir, cwd)
testDir = FilePath(self.mktemp())
testDir.makedirs()
chdir(testDir.path)
testDir.child("foo.py").setContent("")
output = Popen(
[executable, script.path, 'foo'],
stdout=PIPE, stderr=file(devnull)).stdout.read()
self.assertIn("PASSED", output)
def test_pyhtmlizer(self):
self.scriptTest("pyhtmlizer")
def test_tap2rpm(self):
self.scriptTest("tap2rpm")
def test_tap2deb(self):
self.scriptTest("tap2deb")
def test_tapconvert(self):
self.scriptTest("tapconvert")
def test_deprecatedTkunzip(self):
"""
The entire L{twisted.scripts.tkunzip} module, part of the old Windows
installer tool chain, is deprecated.
"""
from twisted.scripts import tkunzip
warnings = self.flushWarnings(
offendingFunctions=[self.test_deprecatedTkunzip])
self.assertEqual(DeprecationWarning, warnings[0]['category'])
self.assertEqual(
"twisted.scripts.tkunzip was deprecated in Twisted 11.1.0: "
"Seek unzipping software outside of Twisted.",
warnings[0]['message'])
self.assertEqual(1, len(warnings))
def test_deprecatedTapconvert(self):
"""
The entire L{twisted.scripts.tapconvert} module is deprecated.
"""
from twisted.scripts import tapconvert
warnings = self.flushWarnings(
offendingFunctions=[self.test_deprecatedTapconvert])
self.assertEqual(DeprecationWarning, warnings[0]['category'])
self.assertEqual(
"twisted.scripts.tapconvert was deprecated in Twisted 12.1.0: "
"tapconvert has been deprecated.",
warnings[0]['message'])
self.assertEqual(1, len(warnings))
class ZshIntegrationTestCase(TestCase, ZshScriptTestMixin):
"""
Test that zsh completion functions are generated without error
"""
generateFor = [('twistd', 'twisted.scripts.twistd.ServerOptions'),
('trial', 'twisted.scripts.trial.Options'),
('pyhtmlizer', 'twisted.scripts.htmlizer.Options'),
('tap2rpm', 'twisted.scripts.tap2rpm.MyOptions'),
('tap2deb', 'twisted.scripts.tap2deb.MyOptions'),
('tapconvert', 'twisted.scripts.tapconvert.ConvertOptions'),
('manhole', 'twisted.scripts.manhole.MyOptions')
]
|
apache-2.0
|
mlizhenbin/zookeeper
|
src/contrib/zkpython/src/test/clientid_test.py
|
164
|
1739
|
#!/usr/bin/python
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest, threading
import zookeeper, zktestbase
class ClientidTest(zktestbase.TestBase):
"""Test whether clientids work"""
def setUp(self):
pass
def testclientid(self):
cv = threading.Condition()
self.connected = False
def connection_watcher(handle, type, state, path):
cv.acquire()
self.connected = True
cv.notify()
cv.release()
cv.acquire()
self.handle = zookeeper.init(self.host, connection_watcher,10000,(123456,"mypassword"))
self.assertEqual(self.handle, zookeeper.OK)
cv.wait(15.0)
cv.release()
self.assertEqual(self.connected, True, "Connection timed out to " + self.host)
(cid,passwd) = zookeeper.client_id(self.handle)
self.assertEqual(cid,123456)
self.assertEqual(passwd,"mypassword")
if __name__ == '__main__':
unittest.main()
|
apache-2.0
|
russss/Diamond
|
src/diamond/utils/scheduler.py
|
15
|
3642
|
# coding=utf-8
import time
import math
import multiprocessing
import os
import random
import sys
import signal
try:
from setproctitle import getproctitle, setproctitle
except ImportError:
setproctitle = None
from diamond.utils.signals import signal_to_exception
from diamond.utils.signals import SIGALRMException
from diamond.utils.signals import SIGHUPException
def collector_process(collector, metric_queue, log):
"""
"""
proc = multiprocessing.current_process()
if setproctitle:
setproctitle('%s - %s' % (getproctitle(), proc.name))
signal.signal(signal.SIGALRM, signal_to_exception)
signal.signal(signal.SIGHUP, signal_to_exception)
signal.signal(signal.SIGUSR2, signal_to_exception)
interval = float(collector.config['interval'])
log.debug('Starting')
log.debug('Interval: %s seconds', interval)
# Validate the interval
if interval <= 0:
log.critical('interval of %s is not valid!', interval)
sys.exit(1)
# Start the next execution at the next window plus some stagger delay to
# avoid having all collectors running at the same time
next_window = math.floor(time.time() / interval) * interval
stagger_offset = random.uniform(0, interval - 1)
# Allocate time till the end of the window for the collector to run. With a
# minimum of 1 second
max_time = int(max(interval - stagger_offset, 1))
log.debug('Max collection time: %s seconds', max_time)
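    # Worked example (illustrative numbers): with interval=10 and
    # time.time()=1002.0, next_window = floor(1002.0 / 10) * 10 = 1000.0.
    # If stagger_offset=5.3, the first pass sleeps (1000.0 + 5.3) - 1002.0
    # = 3.3s and fires at 1005.3; later runs land at 1015.3, 1025.3, ...,
    # each guarded by an alarm of max_time = int(max(10 - 5.3, 1)) = 4s.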
    # Set up stdout/stderr as /dev/null so random print statements in
    # third-party libs do not fail and prevent collectors from running.
# https://github.com/BrightcoveOS/Diamond/issues/722
sys.stdout = open(os.devnull, 'w')
sys.stderr = open(os.devnull, 'w')
while(True):
try:
time_to_sleep = (next_window + stagger_offset) - time.time()
if time_to_sleep > 0:
time.sleep(time_to_sleep)
elif time_to_sleep < 0:
# clock has jumped, lets skip missed intervals
next_window = time.time()
next_window += interval
# Ensure collector run times fit into the collection window
signal.alarm(max_time)
# Collect!
collector._run()
# Success! Disable the alarm
signal.alarm(0)
except SIGALRMException:
log.error('Took too long to run! Killed!')
# Adjust the stagger_offset to allow for more time to run the
# collector
stagger_offset = stagger_offset * 0.9
max_time = int(max(interval - stagger_offset, 1))
log.debug('Max collection time: %s seconds', max_time)
except SIGHUPException:
# Reload the config if requested
# We must first disable the alarm as we don't want it to interrupt
# us and end up with half a loaded config
signal.alarm(0)
            log.info('Reloading config due to HUP')
collector.load_config()
log.info('Config reloaded')
except Exception:
log.exception('Collector failed!')
break
def handler_process(handlers, metric_queue, log):
proc = multiprocessing.current_process()
if setproctitle:
setproctitle('%s - %s' % (getproctitle(), proc.name))
log.debug('Starting process %s', proc.name)
while(True):
metric = metric_queue.get(block=True, timeout=None)
for handler in handlers:
if metric is not None:
handler._process(metric)
else:
handler._flush()
|
mit
|
felix-d/fabric
|
tests/test_project.py
|
3
|
5218
|
import unittest
import os
import fudge
from fudge.inspector import arg
from fabric.contrib import project
class UploadProjectTestCase(unittest.TestCase):
"""Test case for :func: `fabric.contrib.project.upload_project`."""
fake_tmp = "testtempfolder"
def setUp(self):
fudge.clear_expectations()
# We need to mock out run, local, and put
self.fake_run = fudge.Fake('project.run', callable=True)
self.patched_run = fudge.patch_object(
project,
'run',
self.fake_run
)
self.fake_local = fudge.Fake('local', callable=True)
self.patched_local = fudge.patch_object(
project,
'local',
self.fake_local
)
self.fake_put = fudge.Fake('put', callable=True)
self.patched_put = fudge.patch_object(
project,
'put',
self.fake_put
)
# We don't want to create temp folders
self.fake_mkdtemp = fudge.Fake(
'mkdtemp',
expect_call=True
).returns(self.fake_tmp)
self.patched_mkdtemp = fudge.patch_object(
project,
'mkdtemp',
self.fake_mkdtemp
)
def tearDown(self):
self.patched_run.restore()
self.patched_local.restore()
self.patched_put.restore()
fudge.clear_expectations()
@fudge.with_fakes
def test_temp_folder_is_used(self):
"""A unique temp folder is used for creating the archive to upload."""
# Exercise
project.upload_project()
@fudge.with_fakes
def test_project_is_archived_locally(self):
"""The project should be archived locally before being uploaded."""
# local() is called more than once so we need an extra next_call()
# otherwise fudge compares the args to the last call to local()
self.fake_local.with_args(arg.startswith("tar -czf")).next_call()
# Exercise
project.upload_project()
@fudge.with_fakes
def test_current_directory_is_uploaded_by_default(self):
"""By default the project uploaded is the current working directory."""
cwd_path, cwd_name = os.path.split(os.getcwd())
# local() is called more than once so we need an extra next_call()
# otherwise fudge compares the args to the last call to local()
self.fake_local.with_args(
arg.endswith("-C %s %s" % (cwd_path, cwd_name))
).next_call()
# Exercise
project.upload_project()
@fudge.with_fakes
def test_path_to_local_project_can_be_specified(self):
"""It should be possible to specify which local folder to upload."""
project_path = "path/to/my/project"
# local() is called more than once so we need an extra next_call()
# otherwise fudge compares the args to the last call to local()
self.fake_local.with_args(
arg.endswith("-C %s %s" % os.path.split(project_path))
).next_call()
# Exercise
project.upload_project(local_dir=project_path)
@fudge.with_fakes
def test_path_to_local_project_can_end_in_separator(self):
"""A local path ending in a separator should be handled correctly."""
project_path = "path/to/my"
base = "project"
# local() is called more than once so we need an extra next_call()
# otherwise fudge compares the args to the last call to local()
self.fake_local.with_args(
arg.endswith("-C %s %s" % (project_path, base))
).next_call()
# Exercise
project.upload_project(local_dir="%s/%s/" % (project_path, base))
@fudge.with_fakes
def test_default_remote_folder_is_home(self):
"""Project is uploaded to remote home by default."""
local_dir = "folder"
# local() is called more than once so we need an extra next_call()
# otherwise fudge compares the args to the last call to local()
self.fake_put.with_args(
"%s/folder.tar.gz" % self.fake_tmp, "folder.tar.gz"
).next_call()
# Exercise
project.upload_project(local_dir=local_dir)
@fudge.with_fakes
def test_path_to_remote_folder_can_be_specified(self):
"""It should be possible to specify which local folder to upload to."""
local_dir = "folder"
remote_path = "path/to/remote/folder"
# local() is called more than once so we need an extra next_call()
# otherwise fudge compares the args to the last call to local()
self.fake_put.with_args(
"%s/folder.tar.gz" % self.fake_tmp, "%s/folder.tar.gz" % remote_path
).next_call()
# Exercise
project.upload_project(local_dir=local_dir, remote_dir=remote_path)
|
bsd-2-clause
|
delete/estofadora
|
estofadora/statement/tests/test_views.py
|
1
|
17443
|
# coding: utf-8
import datetime
from datetime import timedelta
from django.core.urlresolvers import reverse
from . import TestBase, CashForm, Cash, make_validated_form, create_cash
class HomeViewTest(TestBase):
def setUp(self):
self.login()
self.url = reverse('statement:home')
self.response = self.client.get(self.url)
def test_get(self):
self.assertEqual(self.response.status_code, 200)
def test_template(self):
self.assertTemplateUsed(self.response, 'statement/statement.html')
def test_get_logout(self):
self._test_get_logout(self.url)
def test_html(self):
self.assertContains(self.response, 'Caixa diário')
class CashViewTest(TestBase):
def setUp(self):
self.login()
self.url = reverse('statement:cash')
self.response = self.client.get(self.url)
def test_get(self):
self.assertEqual(self.response.status_code, 200)
def test_template(self):
self.assertTemplateUsed(self.response, 'statement/cash.html')
def test_get_logout(self):
self._test_get_logout(self.url)
def test_if_has_form(self):
form = self.response.context['form']
self.assertIsInstance(form, CashForm)
def test_html(self):
# + csrf input
self.assertContains(self.response, '<input', 7)
self.assertContains(self.response, 'submit', 2)
def test_response_after_post(self):
today = datetime.datetime.now().date()
fiveDaysBefore = today - timedelta(days=5)
cash1 = create_cash(date=today)
cash2 = create_cash(
history='Rent',
date=fiveDaysBefore, expenses=100, income=0
)
self.response = self.client.get(self.url)
self.assertContains(self.response, cash1.history)
# Cash2 should not appear because it is not from today
self.assertNotContains(self.response, cash2.history)
        # Test if the balance before is right
self.assertContains(self.response, 'Valor total anterior: R$ -100,00')
        # Test if the current balance is right (500 - 100 = 400)
self.assertContains(self.response, 'Valor total de hoje: R$ 400,00')
class CashSavePostTest(TestBase):
def setUp(self):
self.login()
data = make_validated_form(commit=False)
self.response = self.client.post(
reverse('statement:cash'), data, follow=True
)
def test_message(self):
self.assertContains(self.response, 'Registrado com sucesso!')
def test_if_saved(self):
self.assertTrue(Cash.objects.exists())
def test_redirected(self):
expected_url = reverse('statement:cash')
self.assertRedirects(
self.response, expected_url,
status_code=302,
target_status_code=200
)
class CashInvalidPostTest(TestBase):
def setUp(self):
self.login()
self.url = reverse('statement:cash')
def test_post_date_required(self):
data = make_validated_form(date='', commit=False)
self._test_if_got_errors(data)
def test_post_history_required(self):
data = make_validated_form(history='', commit=False)
self._test_if_got_errors(data)
def test_post_income_required(self):
data = make_validated_form(income='', commit=False)
self._test_if_got_errors(data)
def test_post_expenses_required(self):
data = make_validated_form(expenses='', commit=False)
self._test_if_got_errors(data)
def _test_if_got_errors(self, data):
self.response = self.client.post(self.url, data)
self.assertTrue(self.response.context['form'].errors)
class CashSearchPostTest(TestBase):
def setUp(self):
self.login()
august = datetime.datetime(2015, 8, 10).date()
september = datetime.datetime(2015, 9, 10).date()
october = datetime.datetime(2015, 10, 10).date()
self.cash1 = create_cash(
history='Cash1', date=september,
expenses=100, income=150
)
self.cash2 = create_cash(
history='Cash2', date=september,
expenses=200, income=250
)
self.cash3 = create_cash(
history='Cash3', date=august,
expenses=180, income=300
)
self.cash4 = create_cash(
history='Cash4', date=october,
expenses=280, income=500
)
data = {
'search_date': [september],
'search_form': ['']
}
self.response = self.client.post(
reverse('statement:cash'), data, follow=True
)
def test_data_after_post(self):
'The response must have the data of cash1 and cash2, only'
self.assertContains(self.response, self.cash1.history)
self.assertContains(self.response, self.cash1.income) # 150
self.assertContains(self.response, self.cash1.expenses) # 100
self.assertContains(self.response, self.cash2.history)
self.assertContains(self.response, self.cash2.income) # 250
self.assertContains(self.response, self.cash2.expenses) # 200
self.assertNotContains(self.response, self.cash3.history)
self.assertNotContains(self.response, self.cash3.income) # 300
self.assertNotContains(self.response, self.cash3.expenses) # 180
self.assertNotContains(self.response, self.cash4.history)
self.assertNotContains(self.response, self.cash4.income) # 500
self.assertNotContains(self.response, self.cash4.expenses) # 280
        # Test the balance before september (300 - 180 = 120 from august)
self.assertContains(self.response, 'Valor total anterior: R$ 120,00')
        # Test the current balance ((150 - 100) + (250 - 200) + 120 = 220)
self.assertContains(self.response, 'Valor total de hoje: R$ 220,00')
def test_in_another_date(self):
        'If there are no registries, it must return a warning message'
today = datetime.datetime.now().date()
data = {
'search_date': [today],
'search_form': ['']
}
self.response = self.client.post(
reverse('statement:cash'), data, follow=True
)
self.assertContains(
self.response, 'Nenhum registro encontrado na data:'
)
class DeleteViewTest(TestBase):
def setUp(self):
self.login()
self.cash1 = create_cash()
self.cash2 = create_cash()
self.response = self.client.post(
reverse('statement:delete', args=[self.cash1.pk]), follow=True
)
def test_redirected(self):
expected_url = reverse('statement:cash')
self.assertRedirects(
self.response, expected_url, status_code=302,
target_status_code=200
)
def test_if_deleted(self):
self.assertEqual(len(Cash.objects.all()), 1)
def test_message(self):
self.assertContains(self.response, 'Registro removido com sucesso!')
class EditViewTest(TestBase):
def setUp(self):
self.login()
self.cash = create_cash()
self.url = reverse('statement:edit', args=[self.cash.pk])
self.response = self.client.get(self.url)
def tearDown(self):
Cash.objects.all().delete()
def test_get(self):
self.assertEqual(self.response.status_code, 200)
def test_get_logout(self):
self._test_get_logout(self.url)
def test_if_has_form(self):
form = self.response.context['form']
self.assertIsInstance(form, CashForm)
def test_csrf(self):
self.assertContains(self.response, 'csrfmiddlewaretoken')
def test_template(self):
self.assertTemplateUsed(self.response, 'statement/item_edit_form.html')
class EditPostTest(TestBase):
def setUp(self):
self.login()
self.cash = create_cash()
self.url = reverse('statement:edit', args=[self.cash.pk])
self.response = self.client.get(self.url)
self.form = self.response.context['form']
self.data = self.form.initial
def test_message(self):
self.response = self.client.post(self.url, self.data)
self.assertContains(self.response, 'Item alterado com sucesso!')
def test_post_income(self):
        'Test if income has changed'
self.data['income'] = '200'
self.response = self.client.post(self.url, self.data)
self.assertEqual(Cash.objects.first().income, 200.00)
def test_post_expenses(self):
        'Test if expenses have changed'
self.data['expenses'] = '120'
self.response = self.client.post(self.url, self.data)
self.assertEqual(Cash.objects.first().expenses, 120.00)
def test_post_history(self):
        'Test if history has changed'
self.data['history'] = 'Bill to pay'
self.response = self.client.post(self.url, self.data)
self.assertEqual(Cash.objects.first().history, 'Bill to pay')
def test_post_date(self):
        'Test if date has changed'
self.data['date'] = '2015-10-10'
self.response = self.client.post(self.url, self.data)
self.assertEqual(Cash.objects.first().date.isoformat(), '2015-10-10')
def test_post_date_two(self):
self.data['date'] = '10/10/2015'
self.response = self.client.post(self.url, self.data)
self.assertEqual(Cash.objects.first().date.isoformat(), '2015-10-10')
def test_if_total_has_change(self):
        'Test if total has changed and is right'
self.data['income'] = '200'
self.data['expenses'] = '100'
self.response = self.client.post(self.url, self.data)
self.assertEqual(Cash.objects.first().total, 100.00)
class EditInvalidPostTest(TestBase):
def setUp(self):
self.login()
self.cash = create_cash()
self.url = reverse('statement:edit', args=[self.cash.pk])
self.response = self.client.get(self.url)
self.form = self.response.context['form']
self.data = self.form.initial
def test_post_date_required(self):
self.data['date'] = ''
self._test_if_got_errors()
def test_post_wrong_date(self):
self.data['date'] = '10-10-2015'
self._test_if_got_errors()
def test_post_income_required(self):
self.data['income'] = ''
self._test_if_got_errors()
def test_post_expenses_required(self):
self.data['expenses'] = ''
self._test_if_got_errors()
def test_post_history_required(self):
self.data['history'] = ''
self._test_if_got_errors()
def _test_if_got_errors(self):
self.response = self.client.post(self.url, self.data)
self.assertTrue(self.response.context['form'].errors)
self.assertTrue(self.response.context['form_error'])
class CashMonthViewTest(TestBase):
def setUp(self):
self.login()
self.today = datetime.datetime.now().date()
self.url = reverse('statement:cash_month')
self.response = self.client.get(self.url)
def test_get(self):
self.assertEqual(self.response.status_code, 200)
def test_template(self):
self.assertTemplateUsed(self.response, 'statement/cash_month.html')
def test_get_logout(self):
self._test_get_logout(self.url)
def test_html_when_is_empty(self):
# + csrf input
self.assertContains(self.response, '<input', 0)
self.assertContains(self.response, 'submit', 0)
self.assertContains(self.response, '<select', 0)
def test_message_when_is_empty(self):
        'Must return a warning message when there are no registries'
self.assertContains(self.response, 'Nenhum registro encontrado.')
def test_date_in_context(self):
'When was not choose a specific date, the default date is "today"'
self.assertEqual(
self.response.context['choose_month'], self.today.month
)
self.assertEqual(
self.response.context['choose_year'], self.today.year
)
class CashMonthSearchPostTest(TestBase):
def setUp(self):
self.login()
        august = datetime.datetime(2015, 8, 10).date()
        september = datetime.datetime(2015, 9, 10).date()
        october = datetime.datetime(2015, 10, 10).date()
self.cash1 = create_cash(
history='Cash1', date=september,
expenses=100, income=100
)
self.cash2 = create_cash(
history='Cash2', date=september,
expenses=200, income=200
)
        self.cash3 = create_cash(
            history='Cash3', date=august,
            expenses=180, income=300
        )
        self.cash4 = create_cash(
            history='Cash4', date=october,
            expenses=0, income=500
        )
data = {
'selectmonth': september.month,
'selectyear': september.year
}
self.response = self.client.post(
reverse('statement:cash_month'), data, follow=True
)
def test_data_after_post(self):
'The response must have the data of cash1 and cash2, only'
self.assertContains(self.response, self.cash1.history)
self.assertContains(self.response, self.cash1.income)
self.assertContains(self.response, self.cash1.expenses)
self.assertContains(self.response, self.cash2.history)
self.assertContains(self.response, self.cash2.income)
self.assertContains(self.response, self.cash2.expenses)
self.assertNotContains(self.response, self.cash3.history)
self.assertNotContains(self.response, self.cash3.income)
self.assertNotContains(self.response, self.cash3.expenses)
        # Test if the balance before is right (300 - 180 = 120 from august)
self.assertContains(self.response, 'Valor total anterior: R$ 120,00')
        # Test if the current balance is right (120 + (100 - 100) + (200 - 200) = 120)
self.assertContains(self.response, 'Valor total: R$ 120,00')
self.test_inputs()
def test_inputs(self):
# + csrf input
self.assertContains(self.response, '<input', 1)
self.assertContains(self.response, 'submit', 1)
self.assertContains(self.response, '<select', 2)
def test_in_another_date(self):
        'If there are no registries, it must return a warning message'
data = {
'selectmonth': '01',
'selectyear': '2014'
}
self.response = self.client.post(
reverse('statement:cash_month'), data, follow=True
)
self.assertContains(self.response, 'Nenhum registro encontrado.')
class CashAnnualViewTest(TestBase):
def setUp(self):
self.login()
self.today = datetime.datetime.now().date()
self.url = reverse('statement:cash_annual')
self.response = self.client.get(self.url)
def test_get(self):
self.assertEqual(self.response.status_code, 200)
def test_template(self):
self.assertTemplateUsed(self.response, 'statement/cash_annual.html')
def test_get_logout(self):
self._test_get_logout(self.url)
def test_html_when_is_empty(self):
# + csrf input
self.assertContains(self.response, '<input', 0)
self.assertContains(self.response, '<select', 0)
self.assertContains(self.response, 'submit', 0)
def test_message_when_is_empty(self):
        'Must return a warning message when there are no registries'
self.assertContains(self.response, 'Nenhum registro encontrado.')
def test_year_in_context(self):
        'When no specific year is chosen, the default is the current year'
self.assertEqual(self.response.context['choose_year'], self.today.year)
class CashAnnualSearchPostTest(TestBase):
def setUp(self):
self.login()
september = datetime.datetime(2014, 9, 10).date()
october = datetime.datetime(2015, 10, 10).date()
january = datetime.datetime(2016, 1, 5).date()
self.cash1 = create_cash(
history='Cash1', date=september,
expenses=100, income=100
)
self.cash2 = create_cash(
history='Cash2', date=september,
expenses=160, income=200
)
self.cash3 = create_cash(
history='Cash3', date=october,
expenses=150, income=300
)
self.cash4 = create_cash(
history='Cash4', date=january,
expenses=0, income=500
)
data = {
'selectyear': january.year
}
self.url = reverse('statement:cash_annual')
self.response = self.client.post(self.url, data, follow=True)
def test_data_after_post(self):
# Test if balance before is right
# cash1.total + cash2.total + cash3.total => 0 + 40 + 150 = 190
self.assertContains(self.response, 'Valor total de 2015: R$ 190,00')
        # Test if the current balance is right (500 from january)
self.assertContains(self.response, 'Valor total de 2016: R$ 500,00')
self.test_inputs()
def test_inputs(self):
# + csrf input
self.assertContains(self.response, '<input', 1)
self.assertContains(self.response, 'submit', 1)
self.assertContains(self.response, '<select', 1)
def test_chart(self):
self.assertContains(self.response, 'id="mychart"')
|
mit
|
dantebarba/docker-media-server
|
plex/Subliminal.bundle/Contents/Libraries/Shared/guessit/options.py
|
39
|
4547
|
from argparse import ArgumentParser
def build_opts(transformers=None):
opts = ArgumentParser()
opts.add_argument(dest='filename', help='Filename or release name to guess', nargs='*')
naming_opts = opts.add_argument_group("Naming")
naming_opts.add_argument('-t', '--type', dest='type', default=None,
help='The suggested file type: movie, episode. If undefined, type will be guessed.')
naming_opts.add_argument('-n', '--name-only', dest='name_only', action='store_true', default=False,
help='Parse files as name only. Disable folder parsing, extension parsing, and file content analysis.')
naming_opts.add_argument('-c', '--split-camel', dest='split_camel', action='store_true', default=False,
help='Split camel case part of filename.')
naming_opts.add_argument('-X', '--disabled-transformer', action='append', dest='disabled_transformers',
help='Transformer to disable (can be used multiple time)')
output_opts = opts.add_argument_group("Output")
output_opts.add_argument('-v', '--verbose', action='store_true', dest='verbose', default=False,
help='Display debug output')
output_opts.add_argument('-P', '--show-property', dest='show_property', default=None,
help='Display the value of a single property (title, series, videoCodec, year, type ...)'),
output_opts.add_argument('-u', '--unidentified', dest='unidentified', action='store_true', default=False,
help='Display the unidentified parts.'),
output_opts.add_argument('-a', '--advanced', dest='advanced', action='store_true', default=False,
help='Display advanced information for filename guesses, as json output')
output_opts.add_argument('-y', '--yaml', dest='yaml', action='store_true', default=False,
help='Display information for filename guesses as yaml output (like unit-test)')
output_opts.add_argument('-f', '--input-file', dest='input_file', default=False,
help='Read filenames from an input file.')
output_opts.add_argument('-d', '--demo', action='store_true', dest='demo', default=False,
help='Run a few builtin tests instead of analyzing a file')
information_opts = opts.add_argument_group("Information")
information_opts.add_argument('-p', '--properties', dest='properties', action='store_true', default=False,
help='Display properties that can be guessed.')
information_opts.add_argument('-V', '--values', dest='values', action='store_true', default=False,
help='Display property values that can be guessed.')
information_opts.add_argument('-s', '--transformers', dest='transformers', action='store_true', default=False,
help='Display transformers that can be used.')
information_opts.add_argument('--version', dest='version', action='store_true', default=False,
help='Display the guessit version.')
webservice_opts = opts.add_argument_group("guessit.io")
webservice_opts.add_argument('-b', '--bug', action='store_true', dest='submit_bug', default=False,
help='Submit a wrong detection to the guessit.io service')
other_opts = opts.add_argument_group("Other features")
other_opts.add_argument('-i', '--info', dest='info', default='filename',
help='The desired information type: filename, video, hash_mpc or a hash from python\'s '
'hashlib module, such as hash_md5, hash_sha1, ...; or a list of any of '
'them, comma-separated')
if transformers:
for transformer in transformers:
transformer.register_arguments(opts, naming_opts, output_opts, information_opts, webservice_opts, other_opts)
return opts, naming_opts, output_opts, information_opts, webservice_opts, other_opts
_opts, _naming_opts, _output_opts, _information_opts, _webservice_opts, _other_opts = None, None, None, None, None, None
def reload(transformers=None):
global _opts, _naming_opts, _output_opts, _information_opts, _webservice_opts, _other_opts
_opts, _naming_opts, _output_opts, _information_opts, _webservice_opts, _other_opts = build_opts(transformers)
def get_opts():
return _opts
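# A minimal usage sketch (illustrative; the module path and option names
# follow the definitions above, the sample filename is an assumption):
#
#     from guessit import options
#     options.reload()
#     args = options.get_opts().parse_args(['Movie.Title.2012.720p.mkv', '-v'])
#     print(args.filename, args.verbose)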
|
gpl-3.0
|
mavit/ansible-modules-extras
|
source_control/gitlab_user.py
|
14
|
12435
|
#!/usr/bin/python
# (c) 2015, Werner Dijkerman (ikben@werner-dijkerman.nl)
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: gitlab_user
short_description: Creates/updates/deletes Gitlab Users
description:
   - When the user does not exist in Gitlab, it will be created.
   - When the user does exist and state=absent, the user will be deleted.
   - When changes are made to the user, the user will be updated.
version_added: "2.1"
author: "Werner Dijkerman (@dj-wasabi)"
requirements:
- pyapi-gitlab python module
options:
server_url:
description:
- Url of Gitlab server, with protocol (http or https).
required: true
validate_certs:
description:
- When using https if SSL certificate needs to be verified.
required: false
default: true
aliases:
- verify_ssl
login_user:
description:
- Gitlab user name.
required: false
default: null
login_password:
description:
- Gitlab password for login_user
required: false
default: null
login_token:
description:
- Gitlab token for logging in.
required: false
default: null
name:
description:
- Name of the user you want to create
required: true
username:
description:
- The username of the user.
required: true
password:
description:
- The password of the user.
required: true
email:
description:
- The email that belongs to the user.
required: true
sshkey_name:
description:
- The name of the sshkey
required: false
default: null
sshkey_file:
description:
- The ssh key itself.
required: false
default: null
group:
description:
      - Add the user as a member of this group.
required: false
default: null
access_level:
description:
- The access level to the group. One of the following can be used.
- guest
- reporter
- developer
- master
- owner
required: false
default: null
state:
description:
      - Create or delete the user.
- Possible values are present and absent.
required: false
default: present
choices: ["present", "absent"]
'''
EXAMPLES = '''
- name: "Delete Gitlab User"
local_action: gitlab_user
server_url="http://gitlab.dj-wasabi.local"
validate_certs=false
login_token="WnUzDsxjy8230-Dy_k"
username=myusername
state=absent
- name: "Create Gitlab User"
local_action: gitlab_user
server_url="https://gitlab.dj-wasabi.local"
validate_certs=true
login_user=dj-wasabi
login_password="MySecretPassword"
name=My Name
username=myusername
password=mysecretpassword
email=me@home.com
sshkey_name=MySSH
sshkey_file=ssh-rsa AAAAB3NzaC1yc...
state=present
'''
RETURN = '''# '''
try:
import gitlab
HAS_GITLAB_PACKAGE = True
except:
HAS_GITLAB_PACKAGE = False
class GitLabUser(object):
def __init__(self, module, git):
self._module = module
self._gitlab = git
def addToGroup(self, group_id, user_id, access_level):
if access_level == "guest":
level = 10
elif access_level == "reporter":
level = 20
elif access_level == "developer":
level = 30
elif access_level == "master":
level = 40
elif access_level == "owner":
level = 50
return self._gitlab.addgroupmember(group_id, user_id, level)
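    # The numeric levels above mirror GitLab's access-level constants
    # (guest=10 ... owner=50); an equivalent table-driven sketch (an
    # illustration, not the module's code):
    #     level = {'guest': 10, 'reporter': 20, 'developer': 30,
    #              'master': 40, 'owner': 50}[access_level]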
def createOrUpdateUser(self, user_name, user_username, user_password, user_email, user_sshkey_name, user_sshkey_file, group_name, access_level):
group_id = ''
arguments = {"name": user_name,
"username": user_username,
"email": user_email}
if group_name is not None:
if self.existsGroup(group_name):
group_id = self.getGroupId(group_name)
if self.existsUser(user_username):
self.updateUser(group_id, user_sshkey_name, user_sshkey_file, access_level, arguments)
else:
if self._module.check_mode:
self._module.exit_json(changed=True)
self.createUser(group_id, user_password, user_sshkey_name, user_sshkey_file, access_level, arguments)
def createUser(self, group_id, user_password, user_sshkey_name, user_sshkey_file, access_level, arguments):
user_changed = False
# Create the user
user_username = arguments['username']
user_name = arguments['name']
user_email = arguments['email']
if self._gitlab.createuser(password=user_password, **arguments):
user_id = self.getUserId(user_username)
if self._gitlab.addsshkeyuser(user_id=user_id, title=user_sshkey_name, key=user_sshkey_file):
user_changed = True
# Add the user to the group if group_id is not empty
if group_id != '':
if self.addToGroup(group_id, user_id, access_level):
user_changed = True
user_changed = True
# Exit with change to true or false
if user_changed:
self._module.exit_json(changed=True, result="Created the user")
else:
self._module.exit_json(changed=False)
def deleteUser(self, user_username):
user_id = self.getUserId(user_username)
if self._gitlab.deleteuser(user_id):
self._module.exit_json(changed=True, result="Successfully deleted user %s" % user_username)
else:
self._module.exit_json(changed=False, result="User %s already deleted or something went wrong" % user_username)
def existsGroup(self, group_name):
for group in self._gitlab.getall(self._gitlab.getgroups):
if group['name'] == group_name:
return True
return False
def existsUser(self, username):
found_user = self._gitlab.getusers(search=username)
for user in found_user:
if user['id'] != '':
return True
return False
def getGroupId(self, group_name):
for group in self._gitlab.getall(self._gitlab.getgroups):
if group['name'] == group_name:
return group['id']
def getUserId(self, username):
found_user = self._gitlab.getusers(search=username)
for user in found_user:
if user['id'] != '':
return user['id']
def updateUser(self, group_id, user_sshkey_name, user_sshkey_file, access_level, arguments):
user_changed = False
user_username = arguments['username']
user_id = self.getUserId(user_username)
user_data = self._gitlab.getuser(user_id=user_id)
        # Let's check if we need to update the user
for arg_key, arg_value in arguments.items():
if user_data[arg_key] != arg_value:
user_changed = True
if user_changed:
if self._module.check_mode:
self._module.exit_json(changed=True)
self._gitlab.edituser(user_id=user_id, **arguments)
user_changed = True
if self._module.check_mode or self._gitlab.addsshkeyuser(user_id=user_id, title=user_sshkey_name, key=user_sshkey_file):
user_changed = True
if group_id != '':
if self._module.check_mode or self.addToGroup(group_id, user_id, access_level):
user_changed = True
if user_changed:
self._module.exit_json(changed=True, result="The user %s is updated" % user_username)
else:
self._module.exit_json(changed=False, result="The user %s is already up2date" % user_username)
def main():
global user_id
module = AnsibleModule(
argument_spec=dict(
server_url=dict(required=True),
validate_certs=dict(required=False, default=True, type=bool, aliases=['verify_ssl']),
login_user=dict(required=False, no_log=True),
login_password=dict(required=False, no_log=True),
login_token=dict(required=False, no_log=True),
name=dict(required=True),
username=dict(required=True),
password=dict(required=True),
email=dict(required=True),
sshkey_name=dict(required=False),
sshkey_file=dict(required=False),
group=dict(required=False),
access_level=dict(required=False, choices=["guest", "reporter", "developer", "master", "owner"]),
state=dict(default="present", choices=["present", "absent"]),
),
supports_check_mode=True
)
if not HAS_GITLAB_PACKAGE:
module.fail_json(msg="Missing required gitlab module (check docs or install with: pip install pyapi-gitlab")
server_url = module.params['server_url']
verify_ssl = module.params['validate_certs']
login_user = module.params['login_user']
login_password = module.params['login_password']
login_token = module.params['login_token']
user_name = module.params['name']
user_username = module.params['username']
user_password = module.params['password']
user_email = module.params['email']
user_sshkey_name = module.params['sshkey_name']
user_sshkey_file = module.params['sshkey_file']
group_name = module.params['group']
access_level = module.params['access_level']
state = module.params['state']
# We need both login_user and login_password or login_token, otherwise we fail.
if login_user is not None and login_password is not None:
use_credentials = True
elif login_token is not None:
use_credentials = False
else:
module.fail_json(msg="No login credentials are given. Use login_user with login_password, or login_token")
# Check if vars are none
if user_sshkey_file is not None and user_sshkey_name is not None:
use_sshkey = True
else:
use_sshkey = False
if group_name is not None and access_level is not None:
add_to_group = True
group_name = group_name.lower()
else:
add_to_group = False
user_username = user_username.lower()
    # Let's make a connection to the Gitlab server_url, with either
    # login_user and login_password or with login_token
try:
if use_credentials:
git = gitlab.Gitlab(host=server_url)
git.login(user=login_user, password=login_password)
else:
git = gitlab.Gitlab(server_url, token=login_token, verify_ssl=verify_ssl)
except Exception, e:
module.fail_json(msg="Failed to connect to Gitlab server: %s " % e)
# Validate if group exists and take action based on "state"
user = GitLabUser(module, git)
    # Check if the user exists; if not and state == absent, we exit nicely.
if not user.existsUser(user_username) and state == "absent":
module.exit_json(changed=False, result="User already deleted or does not exists")
else:
# User exists,
if state == "absent":
user.deleteUser(user_username)
else:
user.createOrUpdateUser(user_name, user_username, user_password, user_email, user_sshkey_name, user_sshkey_file, group_name, access_level)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
|
gpl-3.0
|
scalyr/scalyr-agent-2
|
scalyr_agent/third_party/pysnmp/smi/error.py
|
2
|
1778
|
from pyasn1.error import PyAsn1Error
from pysnmp.error import PySnmpError
class SmiError(PySnmpError, PyAsn1Error): pass
class MibLoadError(SmiError): pass
class MibNotFoundError(MibLoadError): pass
class MibOperationError(SmiError):
def __init__(self, **kwargs): self.__outArgs = kwargs
def __str__(self): return '%s(%s)' % (
self.__class__.__name__, self.__outArgs
)
def __getitem__(self, key): return self.__outArgs[key]
def __contains__(self, key): return key in self.__outArgs
def get(self, key, defVal=None): return self.__outArgs.get(key, defVal)
def keys(self): return self.__outArgs.keys()
def update(self, d): self.__outArgs.update(d)
# Aligned with SNMPv2 PDU error-status
class GenError(MibOperationError): pass
class NoAccessError(MibOperationError): pass
class WrongTypeError(MibOperationError): pass
class WrongLengthError(MibOperationError): pass
class WrongEncodingError(MibOperationError): pass
class WrongValueError(MibOperationError): pass
class NoCreationError(MibOperationError): pass
class InconsistentValueError(MibOperationError): pass
class ResourceUnavailableError(MibOperationError): pass
class CommitFailedError(MibOperationError): pass
class UndoFailedError(MibOperationError): pass
class AuthorizationError(MibOperationError): pass
class NotWritableError(MibOperationError): pass
class InconsistentNameError(MibOperationError): pass
# Aligned with SNMPv2 Var-Bind exceptions
class NoSuchObjectError(MibOperationError): pass
class NoSuchInstanceError(MibOperationError): pass
class EndOfMibViewError(MibOperationError): pass
# Row management
class TableRowManagement(MibOperationError): pass
class RowCreationWanted(TableRowManagement): pass
class RowDestructionWanted(TableRowManagement): pass
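# Illustrative usage (a sketch, not part of the original module): the
# dict-like interface on MibOperationError lets callers attach and inspect
# arbitrary out-arguments:
#
#     try:
#         raise NoSuchInstanceError(name=(1, 3, 6, 1), idx=0)
#     except NoSuchInstanceError as e:
#         if 'name' in e:
#             print(e['name'], e.get('idx'))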
|
apache-2.0
|
ononeor12/python-social-auth
|
examples/django_me_example/example/wsgi.py
|
114
|
1131
|
"""
WSGI config for dj project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "example.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
|
bsd-3-clause
|
stormi/tsunami
|
src/secondaires/navigation/equipage/ordres/feu.py
|
1
|
2632
|
# -*-coding:Utf-8 -*
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'ordre Feu."""
from secondaires.navigation.equipage.signaux import *
from ..ordre import *
class Feu(Ordre):
"""Ordre feu.
Cet ordre demande au matelot de faire feu avec le canon précisé. Le
canon est supposé chargé en poudre et boulet.
"""
cle = "feu"
def __init__(self, matelot, navire, canon=None, bruyant=False):
Ordre.__init__(self, matelot, navire, canon, bruyant)
self.canon = canon
self.bruyant = bruyant
def executer(self):
"""Exécute l'ordre : colmate."""
navire = self.navire
matelot = self.matelot
personnage = matelot.personnage
canon = self.canon
salle = canon.parent
if canon.onces == 0:
yield SignalAbandonne("Ce canon n'est pas chargé en poudre",
self.bruyant)
if canon.projectile is None:
yield SignalAbandonne("Ce canon n'est pas chargé en boulet",
self.bruyant)
canon.tirer(auteur=personnage)
yield SignalTermine()
|
bsd-3-clause
|
mrjacobagilbert/gnuradio
|
gr-blocks/python/blocks/qa_tag_share.py
|
5
|
1801
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2017 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# SPDX-License-Identifier: GPL-3.0-or-later
#
#
from gnuradio import gr, gr_unittest
from gnuradio import blocks
import pmt
class qa_tag_share(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def test_001_t(self):
# Constants
tag_key = 'in1_tag'
tag_value = 0
tag_offset = 0
in0_value = 1.0 + 1.0j
in1_value = 2.717
in0_data = [in0_value, ] * 10
in1_data = [in1_value, ] * 10
sink_data = in0_data
tag = gr.tag_t()
tag.key = pmt.to_pmt(tag_key)
tag.value = pmt.to_pmt(tag_value)
tag.offset = tag_offset
# Only tag Input 1 of the share block and see if it transfers
# to Output 0. Also verify that Input 0 stream is propagated to
# Output 0.
in0 = blocks.vector_source_c(in0_data, False, 1)
in1 = blocks.vector_source_f(in1_data, False, 1, (tag,))
tag_share = blocks.tag_share(gr.sizeof_gr_complex, gr.sizeof_float)
sink = blocks.vector_sink_c(1)
self.tb.connect(in0, (tag_share, 0))
self.tb.connect(in1, (tag_share, 1))
self.tb.connect(tag_share, sink)
self.tb.run()
self.assertEqual(len(sink.tags()), 1)
# print(sink.tags())
received_tag = sink.tags()[0]
self.assertEqual(pmt.to_python(received_tag.key), tag_key)
self.assertEqual(pmt.to_python(received_tag.value), tag_value)
self.assertEqual(received_tag.offset, tag_offset)
self.assertEqual(sink.data(), sink_data)
if __name__ == '__main__':
gr_unittest.run(qa_tag_share)
|
gpl-3.0
|
n0trax/ansible
|
contrib/inventory/gce.py
|
47
|
18346
|
#!/usr/bin/env python
# Copyright 2013 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
GCE external inventory script
=================================
Generates inventory that Ansible can understand by making API requests to
Google Compute Engine via the libcloud library. Full install/configuration
instructions for the gce* modules can be found in the comments of
ansible/test/gce_tests.py.
When run against a specific host, this script returns the following variables
based on the data obtained from the libcloud Node object:
- gce_uuid
- gce_id
- gce_image
- gce_machine_type
- gce_private_ip
- gce_public_ip
- gce_name
- gce_description
- gce_status
- gce_zone
- gce_tags
- gce_metadata
- gce_network
- gce_subnetwork
When run in --list mode, instances are grouped by the following categories:
- zone:
zone group name examples are us-central1-b, europe-west1-a, etc.
- instance tags:
An entry is created for each tag. For example, if you have two instances
with a common tag called 'foo', they will both be grouped together under
the 'tag_foo' name.
- network name:
the name of the network is appended to 'network_' (e.g. the 'default'
network will result in a group named 'network_default')
- machine type
types follow a pattern like n1-standard-4, g1-small, etc.
- running status:
group name prefixed with 'status_' (e.g. status_running, status_stopped,..)
- image:
when using an ephemeral/scratch disk, this will be set to the image name
used when creating the instance (e.g. debian-7-wheezy-v20130816). when
your instance was created with a root persistent disk it will be set to
'persistent_disk' since there is no current way to determine the image.
Examples:
Execute uname on all instances in the us-central1-a zone
$ ansible -i gce.py us-central1-a -m shell -a "/bin/uname -a"
Use the GCE inventory script to print out instance specific information
$ contrib/inventory/gce.py --host my_instance
Author: Eric Johnson <erjohnso@google.com>
Contributors: Matt Hite <mhite@hotmail.com>, Tom Melendez <supertom@google.com>
Version: 0.0.3
'''
try:
import pkg_resources
except ImportError:
# Use pkg_resources to find the correct versions of libraries and set
# sys.path appropriately when there are multiversion installs. We don't
# fail here as there is code that better expresses the errors where the
# library is used.
pass
USER_AGENT_PRODUCT = "Ansible-gce_inventory_plugin"
USER_AGENT_VERSION = "v2"
import sys
import os
import argparse
from time import time
if sys.version_info >= (3, 0):
import configparser
else:
import ConfigParser as configparser
import logging
logging.getLogger('libcloud.common.google').addHandler(logging.NullHandler())
try:
import json
except ImportError:
import simplejson as json
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
_ = Provider.GCE
except:
sys.exit("GCE inventory script requires libcloud >= 0.13")
class CloudInventoryCache(object):
def __init__(self, cache_name='ansible-cloud-cache', cache_path='/tmp',
cache_max_age=300):
cache_dir = os.path.expanduser(cache_path)
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
self.cache_path_cache = os.path.join(cache_dir, cache_name)
self.cache_max_age = cache_max_age
def is_valid(self, max_age=None):
        ''' Determines if the cache file has expired or is still valid '''
if max_age is None:
max_age = self.cache_max_age
if os.path.isfile(self.cache_path_cache):
mod_time = os.path.getmtime(self.cache_path_cache)
current_time = time()
if (mod_time + max_age) > current_time:
return True
return False
def get_all_data_from_cache(self, filename=''):
''' Reads the JSON inventory from the cache file. Returns Python dictionary. '''
data = ''
if not filename:
filename = self.cache_path_cache
with open(filename, 'r') as cache:
data = cache.read()
return json.loads(data)
def write_to_cache(self, data, filename=''):
''' Writes data to file as JSON. Returns True. '''
if not filename:
filename = self.cache_path_cache
json_data = json.dumps(data)
with open(filename, 'w') as cache:
cache.write(json_data)
return True
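# Illustrative use of the cache helper above (a sketch; the file name and
# the rebuild step are examples only):
#
#     cache = CloudInventoryCache(cache_name='ansible-gce.cache',
#                                 cache_path='/tmp', cache_max_age=300)
#     if cache.is_valid():
#         inventory = cache.get_all_data_from_cache()
#     else:
#         inventory = {'_meta': {'hostvars': {}}}  # ...rebuilt via API calls
#         cache.write_to_cache(inventory)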
class GceInventory(object):
def __init__(self):
# Cache object
self.cache = None
# dictionary containing inventory read from disk
self.inventory = {}
# Read settings and parse CLI arguments
self.parse_cli_args()
self.config = self.get_config()
self.driver = self.get_gce_driver()
self.ip_type = self.get_inventory_options()
if self.ip_type:
self.ip_type = self.ip_type.lower()
# Cache management
start_inventory_time = time()
cache_used = False
if self.args.refresh_cache or not self.cache.is_valid():
self.do_api_calls_update_cache()
else:
self.load_inventory_from_cache()
cache_used = True
self.inventory['_meta']['stats'] = {'use_cache': True}
self.inventory['_meta']['stats'] = {
'inventory_load_time': time() - start_inventory_time,
'cache_used': cache_used
}
# Just display data for specific host
if self.args.host:
print(self.json_format_dict(
self.inventory['_meta']['hostvars'][self.args.host],
pretty=self.args.pretty))
else:
# Otherwise, assume user wants all instances grouped
zones = self.parse_env_zones()
print(self.json_format_dict(self.inventory,
pretty=self.args.pretty))
sys.exit(0)
def get_config(self):
"""
Reads the settings from the gce.ini file.
Populates a SafeConfigParser object with defaults and
attempts to read an .ini-style configuration from the filename
specified in GCE_INI_PATH. If the environment variable is
not present, the filename defaults to gce.ini in the current
working directory.
"""
gce_ini_default_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "gce.ini")
gce_ini_path = os.environ.get('GCE_INI_PATH', gce_ini_default_path)
# Create a ConfigParser.
# This provides empty defaults to each key, so that environment
# variable configuration (as opposed to INI configuration) is able
# to work.
config = configparser.SafeConfigParser(defaults={
'gce_service_account_email_address': '',
'gce_service_account_pem_file_path': '',
'gce_project_id': '',
'gce_zone': '',
'libcloud_secrets': '',
'inventory_ip_type': '',
'cache_path': '~/.ansible/tmp',
'cache_max_age': '300'
})
if 'gce' not in config.sections():
config.add_section('gce')
if 'inventory' not in config.sections():
config.add_section('inventory')
if 'cache' not in config.sections():
config.add_section('cache')
config.read(gce_ini_path)
#########
# Section added for processing ini settings
#########
# Set the instance_states filter based on config file options
self.instance_states = []
if config.has_option('gce', 'instance_states'):
states = config.get('gce', 'instance_states')
# Ignore if instance_states is an empty string.
if states:
self.instance_states = states.split(',')
# Caching
cache_path = config.get('cache', 'cache_path')
cache_max_age = config.getint('cache', 'cache_max_age')
        # TODO(supertom): support project-specific caches
cache_name = 'ansible-gce.cache'
self.cache = CloudInventoryCache(cache_path=cache_path,
cache_max_age=cache_max_age,
cache_name=cache_name)
return config
def get_inventory_options(self):
"""Determine inventory options. Environment variables always
take precedence over configuration files."""
ip_type = self.config.get('inventory', 'inventory_ip_type')
# If the appropriate environment variables are set, they override
# other configuration
ip_type = os.environ.get('INVENTORY_IP_TYPE', ip_type)
return ip_type
def get_gce_driver(self):
"""Determine the GCE authorization settings and return a
libcloud driver.
"""
# Attempt to get GCE params from a configuration file, if one
# exists.
secrets_path = self.config.get('gce', 'libcloud_secrets')
secrets_found = False
try:
import secrets
args = list(secrets.GCE_PARAMS)
kwargs = secrets.GCE_KEYWORD_PARAMS
secrets_found = True
        except (ImportError, AttributeError):
            pass
if not secrets_found and secrets_path:
if not secrets_path.endswith('secrets.py'):
err = "Must specify libcloud secrets file as "
err += "/absolute/path/to/secrets.py"
sys.exit(err)
sys.path.append(os.path.dirname(secrets_path))
try:
import secrets
args = list(getattr(secrets, 'GCE_PARAMS', []))
kwargs = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
secrets_found = True
            except ImportError:
                pass
if not secrets_found:
args = [
self.config.get('gce', 'gce_service_account_email_address'),
self.config.get('gce', 'gce_service_account_pem_file_path')
]
kwargs = {'project': self.config.get('gce', 'gce_project_id'),
'datacenter': self.config.get('gce', 'gce_zone')}
# If the appropriate environment variables are set, they override
# other configuration; process those into our args and kwargs.
args[0] = os.environ.get('GCE_EMAIL', args[0])
args[1] = os.environ.get('GCE_PEM_FILE_PATH', args[1])
args[1] = os.environ.get('GCE_CREDENTIALS_FILE_PATH', args[1])
kwargs['project'] = os.environ.get('GCE_PROJECT', kwargs['project'])
kwargs['datacenter'] = os.environ.get('GCE_ZONE', kwargs['datacenter'])
# Retrieve and return the GCE driver.
gce = get_driver(Provider.GCE)(*args, **kwargs)
gce.connection.user_agent_append(
'%s/%s' % (USER_AGENT_PRODUCT, USER_AGENT_VERSION),
)
return gce
def parse_env_zones(self):
        '''Returns a list of zones parsed from the comma-separated GCE_ZONE
        environment variable. If provided, this is used to filter the results
        of the grouped_instances call.'''
import csv
reader = csv.reader([os.environ.get('GCE_ZONE', "")], skipinitialspace=True)
zones = [r for r in reader]
return [z for z in zones[0]]
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(
description='Produce an Ansible Inventory file based on GCE')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all information about an instance')
parser.add_argument('--pretty', action='store_true', default=False,
help='Pretty format (default: False)')
parser.add_argument(
'--refresh-cache', action='store_true', default=False,
help='Force refresh of cache by making API requests (default: False - use cache files)')
self.args = parser.parse_args()
def node_to_dict(self, inst):
md = {}
if inst is None:
return {}
if 'items' in inst.extra['metadata']:
for entry in inst.extra['metadata']['items']:
md[entry['key']] = entry['value']
net = inst.extra['networkInterfaces'][0]['network'].split('/')[-1]
subnet = None
if 'subnetwork' in inst.extra['networkInterfaces'][0]:
subnet = inst.extra['networkInterfaces'][0]['subnetwork'].split('/')[-1]
        # Default to external IP unless the user has specified they prefer internal
if self.ip_type == 'internal':
ssh_host = inst.private_ips[0]
else:
ssh_host = inst.public_ips[0] if len(inst.public_ips) >= 1 else inst.private_ips[0]
return {
'gce_uuid': inst.uuid,
'gce_id': inst.id,
'gce_image': inst.image,
'gce_machine_type': inst.size,
'gce_private_ip': inst.private_ips[0],
'gce_public_ip': inst.public_ips[0] if len(inst.public_ips) >= 1 else None,
'gce_name': inst.name,
'gce_description': inst.extra['description'],
'gce_status': inst.extra['status'],
'gce_zone': inst.extra['zone'].name,
'gce_tags': inst.extra['tags'],
'gce_metadata': md,
'gce_network': net,
'gce_subnetwork': subnet,
# Hosts don't have a public name, so we add an IP
'ansible_ssh_host': ssh_host
}
def load_inventory_from_cache(self):
''' Loads inventory from JSON on disk. '''
try:
self.inventory = self.cache.get_all_data_from_cache()
hosts = self.inventory['_meta']['hostvars']
except Exception as e:
print(
"Invalid inventory file %s. Please rebuild with -refresh-cache option."
% (self.cache.cache_path_cache))
raise
def do_api_calls_update_cache(self):
''' Do API calls and save data in cache. '''
zones = self.parse_env_zones()
data = self.group_instances(zones)
self.cache.write_to_cache(data)
self.inventory = data
def list_nodes(self):
all_nodes = []
params, more_results = {'maxResults': 500}, True
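        # libcloud reads request parameters from connection.gce_params and,
        # when more pages remain, writes the next 'pageToken' back into the
        # same dict, so the loop stops once no pageToken comes back.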
while more_results:
self.driver.connection.gce_params = params
all_nodes.extend(self.driver.list_nodes())
more_results = 'pageToken' in params
return all_nodes
def group_instances(self, zones=None):
'''Group all instances'''
groups = {}
meta = {}
meta["hostvars"] = {}
for node in self.list_nodes():
# This check filters on the desired instance states defined in the
# config file with the instance_states config option.
#
# If the instance_states list is _empty_ then _ALL_ states are returned.
#
# If the instance_states list is _populated_ then check the current
# state against the instance_states list
            if self.instance_states and node.extra['status'] not in self.instance_states:
continue
name = node.name
meta["hostvars"][name] = self.node_to_dict(node)
zone = node.extra['zone'].name
# To avoid making multiple requests per zone
# we list all nodes and then filter the results
if zones and zone not in zones:
continue
if zone in groups:
groups[zone].append(name)
else:
groups[zone] = [name]
tags = node.extra['tags']
for t in tags:
if t.startswith('group-'):
tag = t[6:]
else:
tag = 'tag_%s' % t
if tag in groups:
groups[tag].append(name)
else:
groups[tag] = [name]
net = node.extra['networkInterfaces'][0]['network'].split('/')[-1]
net = 'network_%s' % net
if net in groups:
groups[net].append(name)
else:
groups[net] = [name]
machine_type = node.size
if machine_type in groups:
groups[machine_type].append(name)
else:
groups[machine_type] = [name]
            image = node.image or 'persistent_disk'
if image in groups:
groups[image].append(name)
else:
groups[image] = [name]
status = node.extra['status']
stat = 'status_%s' % status.lower()
if stat in groups:
groups[stat].append(name)
else:
groups[stat] = [name]
for private_ip in node.private_ips:
groups[private_ip] = [name]
if len(node.public_ips) >= 1:
for public_ip in node.public_ips:
groups[public_ip] = [name]
groups["_meta"] = meta
return groups
def json_format_dict(self, data, pretty=False):
''' Converts a dict to a JSON object and dumps it as a formatted
string '''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
# Run the script
if __name__ == '__main__':
GceInventory()
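# Illustrative invocations (hypothetical paths and host names):
#   GCE_INI_PATH=/etc/ansible/gce.ini ./gce.py --list --pretty
#   ./gce.py --host my-instance        # hostvars for a single instance
#   ./gce.py --refresh-cache --list    # bypass and rebuild the cache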
|
gpl-3.0
|
mozilla/stoneridge
|
python/src/Lib/plat-mac/Carbon/Appearance.py
|
81
|
27268
|
# Generated from 'Appearance.h'
def FOUR_CHAR_CODE(x): return x
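# In the original C header, FOUR_CHAR_CODE packs four characters into an
# OSType integer; this Python port keeps it as the identity function, so
# the constants below remain plain four-character strings.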
kAppearanceEventClass = FOUR_CHAR_CODE('appr')
kAEAppearanceChanged = FOUR_CHAR_CODE('thme')
kAESystemFontChanged = FOUR_CHAR_CODE('sysf')
kAESmallSystemFontChanged = FOUR_CHAR_CODE('ssfn')
kAEViewsFontChanged = FOUR_CHAR_CODE('vfnt')
kThemeDataFileType = FOUR_CHAR_CODE('thme')
kThemePlatinumFileType = FOUR_CHAR_CODE('pltn')
kThemeCustomThemesFileType = FOUR_CHAR_CODE('scen')
kThemeSoundTrackFileType = FOUR_CHAR_CODE('tsnd')
kThemeBrushDialogBackgroundActive = 1
kThemeBrushDialogBackgroundInactive = 2
kThemeBrushAlertBackgroundActive = 3
kThemeBrushAlertBackgroundInactive = 4
kThemeBrushModelessDialogBackgroundActive = 5
kThemeBrushModelessDialogBackgroundInactive = 6
kThemeBrushUtilityWindowBackgroundActive = 7
kThemeBrushUtilityWindowBackgroundInactive = 8
kThemeBrushListViewSortColumnBackground = 9
kThemeBrushListViewBackground = 10
kThemeBrushIconLabelBackground = 11
kThemeBrushListViewSeparator = 12
kThemeBrushChasingArrows = 13
kThemeBrushDragHilite = 14
kThemeBrushDocumentWindowBackground = 15
kThemeBrushFinderWindowBackground = 16
kThemeBrushScrollBarDelimiterActive = 17
kThemeBrushScrollBarDelimiterInactive = 18
kThemeBrushFocusHighlight = 19
kThemeBrushPopupArrowActive = 20
kThemeBrushPopupArrowPressed = 21
kThemeBrushPopupArrowInactive = 22
kThemeBrushAppleGuideCoachmark = 23
kThemeBrushIconLabelBackgroundSelected = 24
kThemeBrushStaticAreaFill = 25
kThemeBrushActiveAreaFill = 26
kThemeBrushButtonFrameActive = 27
kThemeBrushButtonFrameInactive = 28
kThemeBrushButtonFaceActive = 29
kThemeBrushButtonFaceInactive = 30
kThemeBrushButtonFacePressed = 31
kThemeBrushButtonActiveDarkShadow = 32
kThemeBrushButtonActiveDarkHighlight = 33
kThemeBrushButtonActiveLightShadow = 34
kThemeBrushButtonActiveLightHighlight = 35
kThemeBrushButtonInactiveDarkShadow = 36
kThemeBrushButtonInactiveDarkHighlight = 37
kThemeBrushButtonInactiveLightShadow = 38
kThemeBrushButtonInactiveLightHighlight = 39
kThemeBrushButtonPressedDarkShadow = 40
kThemeBrushButtonPressedDarkHighlight = 41
kThemeBrushButtonPressedLightShadow = 42
kThemeBrushButtonPressedLightHighlight = 43
kThemeBrushBevelActiveLight = 44
kThemeBrushBevelActiveDark = 45
kThemeBrushBevelInactiveLight = 46
kThemeBrushBevelInactiveDark = 47
kThemeBrushNotificationWindowBackground = 48
kThemeBrushMovableModalBackground = 49
kThemeBrushSheetBackgroundOpaque = 50
kThemeBrushDrawerBackground = 51
kThemeBrushToolbarBackground = 52
kThemeBrushSheetBackgroundTransparent = 53
kThemeBrushMenuBackground = 54
kThemeBrushMenuBackgroundSelected = 55
kThemeBrushSheetBackground = kThemeBrushSheetBackgroundOpaque
kThemeBrushBlack = -1
kThemeBrushWhite = -2
kThemeBrushPrimaryHighlightColor = -3
kThemeBrushSecondaryHighlightColor = -4
kThemeTextColorDialogActive = 1
kThemeTextColorDialogInactive = 2
kThemeTextColorAlertActive = 3
kThemeTextColorAlertInactive = 4
kThemeTextColorModelessDialogActive = 5
kThemeTextColorModelessDialogInactive = 6
kThemeTextColorWindowHeaderActive = 7
kThemeTextColorWindowHeaderInactive = 8
kThemeTextColorPlacardActive = 9
kThemeTextColorPlacardInactive = 10
kThemeTextColorPlacardPressed = 11
kThemeTextColorPushButtonActive = 12
kThemeTextColorPushButtonInactive = 13
kThemeTextColorPushButtonPressed = 14
kThemeTextColorBevelButtonActive = 15
kThemeTextColorBevelButtonInactive = 16
kThemeTextColorBevelButtonPressed = 17
kThemeTextColorPopupButtonActive = 18
kThemeTextColorPopupButtonInactive = 19
kThemeTextColorPopupButtonPressed = 20
kThemeTextColorIconLabel = 21
kThemeTextColorListView = 22
kThemeTextColorDocumentWindowTitleActive = 23
kThemeTextColorDocumentWindowTitleInactive = 24
kThemeTextColorMovableModalWindowTitleActive = 25
kThemeTextColorMovableModalWindowTitleInactive = 26
kThemeTextColorUtilityWindowTitleActive = 27
kThemeTextColorUtilityWindowTitleInactive = 28
kThemeTextColorPopupWindowTitleActive = 29
kThemeTextColorPopupWindowTitleInactive = 30
kThemeTextColorRootMenuActive = 31
kThemeTextColorRootMenuSelected = 32
kThemeTextColorRootMenuDisabled = 33
kThemeTextColorMenuItemActive = 34
kThemeTextColorMenuItemSelected = 35
kThemeTextColorMenuItemDisabled = 36
kThemeTextColorPopupLabelActive = 37
kThemeTextColorPopupLabelInactive = 38
kThemeTextColorTabFrontActive = 39
kThemeTextColorTabNonFrontActive = 40
kThemeTextColorTabNonFrontPressed = 41
kThemeTextColorTabFrontInactive = 42
kThemeTextColorTabNonFrontInactive = 43
kThemeTextColorIconLabelSelected = 44
kThemeTextColorBevelButtonStickyActive = 45
kThemeTextColorBevelButtonStickyInactive = 46
kThemeTextColorNotification = 47
kThemeTextColorBlack = -1
kThemeTextColorWhite = -2
kThemeStateInactive = 0
kThemeStateActive = 1
kThemeStatePressed = 2
kThemeStateRollover = 6
kThemeStateUnavailable = 7
kThemeStateUnavailableInactive = 8
kThemeStateDisabled = 0
kThemeStatePressedUp = 2
kThemeStatePressedDown = 3
kThemeArrowCursor = 0
kThemeCopyArrowCursor = 1
kThemeAliasArrowCursor = 2
kThemeContextualMenuArrowCursor = 3
kThemeIBeamCursor = 4
kThemeCrossCursor = 5
kThemePlusCursor = 6
kThemeWatchCursor = 7
kThemeClosedHandCursor = 8
kThemeOpenHandCursor = 9
kThemePointingHandCursor = 10
kThemeCountingUpHandCursor = 11
kThemeCountingDownHandCursor = 12
kThemeCountingUpAndDownHandCursor = 13
kThemeSpinningCursor = 14
kThemeResizeLeftCursor = 15
kThemeResizeRightCursor = 16
kThemeResizeLeftRightCursor = 17
kThemeMenuBarNormal = 0
kThemeMenuBarSelected = 1
kThemeMenuSquareMenuBar = (1 << 0)
kThemeMenuActive = 0
kThemeMenuSelected = 1
kThemeMenuDisabled = 3
kThemeMenuTypePullDown = 0
kThemeMenuTypePopUp = 1
kThemeMenuTypeHierarchical = 2
kThemeMenuTypeInactive = 0x0100
kThemeMenuItemPlain = 0
kThemeMenuItemHierarchical = 1
kThemeMenuItemScrollUpArrow = 2
kThemeMenuItemScrollDownArrow = 3
kThemeMenuItemAtTop = 0x0100
kThemeMenuItemAtBottom = 0x0200
kThemeMenuItemHierBackground = 0x0400
kThemeMenuItemPopUpBackground = 0x0800
kThemeMenuItemHasIcon = 0x8000
kThemeMenuItemNoBackground = 0x4000
kThemeBackgroundTabPane = 1
kThemeBackgroundPlacard = 2
kThemeBackgroundWindowHeader = 3
kThemeBackgroundListViewWindowHeader = 4
kThemeBackgroundSecondaryGroupBox = 5
kThemeNameTag = FOUR_CHAR_CODE('name')
kThemeVariantNameTag = FOUR_CHAR_CODE('varn')
kThemeVariantBaseTintTag = FOUR_CHAR_CODE('tint')
kThemeHighlightColorTag = FOUR_CHAR_CODE('hcol')
kThemeScrollBarArrowStyleTag = FOUR_CHAR_CODE('sbar')
kThemeScrollBarThumbStyleTag = FOUR_CHAR_CODE('sbth')
kThemeSoundsEnabledTag = FOUR_CHAR_CODE('snds')
kThemeDblClickCollapseTag = FOUR_CHAR_CODE('coll')
kThemeAppearanceFileNameTag = FOUR_CHAR_CODE('thme')
kThemeSystemFontTag = FOUR_CHAR_CODE('lgsf')
kThemeSmallSystemFontTag = FOUR_CHAR_CODE('smsf')
kThemeViewsFontTag = FOUR_CHAR_CODE('vfnt')
kThemeViewsFontSizeTag = FOUR_CHAR_CODE('vfsz')
kThemeDesktopPatternNameTag = FOUR_CHAR_CODE('patn')
kThemeDesktopPatternTag = FOUR_CHAR_CODE('patt')
kThemeDesktopPictureNameTag = FOUR_CHAR_CODE('dpnm')
kThemeDesktopPictureAliasTag = FOUR_CHAR_CODE('dpal')
kThemeDesktopPictureAlignmentTag = FOUR_CHAR_CODE('dpan')
kThemeHighlightColorNameTag = FOUR_CHAR_CODE('hcnm')
kThemeExamplePictureIDTag = FOUR_CHAR_CODE('epic')
kThemeSoundTrackNameTag = FOUR_CHAR_CODE('sndt')
kThemeSoundMaskTag = FOUR_CHAR_CODE('smsk')
kThemeUserDefinedTag = FOUR_CHAR_CODE('user')
kThemeSmoothFontEnabledTag = FOUR_CHAR_CODE('smoo')
kThemeSmoothFontMinSizeTag = FOUR_CHAR_CODE('smos')
kTiledOnScreen = 1
kCenterOnScreen = 2
kFitToScreen = 3
kFillScreen = 4
kUseBestGuess = 5
kThemeCheckBoxClassicX = 0
kThemeCheckBoxCheckMark = 1
kThemeScrollBarArrowsSingle = 0
kThemeScrollBarArrowsLowerRight = 1
kThemeScrollBarThumbNormal = 0
kThemeScrollBarThumbProportional = 1
kThemeSystemFont = 0
kThemeSmallSystemFont = 1
kThemeSmallEmphasizedSystemFont = 2
kThemeViewsFont = 3
kThemeEmphasizedSystemFont = 4
kThemeApplicationFont = 5
kThemeLabelFont = 6
kThemeMenuTitleFont = 100
kThemeMenuItemFont = 101
kThemeMenuItemMarkFont = 102
kThemeMenuItemCmdKeyFont = 103
kThemeWindowTitleFont = 104
kThemePushButtonFont = 105
kThemeUtilityWindowTitleFont = 106
kThemeAlertHeaderFont = 107
kThemeCurrentPortFont = 200
kThemeTabNonFront = 0
kThemeTabNonFrontPressed = 1
kThemeTabNonFrontInactive = 2
kThemeTabFront = 3
kThemeTabFrontInactive = 4
kThemeTabNonFrontUnavailable = 5
kThemeTabFrontUnavailable = 6
kThemeTabNorth = 0
kThemeTabSouth = 1
kThemeTabEast = 2
kThemeTabWest = 3
kThemeSmallTabHeight = 16
kThemeLargeTabHeight = 21
kThemeTabPaneOverlap = 3
kThemeSmallTabHeightMax = 19
kThemeLargeTabHeightMax = 24
kThemeMediumScrollBar = 0
kThemeSmallScrollBar = 1
kThemeMediumSlider = 2
kThemeMediumProgressBar = 3
kThemeMediumIndeterminateBar = 4
kThemeRelevanceBar = 5
kThemeSmallSlider = 6
kThemeLargeProgressBar = 7
kThemeLargeIndeterminateBar = 8
kThemeTrackActive = 0
kThemeTrackDisabled = 1
kThemeTrackNothingToScroll = 2
kThemeTrackInactive = 3
kThemeLeftOutsideArrowPressed = 0x01
kThemeLeftInsideArrowPressed = 0x02
kThemeLeftTrackPressed = 0x04
kThemeThumbPressed = 0x08
kThemeRightTrackPressed = 0x10
kThemeRightInsideArrowPressed = 0x20
kThemeRightOutsideArrowPressed = 0x40
kThemeTopOutsideArrowPressed = kThemeLeftOutsideArrowPressed
kThemeTopInsideArrowPressed = kThemeLeftInsideArrowPressed
kThemeTopTrackPressed = kThemeLeftTrackPressed
kThemeBottomTrackPressed = kThemeRightTrackPressed
kThemeBottomInsideArrowPressed = kThemeRightInsideArrowPressed
kThemeBottomOutsideArrowPressed = kThemeRightOutsideArrowPressed
kThemeThumbPlain = 0
kThemeThumbUpward = 1
kThemeThumbDownward = 2
kThemeTrackHorizontal = (1 << 0)
kThemeTrackRightToLeft = (1 << 1)
kThemeTrackShowThumb = (1 << 2)
kThemeTrackThumbRgnIsNotGhost = (1 << 3)
kThemeTrackNoScrollBarArrows = (1 << 4)
kThemeWindowHasGrow = (1 << 0)
kThemeWindowHasHorizontalZoom = (1 << 3)
kThemeWindowHasVerticalZoom = (1 << 4)
kThemeWindowHasFullZoom = kThemeWindowHasHorizontalZoom + kThemeWindowHasVerticalZoom
kThemeWindowHasCloseBox = (1 << 5)
kThemeWindowHasCollapseBox = (1 << 6)
kThemeWindowHasTitleText = (1 << 7)
kThemeWindowIsCollapsed = (1 << 8)
kThemeWindowHasDirty = (1 << 9)
kThemeDocumentWindow = 0
kThemeDialogWindow = 1
kThemeMovableDialogWindow = 2
kThemeAlertWindow = 3
kThemeMovableAlertWindow = 4
kThemePlainDialogWindow = 5
kThemeShadowDialogWindow = 6
kThemePopupWindow = 7
kThemeUtilityWindow = 8
kThemeUtilitySideWindow = 9
kThemeSheetWindow = 10
kThemeDrawerWindow = 11
kThemeWidgetCloseBox = 0
kThemeWidgetZoomBox = 1
kThemeWidgetCollapseBox = 2
kThemeWidgetDirtyCloseBox = 6
kThemeArrowLeft = 0
kThemeArrowDown = 1
kThemeArrowRight = 2
kThemeArrowUp = 3
kThemeArrow3pt = 0
kThemeArrow5pt = 1
kThemeArrow7pt = 2
kThemeArrow9pt = 3
kThemeGrowLeft = (1 << 0)
kThemeGrowRight = (1 << 1)
kThemeGrowUp = (1 << 2)
kThemeGrowDown = (1 << 3)
kThemePushButton = 0
kThemeCheckBox = 1
kThemeRadioButton = 2
kThemeBevelButton = 3
kThemeArrowButton = 4
kThemePopupButton = 5
kThemeDisclosureButton = 6
kThemeIncDecButton = 7
kThemeSmallBevelButton = 8
kThemeMediumBevelButton = 3
kThemeLargeBevelButton = 9
kThemeListHeaderButton = 10
kThemeRoundButton = 11
kThemeLargeRoundButton = 12
kThemeSmallCheckBox = 13
kThemeSmallRadioButton = 14
kThemeRoundedBevelButton = 15
kThemeNormalCheckBox = kThemeCheckBox
kThemeNormalRadioButton = kThemeRadioButton
kThemeButtonOff = 0
kThemeButtonOn = 1
kThemeButtonMixed = 2
kThemeDisclosureRight = 0
kThemeDisclosureDown = 1
kThemeDisclosureLeft = 2
kThemeAdornmentNone = 0
kThemeAdornmentDefault = (1 << 0)
kThemeAdornmentFocus = (1 << 2)
kThemeAdornmentRightToLeft = (1 << 4)
kThemeAdornmentDrawIndicatorOnly = (1 << 5)
kThemeAdornmentHeaderButtonLeftNeighborSelected = (1 << 6)
kThemeAdornmentHeaderButtonRightNeighborSelected = (1 << 7)
kThemeAdornmentHeaderButtonSortUp = (1 << 8)
kThemeAdornmentHeaderMenuButton = (1 << 9)
kThemeAdornmentHeaderButtonNoShadow = (1 << 10)
kThemeAdornmentHeaderButtonShadowOnly = (1 << 11)
kThemeAdornmentNoShadow = kThemeAdornmentHeaderButtonNoShadow
kThemeAdornmentShadowOnly = kThemeAdornmentHeaderButtonShadowOnly
kThemeAdornmentArrowLeftArrow = (1 << 6)
kThemeAdornmentArrowDownArrow = (1 << 7)
kThemeAdornmentArrowDoubleArrow = (1 << 8)
kThemeAdornmentArrowUpArrow = (1 << 9)
kThemeNoSounds = 0
kThemeWindowSoundsMask = (1 << 0)
kThemeMenuSoundsMask = (1 << 1)
kThemeControlSoundsMask = (1 << 2)
kThemeFinderSoundsMask = (1 << 3)
kThemeDragSoundNone = 0
kThemeDragSoundMoveWindow = FOUR_CHAR_CODE('wmov')
kThemeDragSoundGrowWindow = FOUR_CHAR_CODE('wgro')
kThemeDragSoundMoveUtilWindow = FOUR_CHAR_CODE('umov')
kThemeDragSoundGrowUtilWindow = FOUR_CHAR_CODE('ugro')
kThemeDragSoundMoveDialog = FOUR_CHAR_CODE('dmov')
kThemeDragSoundMoveAlert = FOUR_CHAR_CODE('amov')
kThemeDragSoundMoveIcon = FOUR_CHAR_CODE('imov')
kThemeDragSoundSliderThumb = FOUR_CHAR_CODE('slth')
kThemeDragSoundSliderGhost = FOUR_CHAR_CODE('slgh')
kThemeDragSoundScrollBarThumb = FOUR_CHAR_CODE('sbth')
kThemeDragSoundScrollBarGhost = FOUR_CHAR_CODE('sbgh')
kThemeDragSoundScrollBarArrowDecreasing = FOUR_CHAR_CODE('sbad')
kThemeDragSoundScrollBarArrowIncreasing = FOUR_CHAR_CODE('sbai')
kThemeDragSoundDragging = FOUR_CHAR_CODE('drag')
kThemeSoundNone = 0
kThemeSoundMenuOpen = FOUR_CHAR_CODE('mnuo')
kThemeSoundMenuClose = FOUR_CHAR_CODE('mnuc')
kThemeSoundMenuItemHilite = FOUR_CHAR_CODE('mnui')
kThemeSoundMenuItemRelease = FOUR_CHAR_CODE('mnus')
kThemeSoundWindowClosePress = FOUR_CHAR_CODE('wclp')
kThemeSoundWindowCloseEnter = FOUR_CHAR_CODE('wcle')
kThemeSoundWindowCloseExit = FOUR_CHAR_CODE('wclx')
kThemeSoundWindowCloseRelease = FOUR_CHAR_CODE('wclr')
kThemeSoundWindowZoomPress = FOUR_CHAR_CODE('wzmp')
kThemeSoundWindowZoomEnter = FOUR_CHAR_CODE('wzme')
kThemeSoundWindowZoomExit = FOUR_CHAR_CODE('wzmx')
kThemeSoundWindowZoomRelease = FOUR_CHAR_CODE('wzmr')
kThemeSoundWindowCollapsePress = FOUR_CHAR_CODE('wcop')
kThemeSoundWindowCollapseEnter = FOUR_CHAR_CODE('wcoe')
kThemeSoundWindowCollapseExit = FOUR_CHAR_CODE('wcox')
kThemeSoundWindowCollapseRelease = FOUR_CHAR_CODE('wcor')
kThemeSoundWindowDragBoundary = FOUR_CHAR_CODE('wdbd')
kThemeSoundUtilWinClosePress = FOUR_CHAR_CODE('uclp')
kThemeSoundUtilWinCloseEnter = FOUR_CHAR_CODE('ucle')
kThemeSoundUtilWinCloseExit = FOUR_CHAR_CODE('uclx')
kThemeSoundUtilWinCloseRelease = FOUR_CHAR_CODE('uclr')
kThemeSoundUtilWinZoomPress = FOUR_CHAR_CODE('uzmp')
kThemeSoundUtilWinZoomEnter = FOUR_CHAR_CODE('uzme')
kThemeSoundUtilWinZoomExit = FOUR_CHAR_CODE('uzmx')
kThemeSoundUtilWinZoomRelease = FOUR_CHAR_CODE('uzmr')
kThemeSoundUtilWinCollapsePress = FOUR_CHAR_CODE('ucop')
kThemeSoundUtilWinCollapseEnter = FOUR_CHAR_CODE('ucoe')
kThemeSoundUtilWinCollapseExit = FOUR_CHAR_CODE('ucox')
kThemeSoundUtilWinCollapseRelease = FOUR_CHAR_CODE('ucor')
kThemeSoundUtilWinDragBoundary = FOUR_CHAR_CODE('udbd')
kThemeSoundWindowOpen = FOUR_CHAR_CODE('wopn')
kThemeSoundWindowClose = FOUR_CHAR_CODE('wcls')
kThemeSoundWindowZoomIn = FOUR_CHAR_CODE('wzmi')
kThemeSoundWindowZoomOut = FOUR_CHAR_CODE('wzmo')
kThemeSoundWindowCollapseUp = FOUR_CHAR_CODE('wcol')
kThemeSoundWindowCollapseDown = FOUR_CHAR_CODE('wexp')
kThemeSoundWindowActivate = FOUR_CHAR_CODE('wact')
kThemeSoundUtilWindowOpen = FOUR_CHAR_CODE('uopn')
kThemeSoundUtilWindowClose = FOUR_CHAR_CODE('ucls')
kThemeSoundUtilWindowZoomIn = FOUR_CHAR_CODE('uzmi')
kThemeSoundUtilWindowZoomOut = FOUR_CHAR_CODE('uzmo')
kThemeSoundUtilWindowCollapseUp = FOUR_CHAR_CODE('ucol')
kThemeSoundUtilWindowCollapseDown = FOUR_CHAR_CODE('uexp')
kThemeSoundUtilWindowActivate = FOUR_CHAR_CODE('uact')
kThemeSoundDialogOpen = FOUR_CHAR_CODE('dopn')
kThemeSoundDialogClose = FOUR_CHAR_CODE('dlgc')
kThemeSoundAlertOpen = FOUR_CHAR_CODE('aopn')
kThemeSoundAlertClose = FOUR_CHAR_CODE('altc')
kThemeSoundPopupWindowOpen = FOUR_CHAR_CODE('pwop')
kThemeSoundPopupWindowClose = FOUR_CHAR_CODE('pwcl')
kThemeSoundButtonPress = FOUR_CHAR_CODE('btnp')
kThemeSoundButtonEnter = FOUR_CHAR_CODE('btne')
kThemeSoundButtonExit = FOUR_CHAR_CODE('btnx')
kThemeSoundButtonRelease = FOUR_CHAR_CODE('btnr')
kThemeSoundDefaultButtonPress = FOUR_CHAR_CODE('dbtp')
kThemeSoundDefaultButtonEnter = FOUR_CHAR_CODE('dbte')
kThemeSoundDefaultButtonExit = FOUR_CHAR_CODE('dbtx')
kThemeSoundDefaultButtonRelease = FOUR_CHAR_CODE('dbtr')
kThemeSoundCancelButtonPress = FOUR_CHAR_CODE('cbtp')
kThemeSoundCancelButtonEnter = FOUR_CHAR_CODE('cbte')
kThemeSoundCancelButtonExit = FOUR_CHAR_CODE('cbtx')
kThemeSoundCancelButtonRelease = FOUR_CHAR_CODE('cbtr')
kThemeSoundCheckboxPress = FOUR_CHAR_CODE('chkp')
kThemeSoundCheckboxEnter = FOUR_CHAR_CODE('chke')
kThemeSoundCheckboxExit = FOUR_CHAR_CODE('chkx')
kThemeSoundCheckboxRelease = FOUR_CHAR_CODE('chkr')
kThemeSoundRadioPress = FOUR_CHAR_CODE('radp')
kThemeSoundRadioEnter = FOUR_CHAR_CODE('rade')
kThemeSoundRadioExit = FOUR_CHAR_CODE('radx')
kThemeSoundRadioRelease = FOUR_CHAR_CODE('radr')
kThemeSoundScrollArrowPress = FOUR_CHAR_CODE('sbap')
kThemeSoundScrollArrowEnter = FOUR_CHAR_CODE('sbae')
kThemeSoundScrollArrowExit = FOUR_CHAR_CODE('sbax')
kThemeSoundScrollArrowRelease = FOUR_CHAR_CODE('sbar')
kThemeSoundScrollEndOfTrack = FOUR_CHAR_CODE('sbte')
kThemeSoundScrollTrackPress = FOUR_CHAR_CODE('sbtp')
kThemeSoundSliderEndOfTrack = FOUR_CHAR_CODE('slte')
kThemeSoundSliderTrackPress = FOUR_CHAR_CODE('sltp')
kThemeSoundBalloonOpen = FOUR_CHAR_CODE('blno')
kThemeSoundBalloonClose = FOUR_CHAR_CODE('blnc')
kThemeSoundBevelPress = FOUR_CHAR_CODE('bevp')
kThemeSoundBevelEnter = FOUR_CHAR_CODE('beve')
kThemeSoundBevelExit = FOUR_CHAR_CODE('bevx')
kThemeSoundBevelRelease = FOUR_CHAR_CODE('bevr')
kThemeSoundLittleArrowUpPress = FOUR_CHAR_CODE('laup')
kThemeSoundLittleArrowDnPress = FOUR_CHAR_CODE('ladp')
kThemeSoundLittleArrowEnter = FOUR_CHAR_CODE('lare')
kThemeSoundLittleArrowExit = FOUR_CHAR_CODE('larx')
kThemeSoundLittleArrowUpRelease = FOUR_CHAR_CODE('laur')
kThemeSoundLittleArrowDnRelease = FOUR_CHAR_CODE('ladr')
kThemeSoundPopupPress = FOUR_CHAR_CODE('popp')
kThemeSoundPopupEnter = FOUR_CHAR_CODE('pope')
kThemeSoundPopupExit = FOUR_CHAR_CODE('popx')
kThemeSoundPopupRelease = FOUR_CHAR_CODE('popr')
kThemeSoundDisclosurePress = FOUR_CHAR_CODE('dscp')
kThemeSoundDisclosureEnter = FOUR_CHAR_CODE('dsce')
kThemeSoundDisclosureExit = FOUR_CHAR_CODE('dscx')
kThemeSoundDisclosureRelease = FOUR_CHAR_CODE('dscr')
kThemeSoundTabPressed = FOUR_CHAR_CODE('tabp')
kThemeSoundTabEnter = FOUR_CHAR_CODE('tabe')
kThemeSoundTabExit = FOUR_CHAR_CODE('tabx')
kThemeSoundTabRelease = FOUR_CHAR_CODE('tabr')
kThemeSoundDragTargetHilite = FOUR_CHAR_CODE('dthi')
kThemeSoundDragTargetUnhilite = FOUR_CHAR_CODE('dtuh')
kThemeSoundDragTargetDrop = FOUR_CHAR_CODE('dtdr')
kThemeSoundEmptyTrash = FOUR_CHAR_CODE('ftrs')
kThemeSoundSelectItem = FOUR_CHAR_CODE('fsel')
kThemeSoundNewItem = FOUR_CHAR_CODE('fnew')
kThemeSoundReceiveDrop = FOUR_CHAR_CODE('fdrp')
kThemeSoundCopyDone = FOUR_CHAR_CODE('fcpd')
kThemeSoundResolveAlias = FOUR_CHAR_CODE('fral')
kThemeSoundLaunchApp = FOUR_CHAR_CODE('flap')
kThemeSoundDiskInsert = FOUR_CHAR_CODE('dski')
kThemeSoundDiskEject = FOUR_CHAR_CODE('dske')
kThemeSoundFinderDragOnIcon = FOUR_CHAR_CODE('fdon')
kThemeSoundFinderDragOffIcon = FOUR_CHAR_CODE('fdof')
kThemePopupTabNormalPosition = 0
kThemePopupTabCenterOnWindow = 1
kThemePopupTabCenterOnOffset = 2
kThemeMetricScrollBarWidth = 0
kThemeMetricSmallScrollBarWidth = 1
kThemeMetricCheckBoxHeight = 2
kThemeMetricRadioButtonHeight = 3
kThemeMetricEditTextWhitespace = 4
kThemeMetricEditTextFrameOutset = 5
kThemeMetricListBoxFrameOutset = 6
kThemeMetricFocusRectOutset = 7
kThemeMetricImageWellThickness = 8
kThemeMetricScrollBarOverlap = 9
kThemeMetricLargeTabHeight = 10
kThemeMetricLargeTabCapsWidth = 11
kThemeMetricTabFrameOverlap = 12
kThemeMetricTabIndentOrStyle = 13
kThemeMetricTabOverlap = 14
kThemeMetricSmallTabHeight = 15
kThemeMetricSmallTabCapsWidth = 16
kThemeMetricDisclosureButtonHeight = 17
kThemeMetricRoundButtonSize = 18
kThemeMetricPushButtonHeight = 19
kThemeMetricListHeaderHeight = 20
kThemeMetricSmallCheckBoxHeight = 21
kThemeMetricDisclosureButtonWidth = 22
kThemeMetricSmallDisclosureButtonHeight = 23
kThemeMetricSmallDisclosureButtonWidth = 24
kThemeMetricDisclosureTriangleHeight = 25
kThemeMetricDisclosureTriangleWidth = 26
kThemeMetricLittleArrowsHeight = 27
kThemeMetricLittleArrowsWidth = 28
kThemeMetricPaneSplitterHeight = 29
kThemeMetricPopupButtonHeight = 30
kThemeMetricSmallPopupButtonHeight = 31
kThemeMetricLargeProgressBarThickness = 32
kThemeMetricPullDownHeight = 33
kThemeMetricSmallPullDownHeight = 34
kThemeMetricSmallPushButtonHeight = 35
kThemeMetricSmallRadioButtonHeight = 36
kThemeMetricRelevanceIndicatorHeight = 37
kThemeMetricResizeControlHeight = 38
kThemeMetricSmallResizeControlHeight = 39
kThemeMetricLargeRoundButtonSize = 40
kThemeMetricHSliderHeight = 41
kThemeMetricHSliderTickHeight = 42
kThemeMetricSmallHSliderHeight = 43
kThemeMetricSmallHSliderTickHeight = 44
kThemeMetricVSliderWidth = 45
kThemeMetricVSliderTickWidth = 46
kThemeMetricSmallVSliderWidth = 47
kThemeMetricSmallVSliderTickWidth = 48
kThemeMetricTitleBarControlsHeight = 49
kThemeMetricCheckBoxWidth = 50
kThemeMetricSmallCheckBoxWidth = 51
kThemeMetricRadioButtonWidth = 52
kThemeMetricSmallRadioButtonWidth = 53
kThemeMetricSmallHSliderMinThumbWidth = 54
kThemeMetricSmallVSliderMinThumbHeight = 55
kThemeMetricSmallHSliderTickOffset = 56
kThemeMetricSmallVSliderTickOffset = 57
kThemeMetricNormalProgressBarThickness = 58
kThemeMetricProgressBarShadowOutset = 59
kThemeMetricSmallProgressBarShadowOutset = 60
kThemeMetricPrimaryGroupBoxContentInset = 61
kThemeMetricSecondaryGroupBoxContentInset = 62
kThemeMetricMenuMarkColumnWidth = 63
kThemeMetricMenuExcludedMarkColumnWidth = 64
kThemeMetricMenuMarkIndent = 65
kThemeMetricMenuTextLeadingEdgeMargin = 66
kThemeMetricMenuTextTrailingEdgeMargin = 67
kThemeMetricMenuIndentWidth = 68
kThemeMetricMenuIconTrailingEdgeMargin = 69
# appearanceBadBrushIndexErr = themeInvalidBrushErr
# appearanceProcessRegisteredErr = themeProcessRegisteredErr
# appearanceProcessNotRegisteredErr = themeProcessNotRegisteredErr
# appearanceBadTextColorIndexErr = themeBadTextColorErr
# appearanceThemeHasNoAccents = themeHasNoAccentsErr
# appearanceBadCursorIndexErr = themeBadCursorIndexErr
kThemeActiveDialogBackgroundBrush = kThemeBrushDialogBackgroundActive
kThemeInactiveDialogBackgroundBrush = kThemeBrushDialogBackgroundInactive
kThemeActiveAlertBackgroundBrush = kThemeBrushAlertBackgroundActive
kThemeInactiveAlertBackgroundBrush = kThemeBrushAlertBackgroundInactive
kThemeActiveModelessDialogBackgroundBrush = kThemeBrushModelessDialogBackgroundActive
kThemeInactiveModelessDialogBackgroundBrush = kThemeBrushModelessDialogBackgroundInactive
kThemeActiveUtilityWindowBackgroundBrush = kThemeBrushUtilityWindowBackgroundActive
kThemeInactiveUtilityWindowBackgroundBrush = kThemeBrushUtilityWindowBackgroundInactive
kThemeListViewSortColumnBackgroundBrush = kThemeBrushListViewSortColumnBackground
kThemeListViewBackgroundBrush = kThemeBrushListViewBackground
kThemeIconLabelBackgroundBrush = kThemeBrushIconLabelBackground
kThemeListViewSeparatorBrush = kThemeBrushListViewSeparator
kThemeChasingArrowsBrush = kThemeBrushChasingArrows
kThemeDragHiliteBrush = kThemeBrushDragHilite
kThemeDocumentWindowBackgroundBrush = kThemeBrushDocumentWindowBackground
kThemeFinderWindowBackgroundBrush = kThemeBrushFinderWindowBackground
kThemeActiveScrollBarDelimiterBrush = kThemeBrushScrollBarDelimiterActive
kThemeInactiveScrollBarDelimiterBrush = kThemeBrushScrollBarDelimiterInactive
kThemeFocusHighlightBrush = kThemeBrushFocusHighlight
kThemeActivePopupArrowBrush = kThemeBrushPopupArrowActive
kThemePressedPopupArrowBrush = kThemeBrushPopupArrowPressed
kThemeInactivePopupArrowBrush = kThemeBrushPopupArrowInactive
kThemeAppleGuideCoachmarkBrush = kThemeBrushAppleGuideCoachmark
kThemeActiveDialogTextColor = kThemeTextColorDialogActive
kThemeInactiveDialogTextColor = kThemeTextColorDialogInactive
kThemeActiveAlertTextColor = kThemeTextColorAlertActive
kThemeInactiveAlertTextColor = kThemeTextColorAlertInactive
kThemeActiveModelessDialogTextColor = kThemeTextColorModelessDialogActive
kThemeInactiveModelessDialogTextColor = kThemeTextColorModelessDialogInactive
kThemeActiveWindowHeaderTextColor = kThemeTextColorWindowHeaderActive
kThemeInactiveWindowHeaderTextColor = kThemeTextColorWindowHeaderInactive
kThemeActivePlacardTextColor = kThemeTextColorPlacardActive
kThemeInactivePlacardTextColor = kThemeTextColorPlacardInactive
kThemePressedPlacardTextColor = kThemeTextColorPlacardPressed
kThemeActivePushButtonTextColor = kThemeTextColorPushButtonActive
kThemeInactivePushButtonTextColor = kThemeTextColorPushButtonInactive
kThemePressedPushButtonTextColor = kThemeTextColorPushButtonPressed
kThemeActiveBevelButtonTextColor = kThemeTextColorBevelButtonActive
kThemeInactiveBevelButtonTextColor = kThemeTextColorBevelButtonInactive
kThemePressedBevelButtonTextColor = kThemeTextColorBevelButtonPressed
kThemeActivePopupButtonTextColor = kThemeTextColorPopupButtonActive
kThemeInactivePopupButtonTextColor = kThemeTextColorPopupButtonInactive
kThemePressedPopupButtonTextColor = kThemeTextColorPopupButtonPressed
kThemeIconLabelTextColor = kThemeTextColorIconLabel
kThemeListViewTextColor = kThemeTextColorListView
kThemeActiveDocumentWindowTitleTextColor = kThemeTextColorDocumentWindowTitleActive
kThemeInactiveDocumentWindowTitleTextColor = kThemeTextColorDocumentWindowTitleInactive
kThemeActiveMovableModalWindowTitleTextColor = kThemeTextColorMovableModalWindowTitleActive
kThemeInactiveMovableModalWindowTitleTextColor = kThemeTextColorMovableModalWindowTitleInactive
kThemeActiveUtilityWindowTitleTextColor = kThemeTextColorUtilityWindowTitleActive
kThemeInactiveUtilityWindowTitleTextColor = kThemeTextColorUtilityWindowTitleInactive
kThemeActivePopupWindowTitleColor = kThemeTextColorPopupWindowTitleActive
kThemeInactivePopupWindowTitleColor = kThemeTextColorPopupWindowTitleInactive
kThemeActiveRootMenuTextColor = kThemeTextColorRootMenuActive
kThemeSelectedRootMenuTextColor = kThemeTextColorRootMenuSelected
kThemeDisabledRootMenuTextColor = kThemeTextColorRootMenuDisabled
kThemeActiveMenuItemTextColor = kThemeTextColorMenuItemActive
kThemeSelectedMenuItemTextColor = kThemeTextColorMenuItemSelected
kThemeDisabledMenuItemTextColor = kThemeTextColorMenuItemDisabled
kThemeActivePopupLabelTextColor = kThemeTextColorPopupLabelActive
kThemeInactivePopupLabelTextColor = kThemeTextColorPopupLabelInactive
kAEThemeSwitch = kAEAppearanceChanged
kThemeNoAdornment = kThemeAdornmentNone
kThemeDefaultAdornment = kThemeAdornmentDefault
kThemeFocusAdornment = kThemeAdornmentFocus
kThemeRightToLeftAdornment = kThemeAdornmentRightToLeft
kThemeDrawIndicatorOnly = kThemeAdornmentDrawIndicatorOnly
kThemeBrushPassiveAreaFill = kThemeBrushStaticAreaFill
kThemeMetricCheckBoxGlyphHeight = kThemeMetricCheckBoxHeight
kThemeMetricRadioButtonGlyphHeight = kThemeMetricRadioButtonHeight
kThemeMetricDisclosureButtonSize = kThemeMetricDisclosureButtonHeight
kThemeMetricBestListHeaderHeight = kThemeMetricListHeaderHeight
kThemeMetricSmallProgressBarThickness = kThemeMetricNormalProgressBarThickness
kThemeMetricProgressBarThickness = kThemeMetricLargeProgressBarThickness
kThemeScrollBar = kThemeMediumScrollBar
kThemeSlider = kThemeMediumSlider
kThemeProgressBar = kThemeMediumProgressBar
kThemeIndeterminateBar = kThemeMediumIndeterminateBar
|
mpl-2.0
|
kingmotley/SickRage
|
lib/tornado/test/curl_httpclient_test.py
|
107
|
4477
|
from __future__ import absolute_import, division, print_function, with_statement
from hashlib import md5
from tornado.escape import utf8
from tornado.httpclient import HTTPRequest
from tornado.stack_context import ExceptionStackContext
from tornado.testing import AsyncHTTPTestCase
from tornado.test import httpclient_test
from tornado.test.util import unittest
from tornado.web import Application, RequestHandler
try:
import pycurl
except ImportError:
pycurl = None
if pycurl is not None:
from tornado.curl_httpclient import CurlAsyncHTTPClient
@unittest.skipIf(pycurl is None, "pycurl module not present")
class CurlHTTPClientCommonTestCase(httpclient_test.HTTPClientCommonTestCase):
def get_http_client(self):
client = CurlAsyncHTTPClient(io_loop=self.io_loop,
defaults=dict(allow_ipv6=False))
# make sure AsyncHTTPClient magic doesn't give us the wrong class
self.assertTrue(isinstance(client, CurlAsyncHTTPClient))
return client
class DigestAuthHandler(RequestHandler):
def get(self):
realm = 'test'
opaque = 'asdf'
# Real implementations would use a random nonce.
nonce = "1234"
username = 'foo'
password = 'bar'
auth_header = self.request.headers.get('Authorization', None)
if auth_header is not None:
auth_mode, params = auth_header.split(' ', 1)
assert auth_mode == 'Digest'
param_dict = {}
for pair in params.split(','):
k, v = pair.strip().split('=', 1)
if v[0] == '"' and v[-1] == '"':
v = v[1:-1]
param_dict[k] = v
assert param_dict['realm'] == realm
assert param_dict['opaque'] == opaque
assert param_dict['nonce'] == nonce
assert param_dict['username'] == username
assert param_dict['uri'] == self.request.path
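            # RFC 2617 Digest (no qop): HA1 = MD5(user:realm:password),
            # HA2 = MD5(method:uri), expected response = MD5(HA1:nonce:HA2).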
h1 = md5(utf8('%s:%s:%s' % (username, realm, password))).hexdigest()
h2 = md5(utf8('%s:%s' % (self.request.method,
self.request.path))).hexdigest()
digest = md5(utf8('%s:%s:%s' % (h1, nonce, h2))).hexdigest()
if digest == param_dict['response']:
self.write('ok')
else:
self.write('fail')
else:
self.set_status(401)
self.set_header('WWW-Authenticate',
'Digest realm="%s", nonce="%s", opaque="%s"' %
(realm, nonce, opaque))
class CustomReasonHandler(RequestHandler):
def get(self):
self.set_status(200, "Custom reason")
class CustomFailReasonHandler(RequestHandler):
def get(self):
self.set_status(400, "Custom reason")
@unittest.skipIf(pycurl is None, "pycurl module not present")
class CurlHTTPClientTestCase(AsyncHTTPTestCase):
def setUp(self):
super(CurlHTTPClientTestCase, self).setUp()
self.http_client = CurlAsyncHTTPClient(self.io_loop,
defaults=dict(allow_ipv6=False))
def get_app(self):
return Application([
('/digest', DigestAuthHandler),
('/custom_reason', CustomReasonHandler),
('/custom_fail_reason', CustomFailReasonHandler),
])
def test_prepare_curl_callback_stack_context(self):
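        # Deliberately raise ZeroDivisionError inside prepare_curl_callback
        # and verify that the surrounding ExceptionStackContext receives it.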
exc_info = []
def error_handler(typ, value, tb):
exc_info.append((typ, value, tb))
self.stop()
return True
with ExceptionStackContext(error_handler):
request = HTTPRequest(self.get_url('/'),
prepare_curl_callback=lambda curl: 1 / 0)
self.http_client.fetch(request, callback=self.stop)
self.wait()
self.assertEqual(1, len(exc_info))
self.assertIs(exc_info[0][0], ZeroDivisionError)
def test_digest_auth(self):
response = self.fetch('/digest', auth_mode='digest',
auth_username='foo', auth_password='bar')
self.assertEqual(response.body, b'ok')
def test_custom_reason(self):
response = self.fetch('/custom_reason')
self.assertEqual(response.reason, "Custom reason")
def test_fail_custom_reason(self):
response = self.fetch('/custom_fail_reason')
self.assertEqual(str(response.error), "HTTP 400: Custom reason")
|
gpl-3.0
|
Arcanemagus/plexpy
|
lib/chardet/escprober.py
|
289
|
3950
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetprober import CharSetProber
from .codingstatemachine import CodingStateMachine
from .enums import LanguageFilter, ProbingState, MachineState
from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL,
ISO2022KR_SM_MODEL)
class EscCharSetProber(CharSetProber):
"""
This CharSetProber uses a "code scheme" approach for detecting encodings,
whereby easily recognizable escape or shift sequences are relied on to
identify these encodings.
"""
def __init__(self, lang_filter=None):
super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
self.coding_sm = []
if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
if self.lang_filter & LanguageFilter.JAPANESE:
self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
if self.lang_filter & LanguageFilter.KOREAN:
self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
self.active_sm_count = None
self._detected_charset = None
self._detected_language = None
self._state = None
self.reset()
def reset(self):
super(EscCharSetProber, self).reset()
for coding_sm in self.coding_sm:
if not coding_sm:
continue
coding_sm.active = True
coding_sm.reset()
self.active_sm_count = len(self.coding_sm)
self._detected_charset = None
self._detected_language = None
@property
def charset_name(self):
return self._detected_charset
@property
def language(self):
return self._detected_language
def get_confidence(self):
if self._detected_charset:
return 0.99
else:
return 0.00
def feed(self, byte_str):
for c in byte_str:
for coding_sm in self.coding_sm:
if not coding_sm or not coding_sm.active:
continue
coding_state = coding_sm.next_state(c)
if coding_state == MachineState.ERROR:
coding_sm.active = False
self.active_sm_count -= 1
if self.active_sm_count <= 0:
self._state = ProbingState.NOT_ME
return self.state
elif coding_state == MachineState.ITS_ME:
self._state = ProbingState.FOUND_IT
self._detected_charset = coding_sm.get_coding_state_machine()
self._detected_language = coding_sm.language
return self.state
return self.state
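# Minimal usage sketch (illustrative only; the escape-sequence bytes below
# are a stand-in, not data shipped with this module):
#   prober = EscCharSetProber(LanguageFilter.JAPANESE)
#   prober.feed(b'\x1b$B...')  # ISO-2022-JP shift sequence
#   print(prober.charset_name, prober.get_confidence())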
|
gpl-3.0
|
hobarrera/django
|
tests/m2m_recursive/models.py
|
410
|
1120
|
"""
Many-to-many relationships between the same two tables
In this example, a ``Person`` can have many friends, who are also ``Person``
objects. Friendship is a symmetrical relationship - if I am your friend, you
are my friend. Here, ``friends`` is an example of a symmetrical
``ManyToManyField``.
A ``Person`` can also have many idols - but while I may idolize you, you may
not think the same of me. Here, ``idols`` is an example of a non-symmetrical
``ManyToManyField``. Only recursive ``ManyToManyField`` fields may be
non-symmetrical, and they are symmetrical by default.
This test validates that the many-to-many table is created using a mangled name
if there is a name clash, and tests that symmetry is preserved where
appropriate.
"""
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Person(models.Model):
name = models.CharField(max_length=20)
friends = models.ManyToManyField('self')
idols = models.ManyToManyField('self', symmetrical=False, related_name='stalkers')
def __str__(self):
return self.name
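# Illustrative shell session (hypothetical data) showing the (a)symmetry:
#   >>> a = Person.objects.create(name='Anne')
#   >>> b = Person.objects.create(name='Bill')
#   >>> a.friends.add(b)   # symmetrical: b.friends now includes a
#   >>> a.idols.add(b)     # non-symmetrical: b.idols stays empty
#   >>> b.stalkers.all()   # reverse accessor from related_name='stalkers'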
|
bsd-3-clause
|
thresholdsoftware/asylum
|
openerp/addons/account_anglo_saxon/product.py
|
51
|
3427
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class product_category(osv.osv):
_inherit = "product.category"
_columns = {
'property_account_creditor_price_difference_categ': fields.property(
'account.account',
type='many2one',
relation='account.account',
string="Price Difference Account",
view_load=True,
help="This account will be used to value price difference between purchase price and cost price."),
#Redefine fields to change help text for anglo saxon methodology.
'property_account_income_categ': fields.property(
'account.account',
type='many2one',
relation='account.account',
string="Income Account",
view_load=True,
help="This account will be used to value outgoing stock using sale price."),
'property_account_expense_categ': fields.property(
'account.account',
type='many2one',
relation='account.account',
string="Expense Account",
view_load=True,
help="This account will be used to value outgoing stock using cost price."),
}
product_category()
class product_template(osv.osv):
_inherit = "product.template"
_columns = {
'property_account_creditor_price_difference': fields.property(
'account.account',
type='many2one',
relation='account.account',
string="Price Difference Account",
view_load=True,
help="This account will be used to value price difference between purchase price and cost price."),
#Redefine fields to change help text for anglo saxon methodology.
'property_account_income': fields.property(
'account.account',
type='many2one',
relation='account.account',
string="Income Account",
view_load=True,
help="This account will be used to value outgoing stock using sale price."),
'property_account_expense': fields.property(
'account.account',
type='many2one',
relation='account.account',
string="Expense Account",
view_load=True,
help="This account will be used to value outgoing stock using cost price."),
}
product_template()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
ping/youtube-dl
|
youtube_dl/extractor/cbssports.py
|
22
|
1486
|
from __future__ import unicode_literals
from .cbs import CBSBaseIE
class CBSSportsIE(CBSBaseIE):
_VALID_URL = r'https?://(?:www\.)?cbssports\.com/[^/]+/(?:video|news)/(?P<id>[^/?#&]+)'
_TESTS = [{
'url': 'https://www.cbssports.com/nba/video/donovan-mitchell-flashes-star-potential-in-game-2-victory-over-thunder/',
'info_dict': {
'id': '1214315075735',
'ext': 'mp4',
'title': 'Donovan Mitchell flashes star potential in Game 2 victory over Thunder',
'description': 'md5:df6f48622612c2d6bd2e295ddef58def',
'timestamp': 1524111457,
'upload_date': '20180419',
'uploader': 'CBSI-NEW',
},
'params': {
# m3u8 download
'skip_download': True,
}
}, {
'url': 'https://www.cbssports.com/nba/news/nba-playoffs-2018-watch-76ers-vs-heat-game-3-series-schedule-tv-channel-online-stream/',
'only_matching': True,
}]
def _extract_video_info(self, filter_query, video_id):
return self._extract_feed_info('dJ5BDC', 'VxxJg8Ymh8sE', filter_query, video_id)
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
video_id = self._search_regex(
[r'(?:=|%26)pcid%3D(\d+)', r'embedVideo(?:Container)?_(\d+)'],
webpage, 'video id')
return self._extract_video_info('byId=%s' % video_id, video_id)
|
unlicense
|
wadadaaa/marta
|
config/settings/local.py
|
1
|
1961
|
# -*- coding: utf-8 -*-
'''
Local settings
- Run in Debug mode
- Use console backend for emails
- Add Django Debug Toolbar
- Add django-extensions as app
'''
from .common import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
DEBUG = env.bool('DJANGO_DEBUG', default=True)
TEMPLATES[0]['OPTIONS']['debug'] = DEBUG
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key is only used for development and testing.
SECRET_KEY = env("DJANGO_SECRET_KEY", default='CHANGEME!!!ahigrfdmd-4q51ef4d9t(4+1r-nayd=4^wi+$a9+!%rds@m@7v')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND',
default='django.core.mail.backends.console.EmailBackend')
# CACHING
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# django-debug-toolbar
# ------------------------------------------------------------------------------
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
INSTALLED_APPS += ('debug_toolbar', )
INTERNAL_IPS = ('127.0.0.1', '10.0.2.2',)
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
}
# django-extensions
# ------------------------------------------------------------------------------
INSTALLED_APPS += ('django_extensions', )
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Your local stuff: Below this line define 3rd party library settings
|
bsd-3-clause
|
pshchelo/heat
|
contrib/heat_keystone/heat_keystone/resources/service.py
|
2
|
3670
|
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from heat.common.i18n import _
from heat.engine import properties
from heat.engine import resource
from heat.engine import support
class KeystoneService(resource.Resource):
"""Heat Template Resource for Keystone Service."""
support_status = support.SupportStatus(
version='2015.2',
message=_('Supported versions: keystone v3'))
PROPERTIES = (
NAME, DESCRIPTION, TYPE
) = (
'name', 'description', 'type'
)
properties_schema = {
NAME: properties.Schema(
properties.Schema.STRING,
_('Name of keystone service.'),
update_allowed=True
),
DESCRIPTION: properties.Schema(
properties.Schema.STRING,
_('Description of keystone service.'),
update_allowed=True
),
TYPE: properties.Schema(
properties.Schema.STRING,
_('Type of keystone Service.'),
update_allowed=True,
required=True
)
}
def _create_service(self,
name,
type,
description=None):
return self.keystone().client.services.create(
name=name,
description=description,
type=type)
def _delete_service(self, service_id):
return self.keystone().client.services.delete(service_id)
def _update_service(self,
service_id,
new_name=None,
new_description=None,
new_type=None):
return self.keystone().client.services.update(
service=service_id,
name=new_name,
description=new_description,
type=new_type)
def handle_create(self):
name = (self.properties.get(self.NAME) or
self.physical_resource_name())
description = self.properties.get(self.DESCRIPTION)
type = self.properties.get(self.TYPE)
service = self._create_service(
name=name,
description=description,
type=type
)
self.resource_id_set(service.id)
def handle_update(self,
json_snippet=None,
tmpl_diff=None,
prop_diff=None):
name = None
if self.NAME in prop_diff:
name = (prop_diff.get(self.NAME) or
self.physical_resource_name())
description = prop_diff.get(self.DESCRIPTION)
type = prop_diff.get(self.TYPE)
self._update_service(
service_id=self.resource_id,
new_name=name,
new_description=description,
new_type=type
)
def handle_delete(self):
if self.resource_id is not None:
try:
self._delete_service(service_id=self.resource_id)
except Exception as ex:
self.client_plugin('keystone').ignore_not_found(ex)
def resource_mapping():
return {
'OS::Keystone::Service': KeystoneService
}
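# Illustrative HOT template snippet (hypothetical) that would map to this
# resource via resource_mapping() above:
#   resources:
#     my_service:
#       type: OS::Keystone::Service
#       properties:
#         name: my-service
#         type: orchestration
#         description: An example keystone service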
|
apache-2.0
|
kingland/go-v8
|
v8-3.28/build/gyp/test/intermediate_dir/gyptest-intermediate-dir.py
|
243
|
1398
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that targets have independent INTERMEDIATE_DIRs.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('test.gyp', chdir='src')
test.build('test.gyp', 'target1', chdir='src')
# Check stuff exists.
intermediate_file1 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
shared_intermediate_file1 = test.read('src/shared_outfile.txt')
test.must_contain(shared_intermediate_file1, 'shared_target1')
test.run_gyp('test2.gyp', chdir='src')
# Force the shared intermediate to be rebuilt.
test.sleep()
test.touch('src/shared_infile.txt')
test.build('test2.gyp', 'target2', chdir='src')
# Check INTERMEDIATE_DIR file didn't get overwritten but SHARED_INTERMEDIATE_DIR
# file did.
intermediate_file2 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
test.must_contain(intermediate_file2, 'target2')
shared_intermediate_file2 = test.read('src/shared_outfile.txt')
if shared_intermediate_file1 != shared_intermediate_file2:
test.fail_test(shared_intermediate_file1 + ' != ' + shared_intermediate_file2)
test.must_contain(shared_intermediate_file1, 'shared_target2')
test.must_contain(shared_intermediate_file2, 'shared_target2')
test.pass_test()
|
mit
|
gautamMalu/rootfs_xen_arndale
|
usr/lib/python2.7/io.py
|
53
|
3321
|
"""The io module provides the Python interfaces to stream handling. The
builtin open function is defined in this module.
At the top of the I/O hierarchy is the abstract base class IOBase. It
defines the basic interface to a stream. Note, however, that there is no
separation between reading and writing to streams; implementations are
allowed to raise an IOError if they do not support a given operation.
Extending IOBase is RawIOBase which deals simply with the reading and
writing of raw bytes to a stream. FileIO subclasses RawIOBase to provide
an interface to OS files.
BufferedIOBase deals with buffering on a raw byte stream (RawIOBase). Its
subclasses, BufferedWriter, BufferedReader, and BufferedRWPair buffer
streams that are readable, writable, and both respectively.
BufferedRandom provides a buffered interface to random access
streams. BytesIO is a simple stream of in-memory bytes.
Another IOBase subclass, TextIOBase, deals with the encoding and decoding
of streams into text. TextIOWrapper, which extends it, is a buffered text
interface to a buffered raw stream (`BufferedIOBase`). Finally, StringIO
is an in-memory stream for text.
Argument names are not part of the specification, and only the arguments
of open() are intended to be used as keyword arguments.
data:
DEFAULT_BUFFER_SIZE
An int containing the default buffer size used by the module's buffered
I/O classes. open() uses the file's blksize (as obtained by os.stat) if
possible.
"""
# New I/O library conforming to PEP 3116.
__author__ = ("Guido van Rossum <guido@python.org>, "
"Mike Verdone <mike.verdone@gmail.com>, "
"Mark Russell <mark.russell@zen.co.uk>, "
"Antoine Pitrou <solipsis@pitrou.net>, "
"Amaury Forgeot d'Arc <amauryfa@gmail.com>, "
"Benjamin Peterson <benjamin@python.org>")
__all__ = ["BlockingIOError", "open", "IOBase", "RawIOBase", "FileIO",
"BytesIO", "StringIO", "BufferedIOBase",
"BufferedReader", "BufferedWriter", "BufferedRWPair",
"BufferedRandom", "TextIOBase", "TextIOWrapper",
"UnsupportedOperation", "SEEK_SET", "SEEK_CUR", "SEEK_END"]
import _io
import abc
from _io import (DEFAULT_BUFFER_SIZE, BlockingIOError, UnsupportedOperation,
open, FileIO, BytesIO, StringIO, BufferedReader,
BufferedWriter, BufferedRWPair, BufferedRandom,
IncrementalNewlineDecoder, TextIOWrapper)
OpenWrapper = _io.open # for compatibility with _pyio
# for seek()
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2
# Declaring ABCs in C is tricky so we do it here.
# Method descriptions and default implementations are inherited from the C
# version however.
class IOBase(_io._IOBase):
__metaclass__ = abc.ABCMeta
__doc__ = _io._IOBase.__doc__
class RawIOBase(_io._RawIOBase, IOBase):
__doc__ = _io._RawIOBase.__doc__
class BufferedIOBase(_io._BufferedIOBase, IOBase):
__doc__ = _io._BufferedIOBase.__doc__
class TextIOBase(_io._TextIOBase, IOBase):
__doc__ = _io._TextIOBase.__doc__
RawIOBase.register(FileIO)
for klass in (BytesIO, BufferedReader, BufferedWriter, BufferedRandom,
BufferedRWPair):
BufferedIOBase.register(klass)
for klass in (StringIO, TextIOWrapper):
TextIOBase.register(klass)
del klass
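# Quick illustration of the ABC registrations above:
#   >>> import io
#   >>> isinstance(io.BytesIO(), io.BufferedIOBase)
#   True
#   >>> isinstance(io.StringIO(), io.TextIOBase)
#   True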
|
gpl-2.0
|
SaschaMester/delicium
|
sync/tools/testserver/sync_testserver.py
|
24
|
20023
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This is a python sync server used for testing Chrome Sync.
By default, it listens on an ephemeral port and xmpp_port and sends the port
numbers back to the originating process over a pipe. The originating process can
specify an explicit port and xmpp_port if necessary.
"""
import asyncore
import BaseHTTPServer
import errno
import os
import select
import socket
import sys
import urlparse
import chromiumsync
import echo_message
import testserver_base
import xmppserver
class SyncHTTPServer(testserver_base.ClientRestrictingServerMixIn,
testserver_base.BrokenPipeHandlerMixIn,
testserver_base.StoppableHTTPServer):
"""An HTTP server that handles sync commands."""
def __init__(self, server_address, xmpp_port, request_handler_class):
testserver_base.StoppableHTTPServer.__init__(self,
server_address,
request_handler_class)
self._sync_handler = chromiumsync.TestServer()
self._xmpp_socket_map = {}
self._xmpp_server = xmppserver.XmppServer(
self._xmpp_socket_map, ('localhost', xmpp_port))
self.xmpp_port = self._xmpp_server.getsockname()[1]
self.authenticated = True
def GetXmppServer(self):
return self._xmpp_server
def HandleCommand(self, query, raw_request):
return self._sync_handler.HandleCommand(query, raw_request)
def HandleRequestNoBlock(self):
"""Handles a single request.
Copied from SocketServer._handle_request_noblock().
"""
try:
request, client_address = self.get_request()
except socket.error:
return
if self.verify_request(request, client_address):
try:
self.process_request(request, client_address)
except Exception:
self.handle_error(request, client_address)
self.close_request(request)
def SetAuthenticated(self, auth_valid):
self.authenticated = auth_valid
def GetAuthenticated(self):
return self.authenticated
def handle_request(self):
"""Adaptation of asyncore.loop"""
def HandleXmppSocket(fd, socket_map, handler):
"""Runs the handler for the xmpp connection for fd.
Adapted from asyncore.read() et al.
"""
xmpp_connection = socket_map.get(fd)
# This could happen if a previous handler call caused fd to get
# removed from socket_map.
if xmpp_connection is None:
return
try:
handler(xmpp_connection)
except (asyncore.ExitNow, KeyboardInterrupt, SystemExit):
raise
except:
xmpp_connection.handle_error()
read_fds = [ self.fileno() ]
write_fds = []
exceptional_fds = []
for fd, xmpp_connection in self._xmpp_socket_map.items():
is_r = xmpp_connection.readable()
is_w = xmpp_connection.writable()
if is_r:
read_fds.append(fd)
if is_w:
write_fds.append(fd)
if is_r or is_w:
exceptional_fds.append(fd)
try:
read_fds, write_fds, exceptional_fds = (
select.select(read_fds, write_fds, exceptional_fds))
except select.error, err:
if err.args[0] != errno.EINTR:
raise
else:
return
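# Service the HTTP listener first when it is ready; any XMPP sockets left
# unhandled this iteration are picked up on the next handle_request() call.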
for fd in read_fds:
if fd == self.fileno():
self.HandleRequestNoBlock()
return
HandleXmppSocket(fd, self._xmpp_socket_map,
asyncore.dispatcher.handle_read_event)
for fd in write_fds:
HandleXmppSocket(fd, self._xmpp_socket_map,
asyncore.dispatcher.handle_write_event)
for fd in exceptional_fds:
HandleXmppSocket(fd, self._xmpp_socket_map,
asyncore.dispatcher.handle_expt_event)
class SyncPageHandler(testserver_base.BasePageHandler):
"""Handler for the main HTTP sync server."""
def __init__(self, request, client_address, sync_http_server):
get_handlers = [self.ChromiumSyncTimeHandler,
self.ChromiumSyncMigrationOpHandler,
self.ChromiumSyncCredHandler,
self.ChromiumSyncXmppCredHandler,
self.ChromiumSyncDisableNotificationsOpHandler,
self.ChromiumSyncEnableNotificationsOpHandler,
self.ChromiumSyncSendNotificationOpHandler,
self.ChromiumSyncBirthdayErrorOpHandler,
self.ChromiumSyncTransientErrorOpHandler,
self.ChromiumSyncErrorOpHandler,
self.ChromiumSyncSyncTabFaviconsOpHandler,
self.ChromiumSyncCreateSyncedBookmarksOpHandler,
self.ChromiumSyncEnableKeystoreEncryptionOpHandler,
self.ChromiumSyncRotateKeystoreKeysOpHandler,
self.ChromiumSyncEnableManagedUserAcknowledgementHandler,
self.ChromiumSyncEnablePreCommitGetUpdateAvoidanceHandler,
self.GaiaOAuth2TokenHandler,
self.GaiaSetOAuth2TokenResponseHandler,
self.CustomizeClientCommandHandler]
post_handlers = [self.ChromiumSyncCommandHandler,
self.ChromiumSyncTimeHandler,
self.GaiaOAuth2TokenHandler,
self.GaiaSetOAuth2TokenResponseHandler]
testserver_base.BasePageHandler.__init__(self, request, client_address,
sync_http_server, [], get_handlers,
[], post_handlers, [])
def ChromiumSyncTimeHandler(self):
"""Handle Chromium sync .../time requests.
The syncer sometimes checks server reachability by examining /time.
"""
test_name = "/chromiumsync/time"
if not self._ShouldHandleRequest(test_name):
return False
# Chrome hates it if we send a response before reading the request.
if self.headers.getheader('content-length'):
length = int(self.headers.getheader('content-length'))
_raw_request = self.rfile.read(length)
self.send_response(200)
self.send_header('Content-Type', 'text/plain')
self.end_headers()
self.wfile.write('0123456789')
return True
def ChromiumSyncCommandHandler(self):
"""Handle a chromiumsync command arriving via http.
This covers all sync protocol commands: authentication, getupdates, and
commit.
"""
test_name = "/chromiumsync/command"
if not self._ShouldHandleRequest(test_name):
return False
length = int(self.headers.getheader('content-length'))
raw_request = self.rfile.read(length)
http_response = 200
raw_reply = None
if not self.server.GetAuthenticated():
http_response = 401
challenge = 'GoogleLogin realm="http://%s", service="chromiumsync"' % (
self.server.server_address[0])
else:
http_response, raw_reply = self.server.HandleCommand(
self.path, raw_request)
### Now send the response to the client. ###
self.send_response(http_response)
if http_response == 401:
self.send_header('www-Authenticate', challenge)
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncMigrationOpHandler(self):
test_name = "/chromiumsync/migrate"
if not self._ShouldHandleRequest(test_name):
return False
http_response, raw_reply = self.server._sync_handler.HandleMigrate(
self.path)
self.send_response(http_response)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncCredHandler(self):
test_name = "/chromiumsync/cred"
if not self._ShouldHandleRequest(test_name):
return False
try:
query = urlparse.urlparse(self.path)[4]
cred_valid = urlparse.parse_qs(query)['valid']
if cred_valid[0] == 'True':
self.server.SetAuthenticated(True)
else:
self.server.SetAuthenticated(False)
except Exception:
self.server.SetAuthenticated(False)
http_response = 200
raw_reply = 'Authenticated: %s ' % self.server.GetAuthenticated()
self.send_response(http_response)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncXmppCredHandler(self):
test_name = "/chromiumsync/xmppcred"
if not self._ShouldHandleRequest(test_name):
return False
xmpp_server = self.server.GetXmppServer()
try:
query = urlparse.urlparse(self.path)[4]
cred_valid = urlparse.parse_qs(query)['valid']
if cred_valid[0] == 'True':
xmpp_server.SetAuthenticated(True)
else:
xmpp_server.SetAuthenticated(False)
except:
xmpp_server.SetAuthenticated(False)
http_response = 200
raw_reply = 'XMPP Authenticated: %s ' % xmpp_server.GetAuthenticated()
self.send_response(http_response)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncDisableNotificationsOpHandler(self):
test_name = "/chromiumsync/disablenotifications"
if not self._ShouldHandleRequest(test_name):
return False
self.server.GetXmppServer().DisableNotifications()
result = 200
raw_reply = ('<html><title>Notifications disabled</title>'
'<H1>Notifications disabled</H1></html>')
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncEnableNotificationsOpHandler(self):
test_name = "/chromiumsync/enablenotifications"
if not self._ShouldHandleRequest(test_name):
return False
self.server.GetXmppServer().EnableNotifications()
result = 200
raw_reply = ('<html><title>Notifications enabled</title>'
'<H1>Notifications enabled</H1></html>')
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncSendNotificationOpHandler(self):
test_name = "/chromiumsync/sendnotification"
if not self._ShouldHandleRequest(test_name):
return False
query = urlparse.urlparse(self.path)[4]
query_params = urlparse.parse_qs(query)
channel = ''
data = ''
if 'channel' in query_params:
channel = query_params['channel'][0]
if 'data' in query_params:
data = query_params['data'][0]
self.server.GetXmppServer().SendNotification(channel, data)
result = 200
raw_reply = ('<html><title>Notification sent</title>'
'<H1>Notification sent with channel "%s" '
'and data "%s"</H1></html>'
% (channel, data))
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncBirthdayErrorOpHandler(self):
test_name = "/chromiumsync/birthdayerror"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = self.server._sync_handler.HandleCreateBirthdayError()
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncTransientErrorOpHandler(self):
test_name = "/chromiumsync/transienterror"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = self.server._sync_handler.HandleSetTransientError()
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncErrorOpHandler(self):
test_name = "/chromiumsync/error"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = self.server._sync_handler.HandleSetInducedError(
self.path)
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncSyncTabFaviconsOpHandler(self):
test_name = "/chromiumsync/synctabfavicons"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = self.server._sync_handler.HandleSetSyncTabFavicons()
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncCreateSyncedBookmarksOpHandler(self):
test_name = "/chromiumsync/createsyncedbookmarks"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = self.server._sync_handler.HandleCreateSyncedBookmarks()
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncEnableKeystoreEncryptionOpHandler(self):
test_name = "/chromiumsync/enablekeystoreencryption"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = (
self.server._sync_handler.HandleEnableKeystoreEncryption())
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncRotateKeystoreKeysOpHandler(self):
test_name = "/chromiumsync/rotatekeystorekeys"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = (
self.server._sync_handler.HandleRotateKeystoreKeys())
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncEnableManagedUserAcknowledgementHandler(self):
test_name = "/chromiumsync/enablemanageduseracknowledgement"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = (
self.server._sync_handler.HandleEnableManagedUserAcknowledgement())
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def ChromiumSyncEnablePreCommitGetUpdateAvoidanceHandler(self):
test_name = "/chromiumsync/enableprecommitgetupdateavoidance"
if not self._ShouldHandleRequest(test_name):
return False
result, raw_reply = (
self.server._sync_handler.HandleEnablePreCommitGetUpdateAvoidance())
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def GaiaOAuth2TokenHandler(self):
test_name = "/o/oauth2/token"
if not self._ShouldHandleRequest(test_name):
return False
if self.headers.getheader('content-length'):
length = int(self.headers.getheader('content-length'))
_raw_request = self.rfile.read(length)
result, raw_reply = (
self.server._sync_handler.HandleGetOauth2Token())
self.send_response(result)
self.send_header('Content-Type', 'application/json')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def GaiaSetOAuth2TokenResponseHandler(self):
test_name = "/setfakeoauth2token"
if not self._ShouldHandleRequest(test_name):
return False
# The index of 'query' is 4.
# See http://docs.python.org/2/library/urlparse.html
query = urlparse.urlparse(self.path)[4]
query_params = urlparse.parse_qs(query)
response_code = 0
request_token = ''
access_token = ''
expires_in = 0
token_type = ''
if 'response_code' in query_params:
response_code = query_params['response_code'][0]
if 'request_token' in query_params:
request_token = query_params['request_token'][0]
if 'access_token' in query_params:
access_token = query_params['access_token'][0]
if 'expires_in' in query_params:
expires_in = query_params['expires_in'][0]
if 'token_type' in query_params:
token_type = query_params['token_type'][0]
result, raw_reply = (
self.server._sync_handler.HandleSetOauth2Token(
response_code, request_token, access_token, expires_in, token_type))
self.send_response(result)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(raw_reply))
self.end_headers()
self.wfile.write(raw_reply)
return True
def CustomizeClientCommandHandler(self):
test_name = "/customizeclientcommand"
if not self._ShouldHandleRequest(test_name):
return False
query = urlparse.urlparse(self.path)[4]
query_params = urlparse.parse_qs(query)
if 'sessions_commit_delay_seconds' in query_params:
sessions_commit_delay = query_params['sessions_commit_delay_seconds'][0]
try:
command_string = self.server._sync_handler.CustomizeClientCommand(
int(sessions_commit_delay))
response_code = 200
reply = "The ClientCommand was customized:\n\n"
reply += "<code>{}</code>.".format(command_string)
except ValueError:
response_code = 400
reply = "sessions_commit_delay_seconds was not an int"
else:
response_code = 400
reply = "sessions_commit_delay_seconds is required"
self.send_response(response_code)
self.send_header('Content-Type', 'text/html')
self.send_header('Content-Length', len(reply))
self.end_headers()
self.wfile.write(reply)
return True
class SyncServerRunner(testserver_base.TestServerRunner):
"""TestServerRunner for the net test servers."""
def __init__(self):
super(SyncServerRunner, self).__init__()
def create_server(self, server_data):
port = self.options.port
host = self.options.host
xmpp_port = self.options.xmpp_port
server = SyncHTTPServer((host, port), xmpp_port, SyncPageHandler)
print ('Sync HTTP server started at %s:%d/chromiumsync...' %
(host, server.server_port))
print ('Fake OAuth2 Token server started at %s:%d/o/oauth2/token...' %
(host, server.server_port))
print ('Sync XMPP server started at %s:%d...' %
(host, server.xmpp_port))
server_data['port'] = server.server_port
server_data['xmpp_port'] = server.xmpp_port
return server
def run_server(self):
testserver_base.TestServerRunner.run_server(self)
def add_options(self):
testserver_base.TestServerRunner.add_options(self)
self.option_parser.add_option('--xmpp-port', default='0', type='int',
help='Port used by the XMPP server. If '
'unspecified, the XMPP server will listen on '
'an ephemeral port.')
# Override the default logfile name used in testserver.py.
self.option_parser.set_defaults(log_file='sync_testserver.log')
if __name__ == '__main__':
sys.exit(SyncServerRunner().main())
|
bsd-3-clause
|
jasondunsmore/heat
|
heat/tests/openstack/neutron/lbaas/test_health_monitor.py
|
2
|
7098
|
#
# Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from neutronclient.common import exceptions
from heat.common import template_format
from heat.engine.resources.openstack.neutron.lbaas import health_monitor
from heat.tests import common
from heat.tests.openstack.neutron import inline_templates
from heat.tests import utils
class HealthMonitorTest(common.HeatTestCase):
def setUp(self):
super(HealthMonitorTest, self).setUp()
@mock.patch('heat.engine.clients.os.neutron.'
'NeutronClientPlugin.has_extension', return_value=True)
def _create_stack(self, ext_func, tmpl=inline_templates.MONITOR_TEMPLATE):
self.t = template_format.parse(tmpl)
self.stack = utils.parse_stack(self.t)
self.healthmonitor = self.stack['monitor']
self.neutron_client = mock.MagicMock()
self.healthmonitor.client = mock.MagicMock(
return_value=self.neutron_client)
self.healthmonitor.client_plugin().find_resourceid_by_name_or_id = (
mock.MagicMock(return_value='123'))
self.healthmonitor.client_plugin().client = mock.MagicMock(
return_value=self.neutron_client)
def test_resource_mapping(self):
mapping = health_monitor.resource_mapping()
self.assertEqual(health_monitor.HealthMonitor,
mapping['OS::Neutron::LBaaS::HealthMonitor'])
def test_create(self):
self._create_stack()
self.neutron_client.show_loadbalancer.side_effect = [
{'loadbalancer': {'provisioning_status': 'PENDING_UPDATE'}},
{'loadbalancer': {'provisioning_status': 'PENDING_UPDATE'}},
{'loadbalancer': {'provisioning_status': 'ACTIVE'}},
]
self.neutron_client.create_lbaas_healthmonitor.side_effect = [
exceptions.StateInvalidClient,
{'healthmonitor': {'id': '1234'}}
]
expected = {
'healthmonitor': {
'admin_state_up': True,
'delay': 3,
'expected_codes': '200-202',
'http_method': 'HEAD',
'max_retries': 5,
'pool_id': '123',
'timeout': 10,
'type': 'HTTP',
'url_path': '/health'
}
}
props = self.healthmonitor.handle_create()
self.assertFalse(self.healthmonitor.check_create_complete(props))
self.neutron_client.create_lbaas_healthmonitor.assert_called_with(
expected)
self.assertFalse(self.healthmonitor.check_create_complete(props))
self.neutron_client.create_lbaas_healthmonitor.assert_called_with(
expected)
self.assertFalse(self.healthmonitor.check_create_complete(props))
self.assertTrue(self.healthmonitor.check_create_complete(props))
def test_show_resource(self):
self._create_stack()
self.healthmonitor.resource_id_set('1234')
self.assertTrue(self.healthmonitor._show_resource())
self.neutron_client.show_lbaas_healthmonitor.assert_called_with(
'1234')
def test_update(self):
self._create_stack()
self.healthmonitor.resource_id_set('1234')
self.neutron_client.show_loadbalancer.side_effect = [
{'loadbalancer': {'provisioning_status': 'PENDING_UPDATE'}},
{'loadbalancer': {'provisioning_status': 'PENDING_UPDATE'}},
{'loadbalancer': {'provisioning_status': 'ACTIVE'}},
]
self.neutron_client.update_lbaas_healthmonitor.side_effect = [
exceptions.StateInvalidClient, None]
prop_diff = {
'admin_state_up': False,
}
prop_diff = self.healthmonitor.handle_update(None, None, prop_diff)
self.assertFalse(self.healthmonitor.check_update_complete(prop_diff))
self.assertFalse(self.healthmonitor._update_called)
self.neutron_client.update_lbaas_healthmonitor.assert_called_with(
'1234', {'healthmonitor': prop_diff})
self.assertFalse(self.healthmonitor.check_update_complete(prop_diff))
self.assertTrue(self.healthmonitor._update_called)
self.neutron_client.update_lbaas_healthmonitor.assert_called_with(
'1234', {'healthmonitor': prop_diff})
self.assertFalse(self.healthmonitor.check_update_complete(prop_diff))
self.assertTrue(self.healthmonitor.check_update_complete(prop_diff))
def test_delete(self):
self._create_stack()
self.healthmonitor.resource_id_set('1234')
self.neutron_client.show_loadbalancer.side_effect = [
{'loadbalancer': {'provisioning_status': 'PENDING_UPDATE'}},
{'loadbalancer': {'provisioning_status': 'PENDING_UPDATE'}},
{'loadbalancer': {'provisioning_status': 'ACTIVE'}},
]
self.neutron_client.delete_lbaas_healthmonitor.side_effect = [
exceptions.StateInvalidClient, None]
self.healthmonitor.handle_delete()
self.assertFalse(self.healthmonitor.check_delete_complete(None))
self.assertFalse(self.healthmonitor._delete_called)
self.neutron_client.delete_lbaas_healthmonitor.assert_called_with(
'1234')
self.assertFalse(self.healthmonitor.check_delete_complete(None))
self.assertTrue(self.healthmonitor._delete_called)
self.neutron_client.delete_lbaas_healthmonitor.assert_called_with(
'1234')
self.assertFalse(self.healthmonitor.check_delete_complete(None))
self.assertTrue(self.healthmonitor.check_delete_complete(None))
def test_delete_already_gone(self):
self._create_stack()
self.healthmonitor.resource_id_set('1234')
self.neutron_client.delete_lbaas_healthmonitor.side_effect = (
exceptions.NotFound)
self.healthmonitor.handle_delete()
self.assertTrue(self.healthmonitor.check_delete_complete(None))
self.neutron_client.delete_lbaas_healthmonitor.assert_called_with(
'1234')
def test_delete_failed(self):
self._create_stack()
self.healthmonitor.resource_id_set('1234')
self.neutron_client.delete_lbaas_healthmonitor.side_effect = (
exceptions.Unauthorized)
self.healthmonitor.handle_delete()
self.assertRaises(exceptions.Unauthorized,
self.healthmonitor.check_delete_complete, None)
self.neutron_client.delete_lbaas_healthmonitor.assert_called_with(
'1234')
|
apache-2.0
|
bergzand/RIOT-graphs
|
riot-graph.py
|
1
|
2915
|
#!/usr/bin/env python3
import logging
from logging import StreamHandler
from docopt import docopt
from riot_graphs.rg import RiotGraph
from riot_graphs import server
def fetch(args, graphs):
days = None
if args['--days']:
try:
days = int(args['--days'])
except ValueError:
raise SystemExit('days in the past should be a positive integer')
history = None
if args['--history']:
try:
history = int(args['--history'])
except ValueError:
raise SystemExit('history should be an integer')
if history:
logging.info("Fetching build history since {}"
" days in the past".format(history))
graphs.retrieve_history(history)
elif days:
logging.info("Fetching build information from {}"
" days in the past".format(days))
graphs.push_last_of_day(days)
else:
logging.info("Fetching the latest build information")
graphs.push_last_of_day(0)
def run_server(args, graphs):
srv = server.RiotServer(args, graphs)
srv.run()
def main():
usage = """
Usage: riot-graph.py fetch [--cron|--debug] [--history=<N>|--days=<N>]
[--noop] <config>
riot-graph.py server [--quiet|--debug] [--host=<host>] [--port=<port>]
[--noop] <config>
riot-graph.py -V
riot-graph.py -h
Options:
-h, --help Display this usage info
-V, --version Display version and exit
fetch              Fetch results once and exit
server             Start the micro service
config             Path to the configuration file
-D, --debug Enable debug output
-C, --cron Mute all logging except warnings and errors
-H, --history=<N> Try to retrieve the full measurement history
starting at day N in the past
--host=<host> Host for the server to bind on [default: ::1]
--port=<port> Port to listen on [default: 8080]
-d, --days=<N> Retrieve day N in the past from now
-n, --noop Don't write anything to the database
"""
args = docopt(usage, version="0.1")
loglevel = logging.INFO
if args['--cron']:
loglevel = logging.WARNING
elif args['--debug']:
loglevel = logging.DEBUG
# Initialize the root logger with a stream handler
logger = logging.getLogger()
logger.setLevel(loglevel)
streamlogger = StreamHandler()
streamlogger.setLevel(loglevel)
logger.addHandler(streamlogger)
# Parse configuration file
graphs = RiotGraph(args['<config>'])
graphs.set_noop(args['--noop'])
if args['fetch']:
fetch(args, graphs)
elif args['server']:
run_server(args, graphs)
else:
pass
if __name__ == '__main__':
main()
|
gpl-3.0
|
PhilSk/zulip
|
contrib_bots/bots/encrypt/tests.py
|
15
|
1168
|
import encrypt_bot
def test():
for cmd, expected_response in sample_conversation():
message = {'content': cmd, 'subject': 'foo',
'display_recipient': 'bar'}
class ClientDummy(object):
def __init__(self):
self.output = ''
def send_message(self, params):
self.output = params['content']
handler = encrypt_bot.EncryptHandler()
client_dummy = ClientDummy()
handler.handle_message(message, client_dummy, '')
if client_dummy.output != expected_response:
raise AssertionError('''
cmd: %s
expected: %s
but got : %s
''' % (cmd, expected_response, client_dummy.output))
def sample_conversation():
return [
('@encrypt Please encrypt this', 'Encrypted/Decrypted text: Cyrnfr rapelcg guvf'),
('@encrypt Let\'s Do It', 'Encrypted/Decrypted text: Yrg\'f Qb Vg'),
('@encrypt ', 'Encrypted/Decrypted text: '),
('@encrypt me&mom together..!!', 'Encrypted/Decrypted text: zr&zbz gbtrgure..!!'),
]
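# The expected outputs above are the ROT13 encoding of the inputs; a quick
# illustrative check:
#
#   import codecs
#   assert codecs.encode('Please encrypt this', 'rot13') == 'Cyrnfr rapelcg guvf'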
if __name__ == '__main__':
test()
|
apache-2.0
|
alrusdi/lettuce
|
tests/integration/lib/Django-1.3/tests/modeltests/update/tests.py
|
89
|
4252
|
from django.test import TestCase
from models import A, B, C, D, DataPoint, RelatedPoint
class SimpleTest(TestCase):
def setUp(self):
self.a1 = A.objects.create()
self.a2 = A.objects.create()
for x in range(20):
B.objects.create(a=self.a1)
D.objects.create(a=self.a1)
def test_nonempty_update(self):
"""
Test that update changes the right number of rows for a nonempty queryset
"""
num_updated = self.a1.b_set.update(y=100)
self.assertEqual(num_updated, 20)
cnt = B.objects.filter(y=100).count()
self.assertEqual(cnt, 20)
def test_empty_update(self):
"""
Test that update changes the right number of rows for an empty queryset
"""
num_updated = self.a2.b_set.update(y=100)
self.assertEqual(num_updated, 0)
cnt = B.objects.filter(y=100).count()
self.assertEqual(cnt, 0)
def test_nonempty_update_with_inheritance(self):
"""
Test that update changes the right number of rows for a nonempty queryset
when the update affects only a base table
"""
num_updated = self.a1.d_set.update(y=100)
self.assertEqual(num_updated, 20)
cnt = D.objects.filter(y=100).count()
self.assertEqual(cnt, 20)
def test_empty_update_with_inheritance(self):
"""
Test that update changes the right number of rows for an empty queryset
when the update affects only a base table
"""
num_updated = self.a2.d_set.update(y=100)
self.assertEqual(num_updated, 0)
cnt = D.objects.filter(y=100).count()
self.assertEqual(cnt, 0)
class AdvancedTests(TestCase):
def setUp(self):
self.d0 = DataPoint.objects.create(name="d0", value="apple")
self.d2 = DataPoint.objects.create(name="d2", value="banana")
self.d3 = DataPoint.objects.create(name="d3", value="banana")
self.r1 = RelatedPoint.objects.create(name="r1", data=self.d3)
def test_update(self):
"""
Objects are updated by first filtering the candidates into a queryset
and then calling the update() method. It executes immediately and
returns nothing.
"""
resp = DataPoint.objects.filter(value="apple").update(name="d1")
self.assertEqual(resp, 1)
resp = DataPoint.objects.filter(value="apple")
self.assertEqual(list(resp), [self.d0])
def test_update_multiple_objects(self):
"""
We can update multiple objects at once.
"""
resp = DataPoint.objects.filter(value="banana").update(
value="pineapple")
self.assertEqual(resp, 2)
self.assertEqual(DataPoint.objects.get(name="d2").value, u'pineapple')
def test_update_fk(self):
"""
Foreign key fields can also be updated, although you can only update
the object referred to, not anything inside the related object.
"""
resp = RelatedPoint.objects.filter(name="r1").update(data=self.d0)
self.assertEqual(resp, 1)
resp = RelatedPoint.objects.filter(data__name="d0")
self.assertEqual(list(resp), [self.r1])
def test_update_multiple_fields(self):
"""
Multiple fields can be updated at once
"""
resp = DataPoint.objects.filter(value="apple").update(
value="fruit", another_value="peach")
self.assertEqual(resp, 1)
d = DataPoint.objects.get(name="d0")
self.assertEqual(d.value, u'fruit')
self.assertEqual(d.another_value, u'peach')
def test_update_all(self):
"""
In the rare case you want to update every instance of a model, update()
is also a manager method.
"""
self.assertEqual(DataPoint.objects.update(value='thing'), 3)
resp = DataPoint.objects.values('value').distinct()
self.assertEqual(list(resp), [{'value': u'thing'}])
def test_update_slice_fail(self):
"""
We do not support update on already sliced query sets.
"""
method = DataPoint.objects.all()[:2].update
self.assertRaises(AssertionError, method,
another_value='another thing')
|
gpl-3.0
|
subodhchhabra/airflow
|
airflow/utils/operator_resources.py
|
9
|
3778
|
# -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow import configuration
from airflow.exceptions import AirflowException
# Constants for resources (megabytes are the base unit)
MB = 1
GB = 1024 * MB
TB = 1024 * GB
PB = 1024 * TB
EB = 1024 * PB
class Resource(object):
"""
Represents a resource requirement in an execution environment for an operator.
:param name: Name of the resource
:type name: string
:param units_str: The string representing the units of a resource (e.g. MB for a CPU
resource) to be used for display purposes
:type units_str: string
:param qty: The number of units of the specified resource that are required for
execution of the operator.
:type qty: long
"""
def __init__(self, name, units_str, qty):
if qty < 0:
raise AirflowException(
'Received resource quantity {} for resource {} but resource quantity '
'must be non-negative.'.format(qty, name))
self._name = name
self._units_str = units_str
self._qty = qty
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return str(self.__dict__)
@property
def name(self):
return self._name
@property
def units_str(self):
return self._units_str
@property
def qty(self):
return self._qty
class CpuResource(Resource):
def __init__(self, qty):
super(CpuResource, self).__init__('CPU', 'core(s)', qty)
class RamResource(Resource):
def __init__(self, qty):
super(RamResource, self).__init__('RAM', 'MB', qty)
class DiskResource(Resource):
def __init__(self, qty):
super(DiskResource, self).__init__('Disk', 'MB', qty)
class GpuResource(Resource):
def __init__(self, qty):
super(GpuResource, self).__init__('GPU', 'gpu(s)', qty)
class Resources(object):
"""
The resources required by an operator. Resources that are not specified will use the
default values from the airflow config.
:param cpus: The number of cpu cores that are required
:type cpus: long
:param ram: The amount of RAM required
:type ram: long
:param disk: The amount of disk space required
:type disk: long
:param gpus: The number of gpu units that are required
:type gpus: long
"""
def __init__(self,
cpus=configuration.conf.getint('operators', 'default_cpus'),
ram=configuration.conf.getint('operators', 'default_ram'),
disk=configuration.conf.getint('operators', 'default_disk'),
gpus=configuration.conf.getint('operators', 'default_gpus')
):
self.cpus = CpuResource(cpus)
self.ram = RamResource(ram)
self.disk = DiskResource(disk)
self.gpus = GpuResource(gpus)
def __eq__(self, other):
return self.__dict__ == other.__dict__
def __repr__(self):
return str(self.__dict__)
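# A minimal usage sketch of the classes above (assuming an airflow config
# with the operator defaults is loadable in this environment):
#
#   resources = Resources(cpus=2, ram=2048)
#   assert resources.cpus.qty == 2
#   assert resources.ram.units_str == 'MB'
#
# Note that the keyword defaults are evaluated when this module is imported,
# so unspecified fields fall back to the configured operator defaults.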
|
apache-2.0
|
nicproulx/mne-python
|
mne/preprocessing/tests/test_xdawn.py
|
3
|
7741
|
# Authors: Alexandre Barachant <alexandre.barachant@gmail.com>
# Jean-Remi King <jeanremi.king@gmail.com>
#
# License: BSD (3-clause)
import numpy as np
import os.path as op
from nose.tools import assert_equal, assert_raises, assert_true
from numpy.testing import assert_array_equal, assert_array_almost_equal
from mne import Epochs, read_events, pick_types, compute_raw_covariance
from mne.io import read_raw_fif
from mne.utils import requires_sklearn, run_tests_if_main
from mne.preprocessing.xdawn import Xdawn, _XdawnTransformer
base_dir = op.join(op.dirname(__file__), '..', '..', 'io', 'tests', 'data')
raw_fname = op.join(base_dir, 'test_raw.fif')
event_name = op.join(base_dir, 'test-eve.fif')
tmin, tmax = -0.1, 0.2
event_id = dict(cond2=2, cond3=3)
def _get_data():
"""Get data."""
raw = read_raw_fif(raw_fname, verbose=False, preload=True)
events = read_events(event_name)
picks = pick_types(raw.info, meg=False, eeg=True, stim=False,
ecg=False, eog=False,
exclude='bads')[::8]
return raw, events, picks
def test_xdawn():
"""Test init of xdawn."""
# Init xdawn with good parameters
Xdawn(n_components=2, correct_overlap='auto', signal_cov=None, reg=None)
# Init xdawn with bad parameters
assert_raises(ValueError, Xdawn, correct_overlap=42)
def test_xdawn_fit():
"""Test Xdawn fit."""
# Get data
raw, events, picks = _get_data()
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
preload=True, baseline=None, verbose=False)
# =========== Basic Fit test =================
# Test base xdawn
xd = Xdawn(n_components=2, correct_overlap='auto')
xd.fit(epochs)
# With these parameters, the overlap correction must be False
assert_equal(xd.correct_overlap_, False)
# No overlap correction should give averaged evoked
evoked = epochs['cond2'].average()
assert_array_equal(evoked.data, xd.evokeds_['cond2'].data)
# ========== with signal cov provided ====================
# Provide covariance object
signal_cov = compute_raw_covariance(raw, picks=picks)
xd = Xdawn(n_components=2, correct_overlap=False,
signal_cov=signal_cov)
xd.fit(epochs)
# Provide ndarray
signal_cov = np.eye(len(picks))
xd = Xdawn(n_components=2, correct_overlap=False,
signal_cov=signal_cov)
xd.fit(epochs)
# Provide ndarray of bad shape
signal_cov = np.eye(len(picks) - 1)
xd = Xdawn(n_components=2, correct_overlap=False,
signal_cov=signal_cov)
assert_raises(ValueError, xd.fit, epochs)
# Provide another type
signal_cov = 42
xd = Xdawn(n_components=2, correct_overlap=False,
signal_cov=signal_cov)
assert_raises(ValueError, xd.fit, epochs)
# Fit with baseline correction and overlap correction should throw an
# error
# XXX This is a buggy test, the epochs here don't overlap
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
preload=True, baseline=(None, 0), verbose=False)
xd = Xdawn(n_components=2, correct_overlap=True)
assert_raises(ValueError, xd.fit, epochs)
def test_xdawn_apply_transform():
"""Test Xdawn apply and transform."""
# Get data
raw, events, picks = _get_data()
raw.pick_types(eeg=True, meg=False)
epochs = Epochs(raw, events, event_id, tmin, tmax, proj=False,
preload=True, baseline=None,
verbose=False)
n_components = 2
# Fit Xdawn
xd = Xdawn(n_components=n_components, correct_overlap=False)
xd.fit(epochs)
# Apply on different types of instances
for inst in [raw, epochs.average(), epochs]:
denoise = xd.apply(inst)
# Apply on other thing should raise an error
assert_raises(ValueError, xd.apply, 42)
# Transform on epochs
xd.transform(epochs)
# Transform on ndarray
xd.transform(epochs._data)
# Transform on something else
assert_raises(ValueError, xd.transform, 42)
# Check numerical results with shuffled epochs
np.random.seed(0) # random makes unstable linalg
idx = np.arange(len(epochs))
np.random.shuffle(idx)
xd.fit(epochs[idx])
denoise_shfl = xd.apply(epochs)
assert_array_almost_equal(denoise['cond2']._data,
denoise_shfl['cond2']._data)
@requires_sklearn
def test_xdawn_regularization():
"""Test Xdawn with regularization."""
# Get data
raw, events, picks = _get_data()
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
preload=True, baseline=None, verbose=False)
# Test with overlapping events.
# modify events to simulate one overlap
events = epochs.events
sel = np.where(events[:, 2] == 2)[0][:2]
modified_event = events[sel[0]]
modified_event[0] += 1
epochs.events[sel[1]] = modified_event
# Fit and check that overlap was found and applied
xd = Xdawn(n_components=2, correct_overlap='auto', reg='oas')
xd.fit(epochs)
assert_equal(xd.correct_overlap_, True)
evoked = epochs['cond2'].average()
assert_true(np.sum(np.abs(evoked.data - xd.evokeds_['cond2'].data)))
# With covariance regularization
for reg in [.1, 0.1, 'ledoit_wolf', 'oas']:
xd = Xdawn(n_components=2, correct_overlap=False,
signal_cov=np.eye(len(picks)), reg=reg)
xd.fit(epochs)
# With bad shrinkage
xd = Xdawn(n_components=2, correct_overlap=False,
signal_cov=np.eye(len(picks)), reg=2)
assert_raises(ValueError, xd.fit, epochs)
@requires_sklearn
def test_XdawnTransformer():
"""Test _XdawnTransformer."""
# Get data
raw, events, picks = _get_data()
epochs = Epochs(raw, events, event_id, tmin, tmax, picks=picks,
preload=True, baseline=None, verbose=False)
X = epochs._data
y = epochs.events[:, -1]
# Fit
xdt = _XdawnTransformer()
xdt.fit(X, y)
assert_raises(ValueError, xdt.fit, X, y[1:])
assert_raises(ValueError, xdt.fit, 'foo')
# Provide covariance object
signal_cov = compute_raw_covariance(raw, picks=picks)
xdt = _XdawnTransformer(signal_cov=signal_cov)
xdt.fit(X, y)
# Provide ndarray
signal_cov = np.eye(len(picks))
xdt = _XdawnTransformer(signal_cov=signal_cov)
xdt.fit(X, y)
# Provide ndarray of bad shape
signal_cov = np.eye(len(picks) - 1)
xdt = _XdawnTransformer(signal_cov=signal_cov)
assert_raises(ValueError, xdt.fit, X, y)
# Provide another type
signal_cov = 42
xdt = _XdawnTransformer(signal_cov=signal_cov)
assert_raises(ValueError, xdt.fit, X, y)
# Fit with y as None
xdt = _XdawnTransformer()
xdt.fit(X)
# Compare xdawn and _XdawnTransformer
xd = Xdawn(correct_overlap=False)
xd.fit(epochs)
xdt = _XdawnTransformer()
xdt.fit(X, y)
assert_array_almost_equal(xd.filters_['cond2'][:, :2],
xdt.filters_.reshape(2, 2, 8)[0].T)
# Transform testing
xdt.transform(X[1:, ...]) # different number of epochs
xdt.transform(X[:, :, 1:]) # different number of time
assert_raises(ValueError, xdt.transform, X[:, 1:, :])
Xt = xdt.transform(X)
assert_raises(ValueError, xdt.transform, 42)
# Inverse transform testing
Xinv = xdt.inverse_transform(Xt)
assert_equal(Xinv.shape, X.shape)
xdt.inverse_transform(Xt[1:, ...])
xdt.inverse_transform(Xt[:, :, 1:])
# should raise an error if not correct number of components
assert_raises(ValueError, xdt.inverse_transform, Xt[:, 1:, :])
assert_raises(ValueError, xdt.inverse_transform, 42)
run_tests_if_main()
|
bsd-3-clause
|
MiLk/ansible
|
lib/ansible/modules/network/cloudengine/ce_vrf_af.py
|
47
|
30552
|
#!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.0'}
DOCUMENTATION = '''
---
module: ce_vrf_af
version_added: "2.4"
short_description: Manages VPN instance address family on HUAWEI CloudEngine switches.
description:
- Manages VPN instance address family of HUAWEI CloudEngine switches.
author: Yang yang (@CloudEngine-Ansible)
notes:
- If I(state=absent), the vrf address family will be removed, regardless of the
non-required parameters.
options:
vrf:
description:
- VPN instance.
required: true
default: null
vrf_aftype:
description:
- VPN instance address family.
required: false
choices: ['v4','v6']
default: v4
route_distinguisher:
description:
- VPN instance route distinguisher, the RD used to distinguish the same route prefix
from different VPNs. The RD must be set before setting vpn_target_value.
required: false
vpn_target_state:
description:
- Manage the state of the vpn target.
required: false
choices: ['present','absent']
vpn_target_type:
description:
- VPN instance vpn target type.
required: false
choices: ['export_extcommunity', 'import_extcommunity']
default: null
vpn_target_value:
description:
- VPN instance target value. Such as X.X.X.X:number<0-65535> or number<0-65535>:number<0-4294967295>
or number<0-65535>.number<0-65535>:number<0-65535> or number<65536-4294967295>:number<0-65535>
but not support 0:0 and 0.0:0.
required: false
evpn:
description:
- Is extend vpn or normal vpn.
required: false
choices: ['true', 'false']
default: false
state:
description:
- Manage the state of the af.
required: false
choices: ['present','absent']
default: present
'''
EXAMPLES = '''
- name: vrf af module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Config vpna, set address family is ipv4
ce_vrf_af:
vrf: vpna
vrf_aftype: v4
state: present
provider: "{{ cli }}"
- name: Config vpna, delete address family is ipv4
ce_vrf_af:
vrf: vpna
vrf_aftype: v4
state: absent
provider: "{{ cli }}"
- name: Config vpna, set address family is ipv4,rd=1:1,set vpn_target_type=export_extcommunity,vpn_target_value=2:2
ce_vrf_af:
vrf: vpna
vrf_aftype: v4
route_distinguisher: 1:1
vpn_target_type: export_extcommunity
vpn_target_value: 2:2
vpn_target_state: present
state: present
provider: "{{ cli }}"
- name: Config vpna, set address family is ipv4,rd=1:1,delete vpn_target_type=export_extcommunity,vpn_target_value=2:2
ce_vrf_af:
vrf: vpna
vrf_aftype: v4
route_distinguisher: 1:1
vpn_target_type: export_extcommunity
vpn_target_value: 2:2
vpn_target_state: absent
state: present
provider: "{{ cli }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"vrf": "vpna",
"vrf_aftype": "v4",
"state": "present",
"vpn_targe_state":"absent",
"evpn": "none",
"vpn_target_type": "none",
"vpn_target_value": "none"}
existing:
description: k/v pairs of the existing vrf address family
returned: always
type: dict
sample: {
"route_distinguisher": [
"1:1",
"2:2"
],
"vpn_target_type": [],
"vpn_target_value": [],
"vrf": "vpna",
"vrf_aftype": [
"ipv4uni",
"ipv6uni"
]
}
end_state:
description: k/v pairs of the vrf address family after module execution
returned: always
type: dict
sample: {
"route_distinguisher": [
"1:1",
"2:2"
],
"vpn_target_type": [
"import_extcommunity",
"3:3"
],
"vpn_target_value": [],
"vrf": "vpna",
"vrf_aftype": [
"ipv4uni",
"ipv6uni"
]
}
updates:
description: command list sent to the device
returned: always
type: list
sample: [
"ip vpn-instance vpna",
"vpn-target 3:3 import_extcommunity"
]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_VRF = """
<filter type="subtree">
<l3vpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<l3vpncomm>
<l3vpnInstances>
<l3vpnInstance>
<vrfName></vrfName>
<vrfDescription></vrfDescription>
</l3vpnInstance>
</l3vpnInstances>
</l3vpncomm>
</l3vpn>
</filter>
"""
CE_NC_GET_VRF_AF = """
<filter type="subtree">
<l3vpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<l3vpncomm>
<l3vpnInstances>
<l3vpnInstance>
<vrfName>%s</vrfName>
<vpnInstAFs>
<vpnInstAF>
<afType></afType>
<vrfRD></vrfRD>%s
</vpnInstAF>
</vpnInstAFs>
</l3vpnInstance>
</l3vpnInstances>
</l3vpncomm>
</l3vpn>
</filter>
"""
CE_NC_DELETE_VRF_AF = """
<l3vpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<l3vpncomm>
<l3vpnInstances>
<l3vpnInstance>
<vrfName>%s</vrfName>
<vpnInstAFs>
<vpnInstAF operation="delete">
<afType>%s</afType>
</vpnInstAF>
</vpnInstAFs>
</l3vpnInstance>
</l3vpnInstances>
</l3vpncomm>
</l3vpn>
"""
CE_NC_CREATE_VRF_AF = """
<l3vpn xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<l3vpncomm>
<l3vpnInstances>
<l3vpnInstance>
<vrfName>%s</vrfName>
<vpnInstAFs>
<vpnInstAF operation="merge">
<afType>%s</afType>
<vrfRD>%s</vrfRD>%s
</vpnInstAF>
</vpnInstAFs>
</l3vpnInstance>
</l3vpnInstances>
</l3vpncomm></l3vpn>
"""
CE_NC_CREATE_VRF_TARGET = """
<vpnTargets>
<vpnTarget operation="merge">
<vrfRTType>%s</vrfRTType>
<vrfRTValue>%s</vrfRTValue>
</vpnTarget>
</vpnTargets>
"""
CE_NC_DELETE_VRF_TARGET = """
<vpnTargets>
<vpnTarget operation="delete">
<vrfRTType>%s</vrfRTType>
<vrfRTValue>%s</vrfRTValue>
</vpnTarget>
</vpnTargets>
"""
CE_NC_GET_VRF_TARGET = """
<vpnTargets>
<vpnTarget>
<vrfRTValue></vrfRTValue>
<vrfRTType></vrfRTType>
</vpnTarget>
</vpnTargets>
"""
CE_NC_CREATE_EXTEND_VRF_TARGET = """
<exVpnTargets>
<exVpnTarget operation="merge">
<vrfRTType>%s</vrfRTType>
<vrfRTValue>%s</vrfRTValue>
<extAddrFamily>evpn</extAddrFamily>
</exVpnTarget>
</exVpnTargets>
"""
CE_NC_DELETE_EXTEND_VRF_TARGET = """
<exVpnTargets>
<exVpnTarget operation="delete">
<vrfRTType>%s</vrfRTType>
<vrfRTValue>%s</vrfRTValue>
<extAddrFamily>evpn</extAddrFamily>
</exVpnTarget>
</exVpnTargets>
"""
CE_NC_GET_EXTEND_VRF_TARGET = """
<exVpnTargets>
<exVpnTarget>
<vrfRTType></vrfRTType>
<vrfRTValue></vrfRTValue>
<extAddrFamily></extAddrFamily>
</exVpnTarget>
</exVpnTargets>
"""
def build_config_xml(xmlstr):
"""build_config_xml"""
return '<config> ' + xmlstr + ' </config>'
def is_valid_value(vrf_targe_value):
"""check if the vrf target value is valid"""
each_num = None
if len(vrf_targe_value) > 21 or len(vrf_targe_value) < 3:
return False
if vrf_targe_value.find(':') == -1:
return False
elif vrf_targe_value == '0:0':
return False
elif vrf_targe_value == '0.0:0':
return False
else:
value_list = vrf_targe_value.split(':')
if value_list[0].find('.') != -1:
if not value_list[1].isdigit():
return False
if int(value_list[1]) > 65535:
return False
value = value_list[0].split('.')
if len(value) == 4:
for each_num in value:
if not each_num.isdigit():
return False
if int(each_num) > 255:
return False
return True
elif len(value) == 2:
for each_num in value:
if not each_num.isdigit():
return False
if int(each_num) > 65535:
return False
return True
else:
return False
elif not value_list[0].isdigit():
return False
elif not value_list[1].isdigit():
return False
elif int(value_list[0]) < 65536 and int(value_list[1]) < 4294967296:
return True
elif int(value_list[0]) > 65535 and int(value_list[0]) < 4294967296:
return bool(int(value_list[1]) < 65536)
else:
return False
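# Illustrative examples of the validation rules above:
#
#   is_valid_value('1:1')             # True  (2-byte ASN:number)
#   is_valid_value('65536:1')         # True  (4-byte ASN:number)
#   is_valid_value('192.168.0.1:99')  # True  (IPv4:number)
#   is_valid_value('0:0')             # False (explicitly rejected)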
class VrfAf(object):
"""manage the vrf address family and export/import target"""
def __init__(self, argument_spec, ):
self.spec = argument_spec
self.module = None
self.init_module()
# vpn instance info
self.vrf = self.module.params['vrf']
self.vrf_aftype = self.module.params['vrf_aftype']
if self.vrf_aftype == 'v4':
self.vrf_aftype = 'ipv4uni'
else:
self.vrf_aftype = 'ipv6uni'
self.route_distinguisher = self.module.params['route_distinguisher']
self.evpn = self.module.params['evpn']
self.vpn_target_type = self.module.params['vpn_target_type']
self.vpn_target_value = self.module.params['vpn_target_value']
self.vpn_target_state = self.module.params['vpn_target_state']
self.state = self.module.params['state']
# state
self.changed = False
self.updates_cmd = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
self.vpn_target_changed = False
self.vrf_af_type_changed = False
self.vrf_rd_changed = False
self.vrf_af_info = dict()
def init_module(self):
"""init_module"""
self.module = AnsibleModule(
argument_spec=self.spec, supports_check_mode=True)
def check_response(self, xml_str, xml_name):
"""Check if response message is already succeed."""
if "<ok/>" not in xml_str:
self.module.fail_json(msg='Error: %s failed.' % xml_name)
def is_vrf_af_exist(self):
"""is vrf address family exist"""
if not self.vrf_af_info:
return False
for vrf_af_ele in self.vrf_af_info["vpnInstAF"]:
if vrf_af_ele["afType"] == self.vrf_aftype:
return True
else:
continue
return False
def get_exist_rd(self):
"""get exist route distinguisher """
if not self.vrf_af_info:
return None
for vrf_af_ele in self.vrf_af_info["vpnInstAF"]:
if vrf_af_ele["afType"] == self.vrf_aftype:
if vrf_af_ele["vrfRD"] is None:
return None
else:
return vrf_af_ele["vrfRD"]
else:
continue
return None
def is_vrf_rd_exist(self):
"""is vrf route distinguisher exist"""
if not self.vrf_af_info:
return False
for vrf_af_ele in self.vrf_af_info["vpnInstAF"]:
if vrf_af_ele["afType"] == self.vrf_aftype:
if vrf_af_ele["vrfRD"] is None:
return False
if self.route_distinguisher is not None:
return bool(vrf_af_ele["vrfRD"] == self.route_distinguisher)
else:
return True
else:
continue
return False
def is_vrf_rt_exist(self):
"""is vpn target exist"""
if not self.vrf_af_info:
return False
for vrf_af_ele in self.vrf_af_info["vpnInstAF"]:
if vrf_af_ele["afType"] == self.vrf_aftype:
if self.evpn is False:
if not vrf_af_ele.get("vpnTargets"):
return False
for vpn_target in vrf_af_ele.get("vpnTargets"):
if vpn_target["vrfRTType"] == self.vpn_target_type \
and vpn_target["vrfRTValue"] == self.vpn_target_value:
return True
else:
continue
else:
if not vrf_af_ele.get("evpnTargets"):
return False
for evpn_target in vrf_af_ele.get("evpnTargets"):
if evpn_target["vrfRTType"] == self.vpn_target_type \
and evpn_target["vrfRTValue"] == self.vpn_target_value:
return True
else:
continue
else:
continue
return False
def set_update_cmd(self):
""" set update command"""
if not self.changed:
return
if self.state == "present":
self.updates_cmd.append('ip vpn-instance %s' % (self.vrf))
if self.vrf_aftype == 'ipv4uni':
self.updates_cmd.append('ipv4-family')
elif self.vrf_aftype == 'ipv6uni':
self.updates_cmd.append('ipv6-family')
if self.route_distinguisher:
if not self.is_vrf_rd_exist():
self.updates_cmd.append(
'route-distinguisher %s' % self.route_distinguisher)
else:
if self.get_exist_rd() is not None:
self.updates_cmd.append(
'undo route-distinguisher %s' % self.get_exist_rd())
if self.vpn_target_state == "present":
if not self.is_vrf_rt_exist():
if self.evpn is False:
self.updates_cmd.append(
'vpn-target %s %s' % (self.vpn_target_value, self.vpn_target_type))
else:
self.updates_cmd.append(
'vpn-target %s %s evpn' % (self.vpn_target_value, self.vpn_target_type))
elif self.vpn_target_state == "absent":
if self.is_vrf_rt_exist():
if self.evpn is False:
self.updates_cmd.append(
'undo vpn-target %s %s' % (self.vpn_target_value, self.vpn_target_type))
else:
self.updates_cmd.append(
'undo vpn-target %s %s evpn' % (self.vpn_target_value, self.vpn_target_type))
else:
self.updates_cmd.append('ip vpn-instance %s' % (self.vrf))
if self.vrf_aftype == 'ipv4uni':
self.updates_cmd.append('undo ipv4-family')
elif self.vrf_aftype == 'ipv6uni':
self.updates_cmd.append('undo ipv6-family')
def get_vrf(self):
""" check if vrf is need to change"""
getxmlstr = CE_NC_GET_VRF
xmlstr_new_1 = (self.vrf.lower())
xml_str = get_nc_config(self.module, getxmlstr)
re_find_1 = re.findall(
r'.*<vrfname>(.*)</vrfname>.*', xml_str.lower())
if not re_find_1:
return False
return xmlstr_new_1 in re_find_1
def get_vrf_af(self):
""" check if vrf is need to change"""
self.vrf_af_info["vpnInstAF"] = list()
if self.evpn is True:
getxmlstr = CE_NC_GET_VRF_AF % (
self.vrf, CE_NC_GET_EXTEND_VRF_TARGET)
else:
getxmlstr = CE_NC_GET_VRF_AF % (self.vrf, CE_NC_GET_VRF_TARGET)
xml_str = get_nc_config(self.module, getxmlstr)
if 'data/' in xml_str:
return self.state == 'present'
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
# get the vpn address family and RD text
vrf_addr_types = root.findall(
"data/l3vpn/l3vpncomm/l3vpnInstances/l3vpnInstance/vpnInstAFs/vpnInstAF")
if vrf_addr_types:
for vrf_addr_type in vrf_addr_types:
vrf_af_info = dict()
for vrf_addr_type_ele in vrf_addr_type:
if vrf_addr_type_ele.tag in ["vrfName", "afType", "vrfRD"]:
vrf_af_info[vrf_addr_type_ele.tag] = vrf_addr_type_ele.text
if vrf_addr_type_ele.tag == 'vpnTargets':
vrf_af_info["vpnTargets"] = list()
for rtargets in vrf_addr_type_ele:
rt_dict = dict()
for rtarget in rtargets:
if rtarget.tag in ["vrfRTValue", "vrfRTType"]:
rt_dict[rtarget.tag] = rtarget.text
vrf_af_info["vpnTargets"].append(rt_dict)
if vrf_addr_type_ele.tag == 'exVpnTargets':
vrf_af_info["evpnTargets"] = list()
for rtargets in vrf_addr_type_ele:
rt_dict = dict()
for rtarget in rtargets:
if rtarget.tag in ["vrfRTValue", "vrfRTType"]:
rt_dict[rtarget.tag] = rtarget.text
vrf_af_info["evpnTargets"].append(rt_dict)
self.vrf_af_info["vpnInstAF"].append(vrf_af_info)
def check_params(self):
"""Check all input params"""
# vrf and description check
if self.vrf == '_public_':
self.module.fail_json(
msg='Error: The vrf name _public_ is reserved.')
if not self.get_vrf():
self.module.fail_json(
msg='Error: The vrf name does not exist.')
if self.state == 'present':
if self.route_distinguisher:
if not is_valid_value(self.route_distinguisher):
self.module.fail_json(msg='Error: The vrf route distinguisher length must be between 3 and 21, '
'i.e. X.X.X.X:number<0-65535> or number<0-65535>:number<0-4294967295>'
'or number<0-65535>.number<0-65535>:number<0-65535>'
'or number<65536-4294967295>:number<0-65535>'
' but not be 0:0 or 0.0:0.')
if not self.vpn_target_state:
if self.vpn_target_value or self.vpn_target_type:
self.module.fail_json(
msg='Error: The vpn target state should be set.')
if self.vpn_target_state:
if not self.vpn_target_value or not self.vpn_target_type:
self.module.fail_json(
msg='Error: The vpn target value and type should be set.')
if self.vpn_target_value:
if not is_valid_value(self.vpn_target_value):
self.module.fail_json(msg='Error: The vrf target value length must be between 3 and 21, '
'i.e. X.X.X.X:number<0-65535> or number<0-65535>:number<0-4294967295>'
'or number<0-65535>.number<0-65535>:number<0-65535>'
'or number<65536-4294967295>:number<0-65535>'
' but not be 0:0 or 0.0:0.')
def operate_vrf_af(self):
"""config/delete vrf"""
vrf_target_operate = ''
if self.route_distinguisher is None:
route_d = ''
else:
route_d = self.route_distinguisher
if self.state == 'present':
if self.vrf_aftype:
if self.is_vrf_af_exist():
self.vrf_af_type_changed = False
else:
self.vrf_af_type_changed = True
configxmlstr = CE_NC_CREATE_VRF_AF % (
self.vrf, self.vrf_aftype, route_d, vrf_target_operate)
else:
self.vrf_af_type_changed = bool(self.is_vrf_af_exist())
if self.vpn_target_state == 'present':
if self.evpn is False and not self.is_vrf_rt_exist():
vrf_target_operate = CE_NC_CREATE_VRF_TARGET % (
self.vpn_target_type, self.vpn_target_value)
configxmlstr = CE_NC_CREATE_VRF_AF % (
self.vrf, self.vrf_aftype, route_d, vrf_target_operate)
self.vpn_target_changed = True
if self.evpn is True and not self.is_vrf_rt_exist():
vrf_target_operate = CE_NC_CREATE_EXTEND_VRF_TARGET % (
self.vpn_target_type, self.vpn_target_value)
configxmlstr = CE_NC_CREATE_VRF_AF % (
self.vrf, self.vrf_aftype, route_d, vrf_target_operate)
self.vpn_target_changed = True
elif self.vpn_target_state == 'absent':
if self.evpn is False and self.is_vrf_rt_exist():
vrf_target_operate = CE_NC_DELETE_VRF_TARGET % (
self.vpn_target_type, self.vpn_target_value)
configxmlstr = CE_NC_CREATE_VRF_AF % (
self.vrf, self.vrf_aftype, route_d, vrf_target_operate)
self.vpn_target_changed = True
if self.evpn is True and self.is_vrf_rt_exist():
vrf_target_operate = CE_NC_DELETE_EXTEND_VRF_TARGET % (
self.vpn_target_type, self.vpn_target_value)
configxmlstr = CE_NC_CREATE_VRF_AF % (
self.vrf, self.vrf_aftype, route_d, vrf_target_operate)
self.vpn_target_changed = True
else:
if self.route_distinguisher:
if not self.is_vrf_rd_exist():
configxmlstr = CE_NC_CREATE_VRF_AF % (
self.vrf, self.vrf_aftype, route_d, vrf_target_operate)
self.vrf_rd_changed = True
else:
self.vrf_rd_changed = False
else:
if self.is_vrf_rd_exist():
configxmlstr = CE_NC_CREATE_VRF_AF % (
self.vrf, self.vrf_aftype, route_d, vrf_target_operate)
self.vrf_rd_changed = True
else:
self.vrf_rd_changed = False
            self.changed = (self.vrf_rd_changed or self.vrf_af_type_changed
                            or self.vpn_target_changed)
else:
if self.is_vrf_af_exist():
configxmlstr = CE_NC_DELETE_VRF_AF % (
self.vrf, self.vrf_aftype)
self.changed = True
else:
self.changed = False
if not self.changed:
return
conf_str = build_config_xml(configxmlstr)
recv_xml = set_nc_config(self.module, conf_str)
self.check_response(recv_xml, "OPERATE_VRF_AF")
def get_proposed(self):
"""get_proposed"""
if self.state == 'present':
self.proposed['vrf'] = self.vrf
if self.vrf_aftype is None:
self.proposed['vrf_aftype'] = 'ipv4uni'
else:
self.proposed['vrf_aftype'] = self.vrf_aftype
if self.route_distinguisher is not None:
self.proposed['route_distinguisher'] = self.route_distinguisher
else:
self.proposed['route_distinguisher'] = list()
if self.vpn_target_state == 'present':
self.proposed['evpn'] = self.evpn
self.proposed['vpn_target_type'] = self.vpn_target_type
self.proposed['vpn_target_value'] = self.vpn_target_value
else:
self.proposed['vpn_target_type'] = list()
self.proposed['vpn_target_value'] = list()
else:
self.proposed = dict()
self.proposed['state'] = self.state
self.proposed['vrf'] = self.vrf
self.proposed['vrf_aftype'] = list()
self.proposed['route_distinguisher'] = list()
self.proposed['vpn_target_value'] = list()
self.proposed['vpn_target_type'] = list()
def get_existing(self):
"""get_existing"""
self.get_vrf_af()
self.existing['vrf'] = self.vrf
self.existing['vrf_aftype'] = list()
self.existing['route_distinguisher'] = list()
self.existing['vpn_target_value'] = list()
self.existing['vpn_target_type'] = list()
self.existing['evpn_target_value'] = list()
self.existing['evpn_target_type'] = list()
if self.vrf_af_info["vpnInstAF"] is None:
return
for vrf_af_ele in self.vrf_af_info["vpnInstAF"]:
self.existing['vrf_aftype'].append(vrf_af_ele["afType"])
self.existing['route_distinguisher'].append(
vrf_af_ele["vrfRD"])
if vrf_af_ele.get("vpnTargets"):
for vpn_target in vrf_af_ele.get("vpnTargets"):
self.existing['vpn_target_type'].append(
vpn_target["vrfRTType"])
self.existing['vpn_target_value'].append(
vpn_target["vrfRTValue"])
if vrf_af_ele.get("evpnTargets"):
for evpn_target in vrf_af_ele.get("evpnTargets"):
self.existing['evpn_target_type'].append(
evpn_target["vrfRTType"])
self.existing['evpn_target_value'].append(
evpn_target["vrfRTValue"])
def get_end_state(self):
"""get_end_state"""
self.get_vrf_af()
self.end_state['vrf'] = self.vrf
self.end_state['vrf_aftype'] = list()
self.end_state['route_distinguisher'] = list()
self.end_state['vpn_target_value'] = list()
self.end_state['vpn_target_type'] = list()
self.end_state['evpn_target_value'] = list()
self.end_state['evpn_target_type'] = list()
if self.vrf_af_info["vpnInstAF"] is None:
return
for vrf_af_ele in self.vrf_af_info["vpnInstAF"]:
self.end_state['vrf_aftype'].append(vrf_af_ele["afType"])
self.end_state['route_distinguisher'].append(vrf_af_ele["vrfRD"])
if vrf_af_ele.get("vpnTargets"):
for vpn_target in vrf_af_ele.get("vpnTargets"):
self.end_state['vpn_target_type'].append(
vpn_target["vrfRTType"])
self.end_state['vpn_target_value'].append(
vpn_target["vrfRTValue"])
if vrf_af_ele.get("evpnTargets"):
for evpn_target in vrf_af_ele.get("evpnTargets"):
self.end_state['evpn_target_type'].append(
evpn_target["vrfRTType"])
self.end_state['evpn_target_value'].append(
evpn_target["vrfRTValue"])
def work(self):
"""worker"""
self.check_params()
self.get_existing()
self.get_proposed()
self.operate_vrf_af()
self.set_update_cmd()
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
"""main"""
argument_spec = dict(
vrf=dict(required=True, type='str'),
vrf_aftype=dict(choices=['v4', 'v6'],
default='v4', required=False),
route_distinguisher=dict(required=False, type='str'),
evpn=dict(type='bool', default=False),
vpn_target_type=dict(
choices=['export_extcommunity', 'import_extcommunity'], required=False),
vpn_target_value=dict(required=False, type='str'),
vpn_target_state=dict(choices=['absent', 'present'], required=False),
state=dict(choices=['absent', 'present'],
default='present', required=False),
)
argument_spec.update(ce_argument_spec)
interface = VrfAf(argument_spec)
interface.work()
if __name__ == '__main__':
main()
|
gpl-3.0
|
wkritzinger/asuswrt-merlin
|
release/src/router/wget/testenv/conf/expected_files.py
|
17
|
1873
|
from difflib import unified_diff
import os
import sys
from conf import hook
from exc.test_failed import TestFailed
""" Post-Test Hook: ExpectedFiles
This is a Post-Test hook that checks the test directory for the files it
contains. A dictionary object is passed to it, which contains a mapping of
filenames and contents of all the files that the directory is expected to
contain.
Raises a TestFailed exception if the expected files are not found or if extra
files are found, else returns gracefully.
"""
@hook()
class ExpectedFiles:
def __init__(self, expected_fs):
self.expected_fs = expected_fs
@staticmethod
def gen_local_fs_snapshot():
snapshot = {}
for parent, dirs, files in os.walk('.'):
for name in files:
f = {'content': ''}
file_path = os.path.join(parent, name)
with open(file_path) as fp:
f['content'] = fp.read()
snapshot[file_path[2:]] = f
return snapshot
def __call__(self, test_obj):
local_fs = self.gen_local_fs_snapshot()
for file in self.expected_fs:
if file.name in local_fs:
local_file = local_fs.pop(file.name)
if file.content != local_file['content']:
for line in unified_diff(local_file['content'],
file.content,
fromfile='Actual',
tofile='Expected'):
print(line, file=sys.stderr)
raise TestFailed('Contents of %s do not match' % file.name)
else:
raise TestFailed('Expected file %s not found.' % file.name)
if local_fs:
print(local_fs)
raise TestFailed('Extra files downloaded.')
|
gpl-2.0
|
40123148/w17b
|
static/Brython3.1.3-20150514-095342/Lib/stat.py
|
765
|
4304
|
"""Constants/functions for interpreting results of os.stat() and os.lstat().
Suggested usage: from stat import *
"""
# Indices for stat struct members in the tuple returned by os.stat()
ST_MODE = 0
ST_INO = 1
ST_DEV = 2
ST_NLINK = 3
ST_UID = 4
ST_GID = 5
ST_SIZE = 6
ST_ATIME = 7
ST_MTIME = 8
ST_CTIME = 9
# Extract bits from the mode
def S_IMODE(mode):
"""Return the portion of the file's mode that can be set by
os.chmod().
"""
return mode & 0o7777
def S_IFMT(mode):
"""Return the portion of the file's mode that describes the
file type.
"""
return mode & 0o170000
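# For example, a regular file with mode 0o100644 splits cleanly:
# S_IFMT(0o100644) == 0o100000 (S_IFREG) and S_IMODE(0o100644) == 0o644.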
# Constants used as S_IFMT() for various file types
# (not all are implemented on all systems)
S_IFDIR = 0o040000 # directory
S_IFCHR = 0o020000 # character device
S_IFBLK = 0o060000 # block device
S_IFREG = 0o100000 # regular file
S_IFIFO = 0o010000 # fifo (named pipe)
S_IFLNK = 0o120000 # symbolic link
S_IFSOCK = 0o140000 # socket file
# Functions to test for each file type
def S_ISDIR(mode):
"""Return True if mode is from a directory."""
return S_IFMT(mode) == S_IFDIR
def S_ISCHR(mode):
"""Return True if mode is from a character special device file."""
return S_IFMT(mode) == S_IFCHR
def S_ISBLK(mode):
"""Return True if mode is from a block special device file."""
return S_IFMT(mode) == S_IFBLK
def S_ISREG(mode):
"""Return True if mode is from a regular file."""
return S_IFMT(mode) == S_IFREG
def S_ISFIFO(mode):
"""Return True if mode is from a FIFO (named pipe)."""
return S_IFMT(mode) == S_IFIFO
def S_ISLNK(mode):
"""Return True if mode is from a symbolic link."""
return S_IFMT(mode) == S_IFLNK
def S_ISSOCK(mode):
"""Return True if mode is from a socket."""
return S_IFMT(mode) == S_IFSOCK
# Names for permission bits
S_ISUID = 0o4000 # set UID bit
S_ISGID = 0o2000 # set GID bit
S_ENFMT = S_ISGID # file locking enforcement
S_ISVTX = 0o1000 # sticky bit
S_IREAD = 0o0400 # Unix V7 synonym for S_IRUSR
S_IWRITE = 0o0200 # Unix V7 synonym for S_IWUSR
S_IEXEC = 0o0100 # Unix V7 synonym for S_IXUSR
S_IRWXU = 0o0700 # mask for owner permissions
S_IRUSR = 0o0400 # read by owner
S_IWUSR = 0o0200 # write by owner
S_IXUSR = 0o0100 # execute by owner
S_IRWXG = 0o0070 # mask for group permissions
S_IRGRP = 0o0040 # read by group
S_IWGRP = 0o0020 # write by group
S_IXGRP = 0o0010 # execute by group
S_IRWXO = 0o0007 # mask for others (not in group) permissions
S_IROTH = 0o0004 # read by others
S_IWOTH = 0o0002 # write by others
S_IXOTH = 0o0001 # execute by others
# Names for file flags
UF_NODUMP = 0x00000001 # do not dump file
UF_IMMUTABLE = 0x00000002 # file may not be changed
UF_APPEND = 0x00000004 # file may only be appended to
UF_OPAQUE = 0x00000008 # directory is opaque when viewed through a union stack
UF_NOUNLINK = 0x00000010 # file may not be renamed or deleted
UF_COMPRESSED = 0x00000020 # OS X: file is hfs-compressed
UF_HIDDEN = 0x00008000 # OS X: file should not be displayed
SF_ARCHIVED = 0x00010000 # file may be archived
SF_IMMUTABLE = 0x00020000 # file may not be changed
SF_APPEND = 0x00040000 # file may only be appended to
SF_NOUNLINK = 0x00100000 # file may not be renamed or deleted
SF_SNAPSHOT = 0x00200000 # file is a snapshot file
_filemode_table = (
((S_IFLNK, "l"),
(S_IFREG, "-"),
(S_IFBLK, "b"),
(S_IFDIR, "d"),
(S_IFCHR, "c"),
(S_IFIFO, "p")),
((S_IRUSR, "r"),),
((S_IWUSR, "w"),),
((S_IXUSR|S_ISUID, "s"),
(S_ISUID, "S"),
(S_IXUSR, "x")),
((S_IRGRP, "r"),),
((S_IWGRP, "w"),),
((S_IXGRP|S_ISGID, "s"),
(S_ISGID, "S"),
(S_IXGRP, "x")),
((S_IROTH, "r"),),
((S_IWOTH, "w"),),
((S_IXOTH|S_ISVTX, "t"),
(S_ISVTX, "T"),
(S_IXOTH, "x"))
)
def filemode(mode):
"""Convert a file's mode to a string of the form '-rwxrwxrwx'."""
perm = []
for table in _filemode_table:
for bit, char in table:
if mode & bit == bit:
perm.append(char)
break
else:
perm.append("-")
return "".join(perm)
|
agpl-3.0
|
kyle-long/pyshelf
|
tests/functional_test_base.py
|
2
|
10117
|
from boto.s3.key import Key
from mock import Mock
from moto import mock_s3
from pyproctor import MonkeyPatcher
from shelf.app import app
from shelf.error_code import ErrorCode
from shelf.metadata.initializer import Initializer
from shelf.resource_identity import ResourceIdentity
from shelf.search.container import Container as SearchContainer
from tests.metadata.comparator import Comparator as MetadataComparator
from tests.metadata_builder import MetadataBuilder
from tests.route_tester.tester import Tester
from tests.search.test_wrapper import TestWrapper as SearchTestWrapper
from tests.test_base import TestBase
import boto
import shelf.configure as configure
import tests.metadata_utils as meta_utils
import tests.permission_utils as utils
import yaml
class FunctionalTestBase(TestBase):
RESPONSE_404 = {
"message": "Resource not found",
"code": ErrorCode.RESOURCE_NOT_FOUND
}
RESPONSE_403 = {
"code": ErrorCode.FORBIDDEN,
"message": "Forbidden"
}
RESPONSE_401 = {
"code": ErrorCode.PERMISSION_DENIED,
"message": "Permission denied"
}
RESPONSE_INVALID_NAME = {
"message": "Artifact and directories names that BEGIN with an underscore are reserved as private "
"and cannot be accessed or created. This of course exludes _search and _meta which are "
"not part of the artifact path itself.",
"code": ErrorCode.INVALID_ARTIFACT_NAME
}
RESPONSE_DUPLICATE = {
"code": ErrorCode.DUPLICATE_ARTIFACT,
"message": "Artifact by name test already exists in current directory"
}
RESPONSE_INVALID_FORMAT = {
"code": ErrorCode.INVALID_REQUEST_DATA_FORMAT,
"message": "Data sent with request must be in JSON format and also be either an array or an object.",
}
CONFIG = {
"buckets": [
{
"name": "test",
"referenceName": "test",
"accessKey": "test",
"secretKey": "test"
},
{
"name": "bucket2",
"referenceName": "b2",
"accessKey": "test",
"secretKey": "test"
},
{
"name": "this-bucket-doesnt-exist-lol",
"referenceName": "thisBucketDoesntExistLol",
"accessKey": "fail",
"secretKey": "fail"
}
],
"elasticsearch": {
"connectionString": "http://localhost:9200/metadata",
"upperSearchResultLimit": 100
}
}
def setUp(self):
self.app = app
self.setup_elastic()
self.setup_moto()
self.setup_metadata()
self.test_client = app.test_client()
self._route_tester = None
self._metadata_comparator = None
self.setup_storage()
def setup_storage(self):
"""
This is important for comparing metadata later
on because we "assertEqual" but the date would
be different every time the test was run. The
solution was to patch _to_utc to always return
the same date.
"""
get_created_date = Mock(return_value=meta_utils.CREATED_DATE)
MonkeyPatcher.patch(Initializer, "_get_created_date", get_created_date)
@property
def metadata_comparator(self):
if not self._metadata_comparator:
self._metadata_comparator = MetadataComparator(
self,
FunctionalTestBase.CONFIG["elasticsearch"]["connectionString"],
app.logger)
return self._metadata_comparator
def assert_metadata_matches(self, resource_url, bucket_name=None):
"""
Makes the assumption that mock_s3 has been
enabled (done in configure_moto).
Makes sure that the metadata for a particular
artifact is the same in the search layer and
the cloud layer.
Args:
resource_url(basestring): The full path to the resource from the APIs
perspective
bucket_name(basestring): Optional. The name of the bucket the artifact
will be stored in.
Raises:
AssertionError
"""
if not bucket_name:
identity = ResourceIdentity(resource_url)
for bucket_config in FunctionalTestBase.CONFIG["buckets"]:
# identity.bucket_name is actually reference name.
# TODO: Rename this.
if identity.bucket_name == bucket_config["referenceName"]:
bucket_name = bucket_config["name"]
if not bucket_name:
self.fail("bucket_name was not provided and we failed to look it up via FunctionalTestBase.CONFIG")
self.metadata_comparator.compare(resource_url, bucket_name)
@classmethod
def setUpClass(cls):
super(FunctionalTestBase, cls).setUpClass()
configure.logger(app.logger, "DEBUG")
app.config.update(cls.CONFIG)
def setup_elastic(self):
search_container = SearchContainer(self.app.logger, FunctionalTestBase.CONFIG["elasticsearch"])
self.search_wrapper = SearchTestWrapper(search_container)
def setup_moto(self):
self.moto_s3 = mock_s3()
self.moto_s3.start()
import httpretty
# EXTREMELY IMPORTANT! If the port is not
# appended httpretty does not identify it as http
# but httplib does so the file pointer that
        # is supposed to be filled up by httpretty.fakesocket.socket
# is not.
httpretty.core.POTENTIAL_HTTP_PORTS.add(9200)
self.boto_connection = boto.connect_s3()
self.boto_connection.create_bucket("test")
self.boto_connection.create_bucket("bucket2")
self.test_bucket = self.boto_connection.get_bucket("test")
self.setup_artifacts()
self.create_auth_key()
def setup_artifacts(self):
self.create_key(self.test_bucket, "test", contents="hello world")
self.create_key(self.test_bucket, "/dir/dir2/dir3/dir4/test5")
self.create_key(self.test_bucket, "/dir/dir2/dir3/nest-test", contents="hello world")
# "empty" has an empty metadata file. This is used when testing initialization
# of metadata
self.create_key(self.test_bucket, "empty", contents="hello world")
self.create_key(self.test_bucket, "/_metadata_empty.yaml")
self.create_key(self.test_bucket, "/dir/dir2/_secret", "No one should see this")
self.create_key(self.test_bucket, "/dir/dir2/not_secret", "You can see this though")
def create_key(self, bucket, artifact_name, contents=None):
"""
Creates an artifact in moto.
Args:
bucket(boto.s3.bucket.Bucket)
artifact_name(string)
contents(string | None)
"""
if contents is None:
contents = ""
key = Key(bucket, artifact_name)
key.set_contents_from_string(contents)
def setup_metadata(self):
self.add_metadata("/test/artifact/test")
self.add_metadata("/test/artifact/dir/dir2/dir3/nest-test")
self.add_metadata("/test/artifact/this/that/other", "1.2")
self.add_metadata("/test/artifact/thing", "1.2"),
self.add_metadata("/test/artifact/blah", "1.19"),
self.add_metadata("/test/artifact/a", "1.19"),
self.add_metadata("/test/artifact/zzzz", "1.19"),
self.add_metadata("/test/artifact/dir/dir2/Test", "2")
self.search_wrapper.refresh_index()
def add_metadata(self, resource_path, version="1", metadata=None):
"""
Adds metadata to moto and elastic.
"""
resource_id = ResourceIdentity(resource_path)
data = meta_utils.get_meta(resource_id.artifact_name, resource_id.resource_path, version)
if metadata:
data.update(metadata)
key = Key(self.boto_connection.get_bucket(resource_id.bucket_name), resource_id.cloud_metadata)
key.set_contents_from_string(yaml.dump(data))
self.search_wrapper.add_metadata(resource_id.search, data)
def create_auth_key(self):
# TODO: Revamp the permissions utils stuff. I am not
# a fan of how it works.
self.auth = self.setup_auth_token(utils.VALID_TOKEN)
self.read_only_auth = self.setup_auth_token(utils.READ_ONLY_TOKEN)
def setup_auth_token(self, token):
"""
Sets up authorization key file in both functional test buckets.
Args:
token: string
Returns:
dict: Authorization header.
"""
self.add_auth_token(token, "test")
self.add_auth_token(token, "bucket2")
return utils.auth_header(token)
def add_auth_token(self, token, bucket_name):
"""
Adds an auth token to the bucket represented by the
bucket_name provided. Note: This token must be defined
in tests.permission_utils.get_permissions
Args:
token(string)
bucket_name(string)
"""
key_name = "_keys/{0}".format(token)
permissions = utils.get_permissions(token)
bucket = self.boto_connection.get_bucket(bucket_name)
auth_key = Key(bucket, key_name)
auth_key.set_contents_from_string(permissions)
def create_metadata_builder(self):
return MetadataBuilder()
@property
def route_tester(self):
if not self._route_tester:
self._route_tester = Tester(self, self.test_client)
return self._route_tester
def tearDown(self):
self.moto_s3.stop()
self.search_wrapper.teardown_metadata()
        super(FunctionalTestBase, self).tearDown()
def response_500(self, message=None):
if not message:
message = "Internal server error"
return {
"message": message,
"code": "internal_server_error"
}
|
mit
|
marguslaak/django-xadmin
|
demo_app/demo/settings.py
|
1
|
5983
|
# Django settings for the demo project.
import sys
import os.path
reload(sys)
sys.setdefaultencoding('utf-8')
gettext = lambda s: s
PROJECT_ROOT = os.path.join(
os.path.realpath(os.path.dirname(__file__)), os.pardir)
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(PROJECT_ROOT, 'data.db'), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ['*']
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
LANGUAGES = (
('en', gettext('English')),
('zh_CN', gettext('Chinese')),
)
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = 'static/'
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '5=!nss_+^nvyyc_j(tdcf!7(_una*3gtw+_8v5jaa=)j0g^d_2'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'demo.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'demo.wsgi.application'
TEMPLATE_DIRS = (
# Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(PROJECT_ROOT, "templates"),
)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'xadmin',
'crispy_forms',
'reversion',
'app',
)
DATE_FORMAT = 'Y-m-d'
DATETIME_FORMAT = 'Y-m-d H:i'
TIME_FORMAT = 'H:i'
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler'
},
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
# 'django.db.backends': {
# 'handlers': ['console'],
# 'level': 'DEBUG',
# }
}
}
|
bsd-3-clause
|
Aqkotz/Picture
|
ggame/pygamedeps.py
|
228
|
12266
|
def module_exists(module_name):
try:
__import__(module_name)
except ImportError:
return False
else:
return True
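# For example, module_exists('pygame') is True only when pygame is importable,
# which gates the definition of all the shim classes below.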
# PYTHON 3 and PYGAME DEPENDENCIES
if module_exists('pygame'):
import pygame
class _body(object):
def __init__(self):
self.events = {}
def appendChild(self, obj):
self.child = obj
def bind(self, evt, action):
self.events[evt] = action
print("Binding {} to {}".format(evt, action))
class _document(object):
def __init__(self):
self.body = _body()
class _window(object):
def __init__(self):
self.document = _document()
self.animatex = 0
def open(self, s1, s2):
return self
def requestAnimationFrame(self, target):
if self.animatex < 10:
self.animatex += 1
target('dummy')
print("Animation frame")
class _Container(object):
def __init__(self):
pass
def destroy(self):
pass
class _Renderer(object):
def __init__(self, x, y, argsdict):
self.x = x
self.y = y
self.argsdict = argsdict
self.view = 'view'
print("Rendering created with {}x{} area".format(x, y))
def render(self, stage):
pass
class _GFX(object):
def __init__(self):
self.Container = _Container
self.autoDetectRenderer = _Renderer
window = _window()
GFX = _GFX()
#document = object()
def JSConstructor(cls):
return cls
def JSObject(obj):
return obj
class _GFX_Rectangle(pygame.Rect):
pass
GFX_Rectangle = _GFX_Rectangle
class _Texture(object):
def __init__(self, img='', crossdomain=False):
self.name = img
if not img == '':
self.img = pygame.image.load(img) # pygame surface
self.basewidth = self.img.get_width()
self.baseheight = self.img.get_height()
self.width = self.basewidth
self.height = self.baseheight
print("Texture from image {}, {}x{} pixels".format(img, self.basewidth, self.baseheight))
self.baserect = _GFX_Rectangle(0, 0, self.basewidth, self.baseheight)
self.framerect = self.baserect
@classmethod
def fromTexture(cls, texture, frame):
inst = cls()
inst.img = pygame.Surface((frame.width, frame.height))
inst.img.blit(texture.img, (0,0), frame)
inst.name = texture.name
inst.basewidth = texture.basewidth
inst.baseheight = texture.baseheight
inst.baserect = texture.baserect
inst.framerect = frame
inst.width = frame.width
inst.height = frame.height
print("Texture from base texture {}, {}x{} subframe {}x{}".format(inst.name, inst.basewidth, inst.baseheight, inst.framerect.width, inst.framerect.height))
return inst
def destroy(self):
try:
self.img.close()
print("Destroying an image")
            except Exception:
print("Destroying a non-image")
GFX_Texture = _Texture.fromTexture
GFX_Texture_fromImage = _Texture
class vector(object):
def __init__(self, x, y):
self.x = x
self.y = y
def __getitem__(self, key):
if key == 0:
return self.x
elif key == 1:
return self.y
else:
raise KeyError
        def __setitem__(self, key, value):
if key == 0:
self.x = value
elif key == 1:
self.y = value
else:
raise KeyError
class GFX_Sprite(object):
def __init__(self, texture):
self.basetexture = texture
self.texture = self.basetexture
self.visible = True
self.pos = vector(0,0)
self.anch = vector(0,0)
self.scal = vector(1.0, 1.0)
self.width = texture.width
self.height = texture.height
self.rotation = 0.0
@property
def position(self):
return self.pos
@position.setter
def position(self, value):
self.pos.x = value[0]
self.pos.y = value[1]
@property
def anchor(self):
return self.anch
@anchor.setter
def anchor(self, value):
self.anch.x = value[0]
self.anch.y = value[1]
@property
def scale(self):
return self.scal
@scale.setter
def scale(self, value):
self.scal.x = value[0]
self.scal.y = value[1]
def destroy(self):
pass
class _GFX_Graphics(object):
def __init__(self):
self.clear()
def clear(self):
self.cleared = True
self.visible = True
self.lwidth = None
self.color = None
self.alpha = None
self.fillcolor = None
self.fillalpha = None
self.x = None
self.y = None
self.rwidth = None
self.rheight = None
self.radius = None
self.ehw = None
self.ehh = None
self.xto = None
self.yto = None
self.jpath = None
self.width = None
self.height = None
self.position = vector(0,0)
def destroy(self):
self.clear()
def clone(self):
clone = type(self)()
clone.cleared = self.cleared
clone.visible = self.visible
clone.lwidth = self.lwidth
clone.color = self.color
clone.alpha = self.alpha
clone.fillalpha = self.fillalpha
clone.fillcolor = self.fillcolor
clone.x = self.x
clone.y = self.y
clone.rwidth = self.rwidth
clone.rheight = self.rheight
clone.radius = self.radius
clone.ehw = self.ehw
clone.ehh = self.ehh
clone.xto = self.xto
clone.yto = self.yto
clone.jpath = self.jpath
clone.width = self.width
clone.height = self.height
clone.position = self.position
return clone
        def lineStyle(self, width, color, alpha):
            # Store the stroke width as lwidth (initialized in clear and
            # copied in clone) so it does not clobber the drawable's width
            # attribute used for sizing.
            self.lwidth = width
            self.color = color
            self.alpha = alpha
def beginFill(self, color, alpha):
self.fillcolor = color
self.fillalpha = alpha
def drawRect(self, x, y, w, h):
self.x = x
self.y = y
self.position = vector(x,y)
self.rwidth = w
self.rheight = h
self.width = w
self.height = h
self.cleared = False
print("Rectangle {}x{} at {},{}".format(w,h,x,y))
return self
def drawCircle(self, x, y, radius):
self.x = x
self.y = y
self.position = vector(x,y)
self.radius = radius
self.cleared = False
self.width = radius*2
self.height = radius*2
print("Circle, radius {} at {},{}".format(radius,x,y))
return self
def drawEllipse(self, x, y, hw, hh):
self.x = x
self.y = y
self.position = vector(x,y)
self.ehw = hw
self.ehh = hh
self.width = hw*2
self.height = hh*2
self.cleared = False
print("Ellipse, {}x{} at {},{}".format(hw,hh,x,y))
return self
def drawPolygon(self, jpath):
self.jpath = jpath
self.cleared = False
self.position = vector(jpath[0],jpath[1])
x = []
y = []
for i in range(0,len(jpath)-1,2):
x.append(jpath[i])
y.append(jpath[i+1])
self.width = max(x)-min(x)
self.height = max(y)-min(y)
print("Polygon")
return self
def moveTo(self, x, y):
self.x = x
self.y = y
self.position = vector(x,y)
return self
def lineTo(self, x, y):
self.xto = x
self.yto = y
self.width = abs(x)
self.height = abs(y)
self.cleared = False
print("Line from {},{} to {},{}".format(self.x, self.y, x, y))
return self
class _GFX_Text(object):
def __init__(self, text, styledict):
self.text = text
self.styledict = styledict
self.alpha = None
self.visible = None
self.width = 99
self.height = 99
self.position = vector(0,0)
print("Text: {} in {}".format(text, styledict['font']))
def clone(self):
clone = type(self)(self.text, self.styledict)
return clone
def destroy(self):
self.text = ''
GFX_Text = _GFX_Text
_globalGraphics = _GFX_Graphics()
GFX_Graphics = _globalGraphics
def GFX_DetectRenderer():
pass
class _SND_all(object):
def __init__(self):
pass
def stop(self):
print("Stopping all sounds")
class _SND(object):
def __init__(self):
self.all = _SND_all
SND = _SND()
class _SND_Sound(object):
def __init__(self, url):
self.url = url
print("Creating sound object {}".format(url))
def load(self):
pass
def play(self):
print("Playing sound object {}".format(self.url))
SND_Sound = _SND_Sound
class HwEvent(object):
evtmap = {2: 'keydown', 3: 'keyup', 4: 'mousemove', 5: 'mousedown', 6: 'mouseup'}
keymap = {304:16,
303:16,
306:17,
308:18,
301:20,
276:37,
273:38,
275:39,
274:40,
97:65,
98:66,
99:67,
100:68,
101:69,
102:70,
103:71,
104:72,
105:73,
106:74,
107:75,
108:76,
109:77,
110:78,
111:79,
112:80,
113:81,
114:82,
115:83,
116:84,
117:85,
118:86,
119:87,
120:88,
121:89,
122:90,
282:112,
283:113,
284:114,
285:115,
286:116,
287:117,
288:118,
289:119,
290:120,
291:121,
292:122,
293:123,
59:186,
61:187,
44:188,
46:190,
45:189,
47:191,
96:192,
92:220,
91:219,
93:221,
39:222}
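        # evtmap maps pygame event type ids to browser-style event names, and
        # keymap maps pygame key constants to the equivalent JavaScript
        # keyCode values, e.g. pygame's K_a (97) -> 65.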
def __init__(self, pevent):
self.type = HwEvent.evtmap.get(pevent.type, None)
if self.type in ['keydown', 'keyup']:
self.keyCode = HwEvent.keymap.get(pevent.key, pevent.key)
elif self.type in ['mousemove', 'mousedown', 'mouseup']:
self.wheelDelta = 0
if self.type != 'mousemove' and pevent.button == 5:
if self.type == 'mousedown':
self.wheelDelta = 1
else:
self.wheelDelta = -1
self.clientX = pevent.pos[0]
self.clientY = pevent.pos[1]
class GFX_Window(object):
def __init__(self, width, height, onclose):
pygame.init()
self._w = pygame.display.set_mode((width, height))
self.clock = pygame.time.Clock()
self.sprites = []
self.animatestarted = False
self.bindings = {}
self.onclose = onclose
self.stop = False
#self._w = window.open("", "")
#self._stage = JSConstructor(GFX.Container)()
#self._renderer = GFX.autoDetectRenderer(width, height, {'transparent':True})
#self._w.document.body.appendChild(self._renderer.view)
#self._w.onunload = onclose
def bind(self, evtspec, callback):
self.bindings[evtspec] = callback
def add(self, obj):
self.sprites.append(obj)
#self._stage.addChild(obj)
def remove(self, obj):
self.sprites.remove(obj)
#self._stage.removeChild(obj)
def animate(self, stepcallback):
# do stuff required to display
self._w.fill(pygame.Color('white'))
for s in self.sprites:
self._w.blit(s.texture.img, (s.pos.x, s.pos.y))
pygame.display.flip()
events = pygame.event.get()
for event in events:
hwevent = HwEvent(event)
                    if hwevent.type is not None:
                        # Ignore event types with no registered binding
                        # instead of raising KeyError.
                        handler = self.bindings.get(hwevent.type)
                        if handler:
                            handler(hwevent)
if event.type == 12:
print("Close!")
self.onclose()
self.destroy()
self.stop = True
if not self.animatestarted:
self.animatestarted = True
while not self.stop:
self.clock.tick_busy_loop(30)
stepcallback(0)
#self._renderer.render(self._stage)
#self._w.requestAnimationFrame(stepcallback)
def destroy(self):
pass
#SND.all().stop()
#self._stage.destroy()
|
mit
|
pschmitt/home-assistant
|
homeassistant/components/twilio_call/notify.py
|
25
|
1882
|
"""Twilio Call platform for notify component."""
import logging
import urllib
from twilio.base.exceptions import TwilioRestException
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_TARGET,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.components.twilio import DATA_TWILIO
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_FROM_NUMBER = "from_number"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_FROM_NUMBER): vol.All(
cv.string, vol.Match(r"^\+?[1-9]\d{1,14}$")
)
}
)
def get_service(hass, config, discovery_info=None):
"""Get the Twilio Call notification service."""
return TwilioCallNotificationService(
hass.data[DATA_TWILIO], config[CONF_FROM_NUMBER]
)
class TwilioCallNotificationService(BaseNotificationService):
"""Implement the notification service for the Twilio Call service."""
def __init__(self, twilio_client, from_number):
"""Initialize the service."""
self.client = twilio_client
self.from_number = from_number
def send_message(self, message="", **kwargs):
"""Call to specified target users."""
targets = kwargs.get(ATTR_TARGET)
if not targets:
_LOGGER.info("At least 1 target is required")
return
if message.startswith(("http://", "https://")):
twimlet_url = message
else:
twimlet_url = "http://twimlets.com/message?Message="
twimlet_url += urllib.parse.quote(message, safe="")
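        # For example, the plain message "Hello world" becomes
        # http://twimlets.com/message?Message=Hello%20world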
for target in targets:
try:
self.client.calls.create(
to=target, url=twimlet_url, from_=self.from_number
)
except TwilioRestException as exc:
_LOGGER.error(exc)
|
apache-2.0
|
hajgato/easybuild-easyblocks
|
easybuild/easyblocks/q/qt.py
|
1
|
3295
|
##
# Copyright 2013 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
EasyBuild support for building and installing Qt, implemented as an easyblock
@author: Kenneth Hoste (Ghent University)
"""
import os
import easybuild.tools.toolchain as toolchain
from easybuild.easyblocks.generic.configuremake import ConfigureMake
from easybuild.tools.filetools import run_cmd_qa
class EB_Qt(ConfigureMake):
"""
Support for building and installing Qt.
"""
def configure_step(self):
"""Configure Qt using interactive `configure` script."""
self.cfg.update('configopts', '-release')
comp_fam = self.toolchain.comp_family()
if comp_fam in [toolchain.GCC]: #@UndefinedVariable
self.cfg.update('configopts', '-platform linux-g++-64')
elif comp_fam in [toolchain.INTELCOMP]: #@UndefinedVariable
self.cfg.update('configopts', '-platform linux-icc-64')
else:
self.log.error("Don't know which platform to set based on compiler family.")
cmd = "%s ./configure --prefix=%s %s" % (self.cfg['preconfigopts'], self.installdir, self.cfg['configopts'])
qa = {
"Type 'o' if you want to use the Open Source Edition.": 'o',
"Do you accept the terms of either license?": 'yes',
}
no_qa = [
"for .*pro",
r"%s.*" % os.getenv('CXX').replace('+', '\\+'), # need to escape + in 'g++'
"Reading .*",
"WARNING .*",
"Project MESSAGE:.*",
"rm -f .*",
]
run_cmd_qa(cmd, qa, no_qa=no_qa, log_all=True, simple=True)
def build_step(self):
"""Set $LD_LIBRARY_PATH before calling make, to ensure that all required libraries are found during linking."""
# cfr. https://elist.ornl.gov/pipermail/visit-developers/2011-September/010063.html
self.cfg.update('premakeopts', 'LD_LIBRARY_PATH=%s:$LD_LIBRARY_PATH' % os.path.join(self.cfg['start_dir'], 'lib'))
super(EB_Qt, self).build_step()
def sanity_check_step(self):
"""Custom sanity check for Qt."""
custom_paths = {
'files': ["lib/libQtCore.so"],
'dirs': ["bin", "include", "plugins"],
}
super(EB_Qt, self).sanity_check_step(custom_paths=custom_paths)
|
gpl-2.0
|
matiasherranz/keyczar
|
cpp/src/tools/swtoolkit/test/component_targets_xml_test.py
|
18
|
3997
|
#!/usr/bin/python2.4
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Component targets XML test (MEDIUM test)."""
import sys
import TestFramework
def TestSConstruct(scons_globals):
"""Test SConstruct file.
Args:
scons_globals: Global variables dict from the SConscript file.
"""
  # Get globals from SCons
  Environment = scons_globals['Environment']
  # Assumed to be exposed the same way as Environment; BuildComponents is
  # called below but was never bound in this scope.
  BuildComponents = scons_globals['BuildComponents']
base_env = Environment(tools=['component_setup',
'component_targets_xml',])
base_env.Append(BUILD_COMPONENTS=['SConscript'])
windows_env = base_env.Clone(
tools=['target_platform_windows'],
BUILD_TYPE='dbg',
BUILD_TYPE_DESCRIPTION='Debug Windows build',
)
windows_env.Append(BUILD_GROUPS=['default'])
mac_env = base_env.Clone(
tools=['target_platform_mac'],
BUILD_TYPE='dbg',
BUILD_TYPE_DESCRIPTION='Debug Mac build',
)
mac_env.Append(BUILD_GROUPS=['default'])
linux_env = base_env.Clone(
tools=['target_platform_linux'],
BUILD_TYPE='dbg',
BUILD_TYPE_DESCRIPTION='Debug Linux build',
)
linux_env.Append(BUILD_GROUPS=['default'])
BuildComponents([windows_env, mac_env, linux_env])
sconscript_contents = """
Import('env')
env.ComponentProgram('hello', 'hello.c')
env.ComponentLibrary('foo', 'foo.c')
"""
expected_xml_win = """<?xml version="1.0" encoding="UTF-8"?>
<help>
<mode_list>
<build_mode description="Debug Windows build" name="dbg"/>
</mode_list>
<target_groups>
<target_group name="all_libraries">
<build_target name="foo">
<target_mode name="dbg">
<target_property name="TARGET_PATH" value="foo.lib"/>
</target_mode>
</build_target>
</target_group>
<target_group name="all_programs">
<build_target name="hello">
<target_mode name="dbg">
<target_property name="TARGET_PATH" value="hello.exe"/>
</target_mode>
</build_target>
</target_group>
</target_groups>
</help>
"""
def main():
test = TestFramework.TestFramework()
test.subdir('hello')
base = 'hello/'
test.WriteSConscript(base + 'SConstruct', TestSConstruct)
test.write(base + 'SConscript', sconscript_contents)
test.run(chdir=base, options='targets_xml')
test.must_exist(base + 'scons-out/targets.xml')
# Check platform-specific XML output
if sys.platform in ('win32', 'cygwin'):
test.must_match(base + 'scons-out/targets.xml', expected_xml_win)
# TODO: check on mac, linux
test.pass_test()
if __name__ == '__main__':
main()
|
apache-2.0
|
codekaki/odoo
|
addons/auth_ldap/users_ldap.py
|
19
|
10782
|
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import ldap
import logging
from ldap.filter import filter_format
import openerp.exceptions
from openerp import tools
from openerp.osv import fields, osv
from openerp import SUPERUSER_ID
from openerp.modules.registry import RegistryManager
_logger = logging.getLogger(__name__)
class CompanyLDAP(osv.osv):
_name = 'res.company.ldap'
_order = 'sequence'
_rec_name = 'ldap_server'
def get_ldap_dicts(self, cr, ids=None):
"""
Retrieve res_company_ldap resources from the database in dictionary
format.
:param list ids: Valid ids of model res_company_ldap. If not \
specified, process all resources (unlike other ORM methods).
:return: ldap configurations
:rtype: list of dictionaries
"""
if ids:
id_clause = 'AND id IN (%s)'
args = [tuple(ids)]
else:
id_clause = ''
args = []
cr.execute("""
SELECT id, company, ldap_server, ldap_server_port, ldap_binddn,
ldap_password, ldap_filter, ldap_base, "user", create_user,
ldap_tls
FROM res_company_ldap
WHERE ldap_server != '' """ + id_clause + """ ORDER BY sequence
""", args)
return cr.dictfetchall()
def connect(self, conf):
"""
Connect to an LDAP server specified by an ldap
configuration dictionary.
:param dict conf: LDAP configuration
:return: an LDAP object
"""
uri = 'ldap://%s:%d' % (conf['ldap_server'],
conf['ldap_server_port'])
connection = ldap.initialize(uri)
if conf['ldap_tls']:
connection.start_tls_s()
return connection
def authenticate(self, conf, login, password):
"""
Authenticate a user against the specified LDAP server.
In order to prevent an unintended 'unauthenticated authentication',
which is an anonymous bind with a valid dn and a blank password,
        check for empty passwords explicitly (:rfc:`4513#section-6.3.1`)
:param dict conf: LDAP configuration
:param login: username
:param password: Password for the LDAP user
:return: LDAP entry of authenticated user or False
:rtype: dictionary of attributes
"""
if not password:
return False
entry = False
filter = filter_format(conf['ldap_filter'], (login,))
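        # filter_format escapes LDAP metacharacters in login; e.g. with an
        # assumed template '(uid=%s)', login 'jdoe' yields '(uid=jdoe)'.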
try:
results = self.query(conf, filter)
# Get rid of (None, attrs) for searchResultReference replies
results = [i for i in results if i[0]]
if results and len(results) == 1:
dn = results[0][0]
conn = self.connect(conf)
conn.simple_bind_s(dn, password)
conn.unbind()
entry = results[0]
except ldap.INVALID_CREDENTIALS:
return False
except ldap.LDAPError, e:
_logger.error('An LDAP exception occurred: %s', e)
return entry
def query(self, conf, filter, retrieve_attributes=None):
"""
Query an LDAP server with the filter argument and scope subtree.
Allow for all authentication methods of the simple authentication
method:
- authenticated bind (non-empty binddn + valid password)
- anonymous bind (empty binddn + empty password)
- unauthenticated authentication (non-empty binddn + empty password)
.. seealso::
:rfc:`4513#section-5.1` - LDAP: Simple Authentication Method.
:param dict conf: LDAP configuration
:param filter: valid LDAP filter
:param list retrieve_attributes: LDAP attributes to be retrieved. \
If not specified, return all attributes.
:return: ldap entries
:rtype: list of tuples (dn, attrs)
"""
results = []
try:
conn = self.connect(conf)
conn.simple_bind_s(conf['ldap_binddn'] or '',
conf['ldap_password'] or '')
results = conn.search_st(conf['ldap_base'], ldap.SCOPE_SUBTREE,
filter, retrieve_attributes, timeout=60)
conn.unbind()
except ldap.INVALID_CREDENTIALS:
_logger.error('LDAP bind failed.')
except ldap.LDAPError, e:
_logger.error('An LDAP exception occurred: %s', e)
return results
def map_ldap_attributes(self, cr, uid, conf, login, ldap_entry):
"""
Compose values for a new resource of model res_users,
based upon the retrieved ldap entry and the LDAP settings.
:param dict conf: LDAP configuration
:param login: the new user's login
:param tuple ldap_entry: single LDAP result (dn, attrs)
:return: parameters for a new resource of model res_users
:rtype: dict
"""
values = { 'name': ldap_entry[1]['cn'][0],
'login': login,
'company_id': conf['company']
}
return values
def get_or_create_user(self, cr, uid, conf, login, ldap_entry,
context=None):
"""
Retrieve an active resource of model res_users with the specified
login. Create the user if it is not initially found.
:param dict conf: LDAP configuration
:param login: the user's login
:param tuple ldap_entry: single LDAP result (dn, attrs)
:return: res_users id
:rtype: int
"""
user_id = False
login = tools.ustr(login.lower())
cr.execute("SELECT id, active FROM res_users WHERE lower(login)=%s", (login,))
res = cr.fetchone()
if res:
if res[1]:
user_id = res[0]
elif conf['create_user']:
_logger.debug("Creating new OpenERP user \"%s\" from LDAP" % login)
user_obj = self.pool.get('res.users')
values = self.map_ldap_attributes(cr, uid, conf, login, ldap_entry)
if conf['user']:
values['active'] = True
user_id = user_obj.copy(cr, SUPERUSER_ID, conf['user'],
default=values)
else:
user_id = user_obj.create(cr, SUPERUSER_ID, values)
return user_id
_columns = {
'sequence': fields.integer('Sequence'),
'company': fields.many2one('res.company', 'Company', required=True,
ondelete='cascade'),
'ldap_server': fields.char('LDAP Server address', size=64, required=True),
'ldap_server_port': fields.integer('LDAP Server port', required=True),
'ldap_binddn': fields.char('LDAP binddn', size=64,
help=("The user account on the LDAP server that is used to query "
"the directory. Leave empty to connect anonymously.")),
'ldap_password': fields.char('LDAP password', size=64,
help=("The password of the user account on the LDAP server that is "
"used to query the directory.")),
'ldap_filter': fields.char('LDAP filter', size=256, required=True),
'ldap_base': fields.char('LDAP base', size=64, required=True),
'user': fields.many2one('res.users', 'Template User',
help="User to copy when creating new users"),
'create_user': fields.boolean('Create user',
help="Automatically create local user accounts for new users authenticating via LDAP"),
'ldap_tls': fields.boolean('Use TLS',
help="Request secure TLS/SSL encryption when connecting to the LDAP server. "
"This option requires a server with STARTTLS enabled, "
"otherwise all authentication attempts will fail."),
}
_defaults = {
'ldap_server': '127.0.0.1',
'ldap_server_port': 389,
'sequence': 10,
'create_user': True,
}
CompanyLDAP()
class res_company(osv.osv):
_inherit = "res.company"
_columns = {
'ldaps': fields.one2many(
'res.company.ldap', 'company', 'LDAP Parameters', groups="base.group_system"),
}
res_company()
class users(osv.osv):
_inherit = "res.users"
def login(self, db, login, password):
user_id = super(users, self).login(db, login, password)
if user_id:
return user_id
registry = RegistryManager.get(db)
with registry.cursor() as cr:
cr.execute("SELECT id FROM res_users WHERE lower(login)=%s", (login,))
res = cr.fetchone()
if res:
return False
ldap_obj = registry.get('res.company.ldap')
for conf in ldap_obj.get_ldap_dicts(cr):
entry = ldap_obj.authenticate(conf, login, password)
if entry:
user_id = ldap_obj.get_or_create_user(
cr, SUPERUSER_ID, conf, login, entry)
if user_id:
break
return user_id
def check_credentials(self, cr, uid, password):
try:
super(users, self).check_credentials(cr, uid, password)
except openerp.exceptions.AccessDenied:
cr.execute('SELECT login FROM res_users WHERE id=%s AND active=TRUE',
(int(uid),))
res = cr.fetchone()
if res:
ldap_obj = self.pool['res.company.ldap']
for conf in ldap_obj.get_ldap_dicts(cr):
if ldap_obj.authenticate(conf, res[0], password):
return
raise
users()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
lukas-hetzenecker/home-assistant
|
homeassistant/components/sensor/darksky.py
|
1
|
11778
|
"""
Support for Dark Sky weather service.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.darksky/
"""
import logging
from datetime import timedelta
import voluptuous as vol
from requests.exceptions import ConnectionError as ConnectError, \
HTTPError, Timeout
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_API_KEY, CONF_NAME, CONF_MONITORED_CONDITIONS, ATTR_ATTRIBUTION)
from homeassistant.helpers.entity import Entity
from homeassistant.util import Throttle
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['python-forecastio==1.3.5']
_LOGGER = logging.getLogger(__name__)
CONF_ATTRIBUTION = "Powered by Dark Sky"
CONF_UNITS = 'units'
CONF_UPDATE_INTERVAL = 'update_interval'
DEFAULT_NAME = 'Dark Sky'
# Sensor types are defined like so:
# Name, si unit, us unit, ca unit, uk unit, uk2 unit, icon
SENSOR_TYPES = {
'summary': ['Summary', None, None, None, None, None, None],
'minutely_summary': ['Minutely Summary',
None, None, None, None, None, None],
'hourly_summary': ['Hourly Summary', None, None, None, None, None, None],
'daily_summary': ['Daily Summary', None, None, None, None, None, None],
'icon': ['Icon', None, None, None, None, None, None],
'nearest_storm_distance': ['Nearest Storm Distance',
'km', 'm', 'km', 'km', 'm',
'mdi:weather-lightning'],
'nearest_storm_bearing': ['Nearest Storm Bearing',
'°', '°', '°', '°', '°',
'mdi:weather-lightning'],
'precip_type': ['Precip', None, None, None, None, None,
'mdi:weather-pouring'],
'precip_intensity': ['Precip Intensity',
'mm', 'in', 'mm', 'mm', 'mm', 'mdi:weather-rainy'],
'precip_probability': ['Precip Probability',
'%', '%', '%', '%', '%', 'mdi:water-percent'],
'temperature': ['Temperature',
'°C', '°F', '°C', '°C', '°C', 'mdi:thermometer'],
'apparent_temperature': ['Apparent Temperature',
'°C', '°F', '°C', '°C', '°C', 'mdi:thermometer'],
'dew_point': ['Dew point', '°C', '°F', '°C', '°C', '°C',
'mdi:thermometer'],
'wind_speed': ['Wind Speed', 'm/s', 'mph', 'km/h', 'mph', 'mph',
'mdi:weather-windy'],
'wind_bearing': ['Wind Bearing', '°', '°', '°', '°', '°', 'mdi:compass'],
'cloud_cover': ['Cloud Coverage', '%', '%', '%', '%', '%',
'mdi:weather-partlycloudy'],
'humidity': ['Humidity', '%', '%', '%', '%', '%', 'mdi:water-percent'],
'pressure': ['Pressure', 'mbar', 'mbar', 'mbar', 'mbar', 'mbar',
'mdi:gauge'],
'visibility': ['Visibility', 'km', 'm', 'km', 'km', 'm', 'mdi:eye'],
'ozone': ['Ozone', 'DU', 'DU', 'DU', 'DU', 'DU', 'mdi:eye'],
'apparent_temperature_max': ['Daily High Apparent Temperature',
'°C', '°F', '°C', '°C', '°C',
'mdi:thermometer'],
'apparent_temperature_min': ['Daily Low Apparent Temperature',
'°C', '°F', '°C', '°C', '°C',
'mdi:thermometer'],
'temperature_max': ['Daily High Temperature',
'°C', '°F', '°C', '°C', '°C', 'mdi:thermometer'],
'temperature_min': ['Daily Low Temperature',
'°C', '°F', '°C', '°C', '°C', 'mdi:thermometer'],
'precip_intensity_max': ['Daily Max Precip Intensity',
'mm', 'in', 'mm', 'mm', 'mm', 'mdi:thermometer'],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_MONITORED_CONDITIONS):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_UNITS): vol.In(['auto', 'si', 'us', 'ca', 'uk', 'uk2']),
vol.Optional(CONF_UPDATE_INTERVAL, default=timedelta(seconds=120)): (
vol.All(cv.time_period, cv.positive_timedelta)),
})
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Dark Sky sensor."""
# Validate the configuration
if None in (hass.config.latitude, hass.config.longitude):
_LOGGER.error("Latitude or longitude not set in Home Assistant config")
return False
if CONF_UNITS in config:
units = config[CONF_UNITS]
elif hass.config.units.is_metric:
units = 'si'
else:
units = 'us'
# Create a data fetcher to support all of the configured sensors. Then make
# the first call to init the data and confirm we can connect.
try:
forecast_data = DarkSkyData(
api_key=config.get(CONF_API_KEY, None),
latitude=hass.config.latitude,
longitude=hass.config.longitude,
units=units,
interval=config.get(CONF_UPDATE_INTERVAL))
forecast_data.update_currently()
except ValueError as error:
_LOGGER.error(error)
return False
name = config.get(CONF_NAME)
sensors = []
for variable in config[CONF_MONITORED_CONDITIONS]:
sensors.append(DarkSkySensor(forecast_data, variable, name))
add_devices(sensors)
class DarkSkySensor(Entity):
"""Implementation of a Dark Sky sensor."""
def __init__(self, forecast_data, sensor_type, name):
"""Initialize the sensor."""
self.client_name = name
self._name = SENSOR_TYPES[sensor_type][0]
self.forecast_data = forecast_data
self.type = sensor_type
self._state = None
self._unit_of_measurement = None
self.update()
@property
def name(self):
"""Return the name of the sensor."""
return '{} {}'.format(self.client_name, self._name)
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
@property
def unit_system(self):
"""Return the unit system of this entity."""
return self.forecast_data.unit_system
def update_unit_of_measurement(self):
"""Update units based on unit system."""
unit_index = {
'si': 1,
'us': 2,
'ca': 3,
'uk': 4,
'uk2': 5
}.get(self.unit_system, 1)
self._unit_of_measurement = SENSOR_TYPES[self.type][unit_index]
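        # For example, under the 'us' unit system 'temperature' resolves to
        # SENSOR_TYPES['temperature'][2], i.e. '°F'.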
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return SENSOR_TYPES[self.type][6]
@property
def device_state_attributes(self):
"""Return the state attributes."""
return {
ATTR_ATTRIBUTION: CONF_ATTRIBUTION,
}
def update(self):
"""Get the latest data from Dark Sky and updates the states."""
# Call the API for new forecast data. Each sensor will re-trigger this
# same exact call, but that's fine. We cache results for a short period
# of time to prevent hitting API limits. Note that Dark Sky will
# charge users for too many calls in 1 day, so take care when updating.
self.forecast_data.update()
self.update_unit_of_measurement()
if self.type == 'minutely_summary':
self.forecast_data.update_minutely()
minutely = self.forecast_data.data_minutely
self._state = getattr(minutely, 'summary', '')
elif self.type == 'hourly_summary':
self.forecast_data.update_hourly()
hourly = self.forecast_data.data_hourly
self._state = getattr(hourly, 'summary', '')
elif self.type in ['daily_summary',
'temperature_min',
'temperature_max',
'apparent_temperature_min',
'apparent_temperature_max',
'precip_intensity_max']:
self.forecast_data.update_daily()
daily = self.forecast_data.data_daily
if self.type == 'daily_summary':
self._state = getattr(daily, 'summary', '')
else:
if hasattr(daily, 'data'):
self._state = self.get_state(daily.data[0])
else:
self._state = 0
else:
self.forecast_data.update_currently()
currently = self.forecast_data.data_currently
self._state = self.get_state(currently)
def get_state(self, data):
"""
        Helper function that returns a new state based on the type.
        If the data has no attribute for this type, 0 is returned.
"""
lookup_type = convert_to_camel(self.type)
state = getattr(data, lookup_type, 0)
# Some state data needs to be rounded to whole values or converted to
# percentages
if self.type in ['precip_probability', 'cloud_cover', 'humidity']:
return round(state * 100, 1)
elif (self.type in ['dew_point', 'temperature', 'apparent_temperature',
'temperature_min', 'temperature_max',
'apparent_temperature_min',
'apparent_temperature_max',
'pressure', 'ozone']):
return round(state, 1)
return state
def convert_to_camel(data):
"""
Convert snake case (foo_bar_bat) to camel case (fooBarBat).
    This is not pythonic, but it is needed to look up the camelCase
    attribute names used in the Dark Sky data.
    """
components = data.split('_')
return components[0] + "".join(x.title() for x in components[1:])
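# A minimal doctest-style sketch of convert_to_camel (illustration only);
# the camelCase names match the attributes exposed by the Dark Sky data:
#
#   >>> convert_to_camel('precip_intensity_max')
#   'precipIntensityMax'
#   >>> convert_to_camel('temperature')
#   'temperature'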
class DarkSkyData(object):
"""Get the latest data from Darksky."""
def __init__(self, api_key, latitude, longitude, units, interval):
"""Initialize the data object."""
self._api_key = api_key
self.latitude = latitude
self.longitude = longitude
self.units = units
self.data = None
self.unit_system = None
self.data_currently = None
self.data_minutely = None
self.data_hourly = None
self.data_daily = None
# Apply throttling to methods using configured interval
self.update = Throttle(interval)(self._update)
self.update_currently = Throttle(interval)(self._update_currently)
self.update_minutely = Throttle(interval)(self._update_minutely)
self.update_hourly = Throttle(interval)(self._update_hourly)
self.update_daily = Throttle(interval)(self._update_daily)
self.update()
def _update(self):
"""Get the latest data from Dark Sky."""
import forecastio
try:
self.data = forecastio.load_forecast(
self._api_key, self.latitude, self.longitude, units=self.units)
except (ConnectError, HTTPError, Timeout, ValueError) as error:
            raise ValueError("Unable to init Dark Sky. %s" % error)
self.unit_system = self.data.json['flags']['units']
def _update_currently(self):
"""Update currently data."""
self.data_currently = self.data.currently()
def _update_minutely(self):
"""Update minutely data."""
self.data_minutely = self.data.minutely()
def _update_hourly(self):
"""Update hourly data."""
self.data_hourly = self.data.hourly()
def _update_daily(self):
"""Update daily data."""
self.data_daily = self.data.daily()
|
mit
|
ppietrasa/grpc
|
tools/run_tests/run_tests.py
|
1
|
54046
|
#!/usr/bin/env python2.7
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Run tests in parallel."""
from __future__ import print_function
import argparse
import ast
import collections
import glob
import itertools
import json
import multiprocessing
import os
import os.path
import pipes
import platform
import random
import re
import socket
import subprocess
import sys
import tempfile
import traceback
import time
from six.moves import urllib
import uuid
import python_utils.jobset as jobset
import python_utils.report_utils as report_utils
import python_utils.watch_dirs as watch_dirs
_ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(_ROOT)
_FORCE_ENVIRON_FOR_WRAPPERS = {
'GRPC_VERBOSITY': 'DEBUG',
}
_POLLING_STRATEGIES = {
'linux': ['epoll', 'poll', 'poll-cv']
}
def platform_string():
return jobset.platform_string()
_DEFAULT_TIMEOUT_SECONDS = 5 * 60
# Config: just compile with CONFIG=config, and run the binary to test
class Config(object):
def __init__(self, config, environ=None, timeout_multiplier=1, tool_prefix=[], iomgr_platform='native'):
if environ is None:
environ = {}
self.build_config = config
self.environ = environ
self.environ['CONFIG'] = config
self.tool_prefix = tool_prefix
self.timeout_multiplier = timeout_multiplier
self.iomgr_platform = iomgr_platform
def job_spec(self, cmdline, timeout_seconds=_DEFAULT_TIMEOUT_SECONDS,
shortname=None, environ={}, cpu_cost=1.0, flaky=False):
"""Construct a jobset.JobSpec for a test under this config
Args:
cmdline: a list of strings specifying the command line the test
would like to run
"""
actual_environ = self.environ.copy()
for k, v in environ.items():
actual_environ[k] = v
return jobset.JobSpec(cmdline=self.tool_prefix + cmdline,
shortname=shortname,
environ=actual_environ,
cpu_cost=cpu_cost,
timeout_seconds=(self.timeout_multiplier * timeout_seconds if timeout_seconds else None),
flake_retries=5 if flaky or args.allow_flakes else 0,
timeout_retries=3 if args.allow_flakes else 0)
def get_c_tests(travis, test_lang) :
out = []
platforms_str = 'ci_platforms' if travis else 'platforms'
with open('tools/run_tests/generated/tests.json') as f:
js = json.load(f)
return [tgt
for tgt in js
if tgt['language'] == test_lang and
platform_string() in tgt[platforms_str] and
not (travis and tgt['flaky'])]
def _check_compiler(compiler, supported_compilers):
if compiler not in supported_compilers:
raise Exception('Compiler %s not supported (on this platform).' % compiler)
def _check_arch(arch, supported_archs):
if arch not in supported_archs:
raise Exception('Architecture %s not supported.' % arch)
def _is_use_docker_child():
"""Returns True if running running as a --use_docker child."""
  return bool(os.getenv('RUN_TESTS_COMMAND'))
_PythonConfigVars = collections.namedtuple(
'_ConfigVars', ['shell', 'builder', 'builder_prefix_arguments',
'venv_relative_python', 'toolchain', 'runner'])
def _python_config_generator(name, major, minor, bits, config_vars):
return PythonConfig(
name,
config_vars.shell + config_vars.builder + config_vars.builder_prefix_arguments + [
_python_pattern_function(major=major, minor=minor, bits=bits)] + [
name] + config_vars.venv_relative_python + config_vars.toolchain,
config_vars.shell + config_vars.runner + [
os.path.join(name, config_vars.venv_relative_python[0])])
def _pypy_config_generator(name, major, config_vars):
return PythonConfig(
name,
config_vars.shell + config_vars.builder + config_vars.builder_prefix_arguments + [
_pypy_pattern_function(major=major)] + [
name] + config_vars.venv_relative_python + config_vars.toolchain,
config_vars.shell + config_vars.runner + [
os.path.join(name, config_vars.venv_relative_python[0])])
def _python_pattern_function(major, minor, bits):
# Bit-ness is handled by the test machine's environment
if os.name == "nt":
if bits == "64":
return '/c/Python{major}{minor}/python.exe'.format(
major=major, minor=minor, bits=bits)
else:
return '/c/Python{major}{minor}_{bits}bits/python.exe'.format(
major=major, minor=minor, bits=bits)
else:
return 'python{major}.{minor}'.format(major=major, minor=minor)
def _pypy_pattern_function(major):
if major == '2':
return 'pypy'
elif major == '3':
return 'pypy3'
else:
raise ValueError("Unknown PyPy major version")
class CLanguage(object):
def __init__(self, make_target, test_lang):
self.make_target = make_target
self.platform = platform_string()
self.test_lang = test_lang
def configure(self, config, args):
self.config = config
self.args = args
if self.platform == 'windows':
self._make_options = [_windows_toolset_option(self.args.compiler),
_windows_arch_option(self.args.arch)]
else:
self._docker_distro, self._make_options = self._compiler_options(self.args.use_docker,
self.args.compiler)
if args.iomgr_platform == "uv":
cflags = '-DGRPC_UV '
try:
cflags += subprocess.check_output(['pkg-config', '--cflags', 'libuv']).strip() + ' '
except (subprocess.CalledProcessError, OSError):
pass
try:
ldflags = subprocess.check_output(['pkg-config', '--libs', 'libuv']).strip() + ' '
except (subprocess.CalledProcessError, OSError):
ldflags = '-luv '
self._make_options += ['EXTRA_CPPFLAGS={}'.format(cflags),
'EXTRA_LDLIBS={}'.format(ldflags)]
def test_specs(self):
out = []
binaries = get_c_tests(self.args.travis, self.test_lang)
for target in binaries:
polling_strategies = (_POLLING_STRATEGIES.get(self.platform, ['all'])
if target.get('uses_polling', True)
else ['all'])
if self.args.iomgr_platform == 'uv':
polling_strategies = ['all']
for polling_strategy in polling_strategies:
env={'GRPC_DEFAULT_SSL_ROOTS_FILE_PATH':
_ROOT + '/src/core/lib/tsi/test_creds/ca.pem',
'GRPC_POLL_STRATEGY': polling_strategy,
'GRPC_VERBOSITY': 'DEBUG'}
shortname_ext = '' if polling_strategy=='all' else ' GRPC_POLL_STRATEGY=%s' % polling_strategy
timeout_scaling = 1
if polling_strategy == 'poll-cv':
timeout_scaling *= 5
if self.config.build_config in target['exclude_configs']:
continue
if self.args.iomgr_platform in target.get('exclude_iomgrs', []):
continue
if self.platform == 'windows':
binary = 'vsprojects/%s%s/%s.exe' % (
'x64/' if self.args.arch == 'x64' else '',
_MSBUILD_CONFIG[self.config.build_config],
target['name'])
else:
binary = 'bins/%s/%s' % (self.config.build_config, target['name'])
cpu_cost = target['cpu_cost']
if cpu_cost == 'capacity':
cpu_cost = multiprocessing.cpu_count()
if os.path.isfile(binary):
if 'gtest' in target and target['gtest']:
# here we parse the output of --gtest_list_tests to build up a
# complete list of the tests contained in a binary
# for each test, we then add a job to run, filtering for just that
# test
with open(os.devnull, 'w') as fnull:
tests = subprocess.check_output([binary, '--gtest_list_tests'],
stderr=fnull)
base = None
for line in tests.split('\n'):
i = line.find('#')
if i >= 0: line = line[:i]
if not line: continue
if line[0] != ' ':
base = line.strip()
else:
assert base is not None
assert line[1] == ' '
test = base + line.strip()
cmdline = [binary] + ['--gtest_filter=%s' % test]
out.append(self.config.job_spec(cmdline,
shortname='%s --gtest_filter=%s %s' % (binary, test, shortname_ext),
cpu_cost=cpu_cost,
timeout_seconds=_DEFAULT_TIMEOUT_SECONDS * timeout_scaling,
environ=env))
else:
cmdline = [binary] + target['args']
out.append(self.config.job_spec(cmdline,
shortname=' '.join(
pipes.quote(arg)
for arg in cmdline) +
shortname_ext,
cpu_cost=cpu_cost,
flaky=target.get('flaky', False),
timeout_seconds=target.get('timeout_seconds', _DEFAULT_TIMEOUT_SECONDS) * timeout_scaling,
environ=env))
elif self.args.regex == '.*' or self.platform == 'windows':
print('\nWARNING: binary not found, skipping', binary)
return sorted(out)
def make_targets(self):
if self.platform == 'windows':
# don't build tools on windows just yet
return ['buildtests_%s' % self.make_target]
return ['buildtests_%s' % self.make_target, 'tools_%s' % self.make_target]
def make_options(self):
    return self._make_options
def pre_build_steps(self):
if self.platform == 'windows':
return [['tools\\run_tests\\helper_scripts\\pre_build_c.bat']]
else:
return []
def build_steps(self):
return []
def post_tests_steps(self):
if self.platform == 'windows':
return []
else:
return [['tools/run_tests/helper_scripts/post_tests_c.sh']]
def makefile_name(self):
return 'Makefile'
def _clang_make_options(self, version_suffix=''):
return ['CC=clang%s' % version_suffix,
'CXX=clang++%s' % version_suffix,
'LD=clang%s' % version_suffix,
'LDXX=clang++%s' % version_suffix]
def _gcc_make_options(self, version_suffix):
return ['CC=gcc%s' % version_suffix,
'CXX=g++%s' % version_suffix,
'LD=gcc%s' % version_suffix,
'LDXX=g++%s' % version_suffix]
def _compiler_options(self, use_docker, compiler):
"""Returns docker distro and make options to use for given compiler."""
if not use_docker and not _is_use_docker_child():
_check_compiler(compiler, ['default'])
if compiler == 'gcc4.9' or compiler == 'default':
return ('jessie', [])
elif compiler == 'gcc4.4':
return ('wheezy', self._gcc_make_options(version_suffix='-4.4'))
elif compiler == 'gcc4.6':
return ('wheezy', self._gcc_make_options(version_suffix='-4.6'))
elif compiler == 'gcc4.8':
return ('jessie', self._gcc_make_options(version_suffix='-4.8'))
elif compiler == 'gcc5.3':
return ('ubuntu1604', [])
elif compiler == 'clang3.4':
# on ubuntu1404, clang-3.4 alias doesn't exist, just use 'clang'
return ('ubuntu1404', self._clang_make_options())
elif compiler == 'clang3.5':
return ('jessie', self._clang_make_options(version_suffix='-3.5'))
elif compiler == 'clang3.6':
return ('ubuntu1604', self._clang_make_options(version_suffix='-3.6'))
elif compiler == 'clang3.7':
return ('ubuntu1604', self._clang_make_options(version_suffix='-3.7'))
else:
raise Exception('Compiler %s not supported.' % compiler)
def dockerfile_dir(self):
return 'tools/dockerfile/test/cxx_%s_%s' % (self._docker_distro,
_docker_arch_suffix(self.args.arch))
def __str__(self):
return self.make_target
class NodeLanguage(object):
def __init__(self):
self.platform = platform_string()
def configure(self, config, args):
self.config = config
self.args = args
_check_compiler(self.args.compiler, ['default', 'node0.12',
'node4', 'node5', 'node6',
'node7'])
if self.args.compiler == 'default':
self.node_version = '4'
else:
# Take off the word "node"
self.node_version = self.args.compiler[4:]
def test_specs(self):
if self.platform == 'windows':
return [self.config.job_spec(['tools\\run_tests\\helper_scripts\\run_node.bat'])]
else:
return [self.config.job_spec(['tools/run_tests/helper_scripts/run_node.sh', self.node_version],
environ=_FORCE_ENVIRON_FOR_WRAPPERS)]
def pre_build_steps(self):
if self.platform == 'windows':
return [['tools\\run_tests\\helper_scripts\\pre_build_node.bat']]
else:
return [['tools/run_tests/helper_scripts/pre_build_node.sh', self.node_version]]
def make_targets(self):
return []
def make_options(self):
return []
def build_steps(self):
if self.platform == 'windows':
return [['tools\\run_tests\\helper_scripts\\build_node.bat']]
else:
return [['tools/run_tests/helper_scripts/build_node.sh', self.node_version]]
def post_tests_steps(self):
return []
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return 'tools/dockerfile/test/node_jessie_%s' % _docker_arch_suffix(self.args.arch)
def __str__(self):
return 'node'
class PhpLanguage(object):
def configure(self, config, args):
self.config = config
self.args = args
_check_compiler(self.args.compiler, ['default'])
def test_specs(self):
return [self.config.job_spec(['src/php/bin/run_tests.sh'],
environ=_FORCE_ENVIRON_FOR_WRAPPERS)]
def pre_build_steps(self):
return []
def make_targets(self):
return ['static_c', 'shared_c']
def make_options(self):
return []
def build_steps(self):
return [['tools/run_tests/helper_scripts/build_php.sh']]
def post_tests_steps(self):
return [['tools/run_tests/helper_scripts/post_tests_php.sh']]
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return 'tools/dockerfile/test/php_jessie_%s' % _docker_arch_suffix(self.args.arch)
def __str__(self):
return 'php'
class Php7Language(object):
def configure(self, config, args):
self.config = config
self.args = args
_check_compiler(self.args.compiler, ['default'])
def test_specs(self):
return [self.config.job_spec(['src/php/bin/run_tests.sh'],
environ=_FORCE_ENVIRON_FOR_WRAPPERS)]
def pre_build_steps(self):
return []
def make_targets(self):
return ['static_c', 'shared_c']
def make_options(self):
return []
def build_steps(self):
return [['tools/run_tests/helper_scripts/build_php.sh']]
def post_tests_steps(self):
return [['tools/run_tests/helper_scripts/post_tests_php.sh']]
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return 'tools/dockerfile/test/php7_jessie_%s' % _docker_arch_suffix(self.args.arch)
def __str__(self):
return 'php7'
class PythonConfig(collections.namedtuple('PythonConfig', [
'name', 'build', 'run'])):
"""Tuple of commands (named s.t. 'what it says on the tin' applies)"""
class PythonLanguage(object):
def configure(self, config, args):
self.config = config
self.args = args
self.pythons = self._get_pythons(self.args)
def test_specs(self):
# load list of known test suites
with open('src/python/grpcio_tests/tests/tests.json') as tests_json_file:
tests_json = json.load(tests_json_file)
environment = dict(_FORCE_ENVIRON_FOR_WRAPPERS)
return [self.config.job_spec(
config.run,
timeout_seconds=5*60,
environ=dict(list(environment.items()) +
[('GRPC_PYTHON_TESTRUNNER_FILTER', str(suite_name))]),
shortname='%s.test.%s' % (config.name, suite_name),)
for suite_name in tests_json
for config in self.pythons]
def pre_build_steps(self):
return []
def make_targets(self):
return []
def make_options(self):
return []
def build_steps(self):
return [config.build for config in self.pythons]
def post_tests_steps(self):
return []
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return 'tools/dockerfile/test/python_%s_%s' % (self.python_manager_name(), _docker_arch_suffix(self.args.arch))
def python_manager_name(self):
return 'pyenv' if self.args.compiler in ['python3.5', 'python3.6'] else 'jessie'
def _get_pythons(self, args):
if args.arch == 'x86':
bits = '32'
else:
bits = '64'
if os.name == 'nt':
shell = ['bash']
builder = [os.path.abspath('tools/run_tests/helper_scripts/build_python_msys2.sh')]
builder_prefix_arguments = ['MINGW{}'.format(bits)]
venv_relative_python = ['Scripts/python.exe']
toolchain = ['mingw32']
else:
shell = []
builder = [os.path.abspath('tools/run_tests/helper_scripts/build_python.sh')]
builder_prefix_arguments = []
venv_relative_python = ['bin/python']
toolchain = ['unix']
runner = [os.path.abspath('tools/run_tests/helper_scripts/run_python.sh')]
config_vars = _PythonConfigVars(shell, builder, builder_prefix_arguments,
venv_relative_python, toolchain, runner)
python27_config = _python_config_generator(name='py27', major='2',
minor='7', bits=bits,
config_vars=config_vars)
python34_config = _python_config_generator(name='py34', major='3',
minor='4', bits=bits,
config_vars=config_vars)
python35_config = _python_config_generator(name='py35', major='3',
minor='5', bits=bits,
config_vars=config_vars)
python36_config = _python_config_generator(name='py36', major='3',
minor='6', bits=bits,
config_vars=config_vars)
pypy27_config = _pypy_config_generator(name='pypy', major='2',
config_vars=config_vars)
pypy32_config = _pypy_config_generator(name='pypy3', major='3',
config_vars=config_vars)
if args.compiler == 'default':
if os.name == 'nt':
return (python27_config,)
else:
return (python27_config, python34_config,)
elif args.compiler == 'python2.7':
return (python27_config,)
elif args.compiler == 'python3.4':
return (python34_config,)
elif args.compiler == 'python3.5':
return (python35_config,)
elif args.compiler == 'python3.6':
return (python36_config,)
elif args.compiler == 'pypy':
return (pypy27_config,)
elif args.compiler == 'pypy3':
return (pypy32_config,)
else:
raise Exception('Compiler %s not supported.' % args.compiler)
def __str__(self):
return 'python'
class RubyLanguage(object):
def configure(self, config, args):
self.config = config
self.args = args
_check_compiler(self.args.compiler, ['default'])
def test_specs(self):
return [self.config.job_spec(['tools/run_tests/helper_scripts/run_ruby.sh'],
timeout_seconds=10*60,
environ=_FORCE_ENVIRON_FOR_WRAPPERS)]
def pre_build_steps(self):
return [['tools/run_tests/helper_scripts/pre_build_ruby.sh']]
def make_targets(self):
return []
def make_options(self):
return []
def build_steps(self):
return [['tools/run_tests/helper_scripts/build_ruby.sh']]
def post_tests_steps(self):
return [['tools/run_tests/helper_scripts/post_tests_ruby.sh']]
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return 'tools/dockerfile/test/ruby_jessie_%s' % _docker_arch_suffix(self.args.arch)
def __str__(self):
return 'ruby'
class CSharpLanguage(object):
def __init__(self):
self.platform = platform_string()
def configure(self, config, args):
self.config = config
self.args = args
if self.platform == 'windows':
# Explicitly choosing between x86 and x64 arch doesn't work yet
_check_arch(self.args.arch, ['default'])
# CoreCLR use 64bit runtime by default.
arch_option = 'x64' if self.args.compiler == 'coreclr' else self.args.arch
self._make_options = [_windows_toolset_option(self.args.compiler),
_windows_arch_option(arch_option)]
else:
_check_compiler(self.args.compiler, ['default', 'coreclr'])
if self.platform == 'linux' and self.args.compiler == 'coreclr':
self._docker_distro = 'coreclr'
else:
self._docker_distro = 'jessie'
if self.platform == 'mac':
# TODO(jtattermusch): EMBED_ZLIB=true currently breaks the mac build
self._make_options = ['EMBED_OPENSSL=true']
if self.args.compiler != 'coreclr':
# On Mac, official distribution of mono is 32bit.
self._make_options += ['CFLAGS=-m32', 'LDFLAGS=-m32']
else:
self._make_options = ['EMBED_OPENSSL=true', 'EMBED_ZLIB=true']
def test_specs(self):
with open('src/csharp/tests.json') as f:
tests_by_assembly = json.load(f)
msbuild_config = _MSBUILD_CONFIG[self.config.build_config]
nunit_args = ['--labels=All']
assembly_subdir = 'bin/%s' % msbuild_config
assembly_extension = '.exe'
if self.args.compiler == 'coreclr':
assembly_subdir += '/netcoreapp1.0'
runtime_cmd = ['dotnet', 'exec']
assembly_extension = '.dll'
else:
nunit_args += ['--noresult', '--workers=1']
if self.platform == 'windows':
runtime_cmd = []
else:
runtime_cmd = ['mono']
specs = []
for assembly in tests_by_assembly.iterkeys():
assembly_file = 'src/csharp/%s/%s/%s%s' % (assembly,
assembly_subdir,
assembly,
assembly_extension)
if self.config.build_config != 'gcov' or self.platform != 'windows':
# normally, run each test as a separate process
for test in tests_by_assembly[assembly]:
cmdline = runtime_cmd + [assembly_file, '--test=%s' % test] + nunit_args
specs.append(self.config.job_spec(cmdline,
shortname='csharp.%s' % test,
environ=_FORCE_ENVIRON_FOR_WRAPPERS))
else:
# For C# test coverage, run all tests from the same assembly at once
# using OpenCover.Console (only works on Windows).
cmdline = ['src\\csharp\\packages\\OpenCover.4.6.519\\tools\\OpenCover.Console.exe',
'-target:%s' % assembly_file,
'-targetdir:src\\csharp',
'-targetargs:%s' % ' '.join(nunit_args),
'-filter:+[Grpc.Core]*',
'-register:user',
'-output:src\\csharp\\coverage_csharp_%s.xml' % assembly]
# set really high cpu_cost to make sure instances of OpenCover.Console run exclusively
# to prevent problems with registering the profiler.
run_exclusive = 1000000
specs.append(self.config.job_spec(cmdline,
shortname='csharp.coverage.%s' % assembly,
cpu_cost=run_exclusive,
environ=_FORCE_ENVIRON_FOR_WRAPPERS))
return specs
def pre_build_steps(self):
if self.platform == 'windows':
return [['tools\\run_tests\\helper_scripts\\pre_build_csharp.bat']]
else:
return [['tools/run_tests/helper_scripts/pre_build_csharp.sh']]
def make_targets(self):
return ['grpc_csharp_ext']
def make_options(self):
    return self._make_options
def build_steps(self):
if self.args.compiler == 'coreclr':
if self.platform == 'windows':
return [['tools\\run_tests\\helper_scripts\\build_csharp_coreclr.bat']]
else:
return [['tools/run_tests/helper_scripts/build_csharp_coreclr.sh']]
else:
if self.platform == 'windows':
return [[_windows_build_bat(self.args.compiler),
'src/csharp/Grpc.sln',
'/p:Configuration=%s' % _MSBUILD_CONFIG[self.config.build_config]]]
else:
return [['tools/run_tests/helper_scripts/build_csharp.sh']]
def post_tests_steps(self):
if self.platform == 'windows':
return [['tools\\run_tests\\helper_scripts\\post_tests_csharp.bat']]
else:
return [['tools/run_tests/helper_scripts/post_tests_csharp.sh']]
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return 'tools/dockerfile/test/csharp_%s_%s' % (self._docker_distro,
_docker_arch_suffix(self.args.arch))
def __str__(self):
return 'csharp'
class ObjCLanguage(object):
def configure(self, config, args):
self.config = config
self.args = args
_check_compiler(self.args.compiler, ['default'])
def test_specs(self):
return [
self.config.job_spec(['src/objective-c/tests/run_tests.sh'],
timeout_seconds=60*60,
shortname='objc-tests',
environ=_FORCE_ENVIRON_FOR_WRAPPERS),
self.config.job_spec(['src/objective-c/tests/build_example_test.sh'],
timeout_seconds=30*60,
shortname='objc-examples-build',
environ=_FORCE_ENVIRON_FOR_WRAPPERS),
]
def pre_build_steps(self):
return []
def make_targets(self):
return ['interop_server']
def make_options(self):
return []
def build_steps(self):
return [['src/objective-c/tests/build_tests.sh']]
def post_tests_steps(self):
return []
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return None
def __str__(self):
return 'objc'
class Sanity(object):
def configure(self, config, args):
self.config = config
self.args = args
_check_compiler(self.args.compiler, ['default'])
def test_specs(self):
import yaml
with open('tools/run_tests/sanity/sanity_tests.yaml', 'r') as f:
environ={'TEST': 'true'}
if _is_use_docker_child():
environ['CLANG_FORMAT_SKIP_DOCKER'] = 'true'
return [self.config.job_spec(cmd['script'].split(),
timeout_seconds=30*60,
environ=environ,
cpu_cost=cmd.get('cpu_cost', 1))
for cmd in yaml.load(f)]
def pre_build_steps(self):
return []
def make_targets(self):
return ['run_dep_checks']
def make_options(self):
return []
def build_steps(self):
return []
def post_tests_steps(self):
return []
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return 'tools/dockerfile/test/sanity'
def __str__(self):
return 'sanity'
class NodeExpressLanguage(object):
"""Dummy Node express test target to enable running express performance
benchmarks"""
def __init__(self):
self.platform = platform_string()
def configure(self, config, args):
self.config = config
self.args = args
_check_compiler(self.args.compiler, ['default', 'node0.12',
'node4', 'node5', 'node6'])
if self.args.compiler == 'default':
self.node_version = '4'
else:
# Take off the word "node"
self.node_version = self.args.compiler[4:]
def test_specs(self):
return []
def pre_build_steps(self):
if self.platform == 'windows':
return [['tools\\run_tests\\helper_scripts\\pre_build_node.bat']]
else:
return [['tools/run_tests/helper_scripts/pre_build_node.sh', self.node_version]]
def make_targets(self):
return []
def make_options(self):
return []
def build_steps(self):
return []
def post_tests_steps(self):
return []
def makefile_name(self):
return 'Makefile'
def dockerfile_dir(self):
return 'tools/dockerfile/test/node_jessie_%s' % _docker_arch_suffix(self.args.arch)
def __str__(self):
return 'node_express'
# different configurations we can run under
with open('tools/run_tests/generated/configs.json') as f:
_CONFIGS = dict((cfg['config'], Config(**cfg)) for cfg in ast.literal_eval(f.read()))
_LANGUAGES = {
'c++': CLanguage('cxx', 'c++'),
'c': CLanguage('c', 'c'),
'node': NodeLanguage(),
'node_express': NodeExpressLanguage(),
'php': PhpLanguage(),
'php7': Php7Language(),
'python': PythonLanguage(),
'ruby': RubyLanguage(),
'csharp': CSharpLanguage(),
'objc' : ObjCLanguage(),
'sanity': Sanity()
}
_MSBUILD_CONFIG = {
'dbg': 'Debug',
'opt': 'Release',
'gcov': 'Debug',
}
def _windows_arch_option(arch):
"""Returns msbuild cmdline option for selected architecture."""
if arch == 'default' or arch == 'x86':
return '/p:Platform=Win32'
elif arch == 'x64':
return '/p:Platform=x64'
else:
print('Architecture %s not supported.' % arch)
sys.exit(1)
def _check_arch_option(arch):
"""Checks that architecture option is valid."""
if platform_string() == 'windows':
_windows_arch_option(arch)
elif platform_string() == 'linux':
# On linux, we need to be running under docker with the right architecture.
runtime_arch = platform.architecture()[0]
if arch == 'default':
return
elif runtime_arch == '64bit' and arch == 'x64':
return
elif runtime_arch == '32bit' and arch == 'x86':
return
else:
print('Architecture %s does not match current runtime architecture.' % arch)
sys.exit(1)
else:
if args.arch != 'default':
print('Architecture %s not supported on current platform.' % args.arch)
sys.exit(1)
def _windows_build_bat(compiler):
"""Returns name of build.bat for selected compiler."""
# For CoreCLR, fall back to the default compiler for C core
if compiler == 'default' or compiler == 'vs2013' or compiler == 'coreclr':
return 'vsprojects\\build_vs2013.bat'
elif compiler == 'vs2015':
return 'vsprojects\\build_vs2015.bat'
elif compiler == 'vs2010':
return 'vsprojects\\build_vs2010.bat'
else:
print('Compiler %s not supported.' % compiler)
sys.exit(1)
def _windows_toolset_option(compiler):
"""Returns msbuild PlatformToolset for selected compiler."""
# For CoreCLR, fall back to the default compiler for C core
if compiler == 'default' or compiler == 'vs2013' or compiler == 'coreclr':
return '/p:PlatformToolset=v120'
elif compiler == 'vs2015':
return '/p:PlatformToolset=v140'
elif compiler == 'vs2010':
return '/p:PlatformToolset=v100'
else:
print('Compiler %s not supported.' % compiler)
sys.exit(1)
def _docker_arch_suffix(arch):
"""Returns suffix to dockerfile dir to use."""
if arch == 'default' or arch == 'x64':
return 'x64'
elif arch == 'x86':
return 'x86'
else:
print('Architecture %s not supported with current settings.' % arch)
sys.exit(1)
def runs_per_test_type(arg_str):
"""Auxilary function to parse the "runs_per_test" flag.
Returns:
A positive integer or 0, the latter indicating an infinite number of
runs.
Raises:
argparse.ArgumentTypeError: Upon invalid input.
"""
if arg_str == 'inf':
return 0
try:
n = int(arg_str)
if n <= 0: raise ValueError
return n
except:
msg = '\'{}\' is not a positive integer or \'inf\''.format(arg_str)
raise argparse.ArgumentTypeError(msg)
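# Illustrative behaviour of runs_per_test_type (not part of the original
# script):
#
#   runs_per_test_type('3')    -> 3
#   runs_per_test_type('inf')  -> 0   (interpreted as "run forever")
#   runs_per_test_type('0')    -> raises argparse.ArgumentTypeError
#   runs_per_test_type('foo')  -> raises argparse.ArgumentTypeError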
# parse command line
argp = argparse.ArgumentParser(description='Run grpc tests.')
argp.add_argument('-c', '--config',
choices=sorted(_CONFIGS.keys()),
default='opt')
argp.add_argument('-n', '--runs_per_test', default=1, type=runs_per_test_type,
help='A positive integer or "inf". If "inf", all tests will run in an '
'infinite loop. Especially useful in combination with "-f"')
argp.add_argument('-r', '--regex', default='.*', type=str)
argp.add_argument('--regex_exclude', default='', type=str)
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
argp.add_argument('-s', '--slowdown', default=1.0, type=float)
argp.add_argument('-f', '--forever',
default=False,
action='store_const',
const=True)
argp.add_argument('-t', '--travis',
default=False,
action='store_const',
const=True)
argp.add_argument('--newline_on_success',
default=False,
action='store_const',
const=True)
argp.add_argument('-l', '--language',
choices=['all'] + sorted(_LANGUAGES.keys()),
nargs='+',
default=['all'])
argp.add_argument('-S', '--stop_on_failure',
default=False,
action='store_const',
const=True)
argp.add_argument('--use_docker',
default=False,
action='store_const',
const=True,
help='Run all the tests under docker. That provides ' +
'additional isolation and prevents the need to install ' +
'language specific prerequisites. Only available on Linux.')
argp.add_argument('--allow_flakes',
default=False,
action='store_const',
const=True,
help='Allow flaky tests to show as passing (re-runs failed tests up to five times)')
argp.add_argument('--arch',
choices=['default', 'x86', 'x64'],
default='default',
help='Selects architecture to target. For some platforms "default" is the only supported choice.')
argp.add_argument('--compiler',
choices=['default',
'gcc4.4', 'gcc4.6', 'gcc4.8', 'gcc4.9', 'gcc5.3',
'clang3.4', 'clang3.5', 'clang3.6', 'clang3.7',
'vs2010', 'vs2013', 'vs2015',
'python2.7', 'python3.4', 'python3.5', 'python3.6', 'pypy', 'pypy3',
'node0.12', 'node4', 'node5', 'node6', 'node7',
'coreclr'],
default='default',
help='Selects compiler to use. Allowed values depend on the platform and language.')
argp.add_argument('--iomgr_platform',
choices=['native', 'uv'],
default='native',
help='Selects iomgr platform to build on')
argp.add_argument('--build_only',
default=False,
action='store_const',
const=True,
                  help="Perform all the build steps but don't run any tests.")
argp.add_argument('--measure_cpu_costs', default=False, action='store_const', const=True,
help='Measure the cpu costs of tests')
argp.add_argument('--update_submodules', default=[], nargs='*',
help='Update some submodules before building. If any are updated, also run generate_projects. ' +
'Submodules are specified as SUBMODULE_NAME:BRANCH; if BRANCH is omitted, master is assumed.')
argp.add_argument('-a', '--antagonists', default=0, type=int)
argp.add_argument('-x', '--xml_report', default=None, type=str,
help='Generates a JUnit-compatible XML report')
argp.add_argument('--report_suite_name', default='tests', type=str,
help='Test suite name to use in generated JUnit XML report')
argp.add_argument('--quiet_success',
default=False,
action='store_const',
const=True,
                  help="Don't print anything when a test passes. Passing tests will also not be reported in the XML report. " +
'Useful when running many iterations of each test (argument -n).')
argp.add_argument('--force_default_poller', default=False, action='store_const', const=True,
                  help="Don't try to iterate over many polling strategies when they exist")
args = argp.parse_args()
if args.force_default_poller:
_POLLING_STRATEGIES = {}
jobset.measure_cpu_costs = args.measure_cpu_costs
# update submodules if necessary
need_to_regenerate_projects = False
for spec in args.update_submodules:
spec = spec.split(':', 1)
if len(spec) == 1:
submodule = spec[0]
branch = 'master'
elif len(spec) == 2:
submodule = spec[0]
branch = spec[1]
cwd = 'third_party/%s' % submodule
def git(cmd, cwd=cwd):
print('in %s: git %s' % (cwd, cmd))
subprocess.check_call('git %s' % cmd, cwd=cwd, shell=True)
git('fetch')
git('checkout %s' % branch)
git('pull origin %s' % branch)
if os.path.exists('src/%s/gen_build_yaml.py' % submodule):
need_to_regenerate_projects = True
if need_to_regenerate_projects:
if jobset.platform_string() == 'linux':
subprocess.check_call('tools/buildgen/generate_projects.sh', shell=True)
else:
print('WARNING: may need to regenerate projects, but since we are not on')
print(' Linux this step is being skipped. Compilation MAY fail.')
# grab config
run_config = _CONFIGS[args.config]
build_config = run_config.build_config
if args.travis:
_FORCE_ENVIRON_FOR_WRAPPERS = {'GRPC_TRACE': 'api'}
if 'all' in args.language:
lang_list = _LANGUAGES.keys()
else:
lang_list = args.language
# We don't support code coverage on some languages
if 'gcov' in args.config:
for bad in ['objc', 'sanity']:
if bad in lang_list:
lang_list.remove(bad)
languages = set(_LANGUAGES[l] for l in lang_list)
for l in languages:
l.configure(run_config, args)
language_make_options=[]
if any(language.make_options() for language in languages):
if not 'gcov' in args.config and len(languages) != 1:
print('languages with custom make options cannot be built simultaneously with other languages')
sys.exit(1)
else:
language_make_options = next(iter(languages)).make_options()
if args.use_docker:
if not args.travis:
print('Seen --use_docker flag, will run tests under docker.')
print('')
print('IMPORTANT: The changes you are testing need to be locally committed')
print('because only the committed changes in the current branch will be')
print('copied to the docker environment.')
time.sleep(5)
dockerfile_dirs = set([l.dockerfile_dir() for l in languages])
if len(dockerfile_dirs) > 1:
if 'gcov' in args.config:
dockerfile_dir = 'tools/dockerfile/test/multilang_jessie_x64'
      print('Using multilang_jessie_x64 docker image for code coverage for '
'all languages.')
else:
      print('Languages to be tested require running under different docker '
'images.')
sys.exit(1)
else:
dockerfile_dir = next(iter(dockerfile_dirs))
child_argv = [ arg for arg in sys.argv if not arg == '--use_docker' ]
run_tests_cmd = 'python tools/run_tests/run_tests.py %s' % ' '.join(child_argv[1:])
env = os.environ.copy()
env['RUN_TESTS_COMMAND'] = run_tests_cmd
env['DOCKERFILE_DIR'] = dockerfile_dir
env['DOCKER_RUN_SCRIPT'] = 'tools/run_tests/dockerize/docker_run_tests.sh'
if args.xml_report:
env['XML_REPORT'] = args.xml_report
if not args.travis:
env['TTY_FLAG'] = '-t' # enables Ctrl-C when not on Jenkins.
subprocess.check_call(['tools/run_tests/dockerize/build_docker_and_run_tests.sh'],
shell=True,
env=env)
sys.exit(0)
_check_arch_option(args.arch)
def make_jobspec(cfg, targets, makefile='Makefile'):
if platform_string() == 'windows':
extra_args = []
    # Prefer parallel compilation: empirically /m:2 gives the best
    # performance/price ratio and should prevent overloading the Windows
    # workers.
extra_args.extend(['/m:2'])
# disable PDB generation: it's broken, and we don't need it during CI
extra_args.extend(['/p:Jenkins=true'])
return [
jobset.JobSpec([_windows_build_bat(args.compiler),
'vsprojects\\%s.sln' % target,
'/p:Configuration=%s' % _MSBUILD_CONFIG[cfg]] +
extra_args +
language_make_options,
shell=True, timeout_seconds=None)
for target in targets]
else:
if targets:
return [jobset.JobSpec([os.getenv('MAKE', 'make'),
'-f', makefile,
'-j', '%d' % args.jobs,
'EXTRA_DEFINES=GRPC_TEST_SLOWDOWN_MACHINE_FACTOR=%f' % args.slowdown,
'CONFIG=%s' % cfg] +
language_make_options +
([] if not args.travis else ['JENKINS_BUILD=1']) +
targets,
timeout_seconds=None)]
else:
return []
make_targets = {}
for l in languages:
makefile = l.makefile_name()
make_targets[makefile] = make_targets.get(makefile, set()).union(
set(l.make_targets()))
def build_step_environ(cfg):
environ = {'CONFIG': cfg}
msbuild_cfg = _MSBUILD_CONFIG.get(cfg)
if msbuild_cfg:
environ['MSBUILD_CONFIG'] = msbuild_cfg
return environ
build_steps = list(set(
jobset.JobSpec(cmdline, environ=build_step_environ(build_config), flake_retries=5)
for l in languages
for cmdline in l.pre_build_steps()))
if make_targets:
make_commands = itertools.chain.from_iterable(make_jobspec(build_config, list(targets), makefile) for (makefile, targets) in make_targets.items())
build_steps.extend(set(make_commands))
build_steps.extend(set(
jobset.JobSpec(cmdline, environ=build_step_environ(build_config), timeout_seconds=None)
for l in languages
for cmdline in l.build_steps()))
post_tests_steps = list(set(
jobset.JobSpec(cmdline, environ=build_step_environ(build_config))
for l in languages
for cmdline in l.post_tests_steps()))
runs_per_test = args.runs_per_test
forever = args.forever
def _shut_down_legacy_server(legacy_server_port):
try:
version = int(urllib.request.urlopen(
'http://localhost:%d/version_number' % legacy_server_port,
timeout=10).read())
except:
pass
else:
urllib.request.urlopen(
'http://localhost:%d/quitquitquit' % legacy_server_port).read()
def _start_port_server(port_server_port):
# check if a compatible port server is running
# if incompatible (version mismatch) ==> start a new one
# if not running ==> start a new one
# otherwise, leave it up
try:
version = int(urllib.request.urlopen(
'http://localhost:%d/version_number' % port_server_port,
timeout=10).read())
print('detected port server running version %d' % version)
running = True
except Exception as e:
print('failed to detect port server: %s' % sys.exc_info()[0])
print(e.strerror)
running = False
if running:
current_version = int(subprocess.check_output(
[sys.executable, os.path.abspath('tools/run_tests/python_utils/port_server.py'),
'dump_version']))
print('my port server is version %d' % current_version)
running = (version >= current_version)
if not running:
print('port_server version mismatch: killing the old one')
urllib.request.urlopen('http://localhost:%d/quitquitquit' % port_server_port).read()
time.sleep(1)
if not running:
fd, logfile = tempfile.mkstemp()
os.close(fd)
print('starting port_server, with log file %s' % logfile)
args = [sys.executable, os.path.abspath('tools/run_tests/python_utils/port_server.py'),
'-p', '%d' % port_server_port, '-l', logfile]
env = dict(os.environ)
env['BUILD_ID'] = 'pleaseDontKillMeJenkins'
if platform_string() == 'windows':
# Working directory of port server needs to be outside of Jenkins
# workspace to prevent file lock issues.
tempdir = tempfile.mkdtemp()
port_server = subprocess.Popen(
args,
env=env,
cwd=tempdir,
creationflags = 0x00000008, # detached process
close_fds=True)
else:
port_server = subprocess.Popen(
args,
env=env,
preexec_fn=os.setsid,
close_fds=True)
time.sleep(1)
# ensure port server is up
waits = 0
while True:
if waits > 10:
print('killing port server due to excessive start up waits')
port_server.kill()
if port_server.poll() is not None:
print('port_server failed to start')
# try one final time: maybe another build managed to start one
time.sleep(1)
try:
urllib.request.urlopen('http://localhost:%d/get' % port_server_port,
timeout=1).read()
print('last ditch attempt to contact port server succeeded')
break
except:
traceback.print_exc()
port_log = open(logfile, 'r').read()
print(port_log)
sys.exit(1)
try:
urllib.request.urlopen('http://localhost:%d/get' % port_server_port,
timeout=1).read()
print('port server is up and ready')
break
except socket.timeout:
print('waiting for port_server: timeout')
        traceback.print_exc()
time.sleep(1)
waits += 1
except urllib.error.URLError:
print('waiting for port_server: urlerror')
        traceback.print_exc()
time.sleep(1)
waits += 1
except:
traceback.print_exc()
port_server.kill()
raise
def _calculate_num_runs_failures(list_of_results):
"""Caculate number of runs and failures for a particular test.
Args:
list_of_results: (List) of JobResult object.
Returns:
A tuple of total number of runs and failures.
"""
num_runs = len(list_of_results) # By default, there is 1 run per JobResult.
num_failures = 0
for jobresult in list_of_results:
if jobresult.retries > 0:
num_runs += jobresult.retries
if jobresult.num_failures > 0:
num_failures += jobresult.num_failures
return num_runs, num_failures
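# Worked example with hypothetical JobResult values (illustration only):
# given one result that ran cleanly (retries=0, num_failures=0) and one that
# flaked once before passing (retries=1, num_failures=1), the function
# returns (3, 1): two base runs plus one retry, and one recorded failure.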
# _build_and_run results
class BuildAndRunError(object):
BUILD = object()
TEST = object()
POST_TEST = object()
# returns a list of things that failed (or an empty list on success)
def _build_and_run(
check_cancelled, newline_on_success, xml_report=None, build_only=False):
"""Do one pass of building & running tests."""
# build latest sequentially
num_failures, resultset = jobset.run(
build_steps, maxjobs=1, stop_on_failure=True,
newline_on_success=newline_on_success, travis=args.travis)
if num_failures:
return [BuildAndRunError.BUILD]
if build_only:
if xml_report:
report_utils.render_junit_xml_report(resultset, xml_report,
suite_name=args.report_suite_name)
return []
# start antagonists
antagonists = [subprocess.Popen(['tools/run_tests/python_utils/antagonist.py'])
for _ in range(0, args.antagonists)]
port_server_port = 32766
_start_port_server(port_server_port)
resultset = None
num_test_failures = 0
try:
infinite_runs = runs_per_test == 0
one_run = set(
spec
for language in languages
for spec in language.test_specs()
if (re.search(args.regex, spec.shortname) and
(args.regex_exclude == '' or
not re.search(args.regex_exclude, spec.shortname))))
    # When running on Travis, we want our test runs to be as similar as possible
# for reproducibility purposes.
if args.travis:
massaged_one_run = sorted(one_run, key=lambda x: x.shortname)
else:
# whereas otherwise, we want to shuffle things up to give all tests a
# chance to run.
massaged_one_run = list(one_run) # random.shuffle needs an indexable seq.
random.shuffle(massaged_one_run) # which it modifies in-place.
if infinite_runs:
assert len(massaged_one_run) > 0, 'Must have at least one test for a -n inf run'
runs_sequence = (itertools.repeat(massaged_one_run) if infinite_runs
else itertools.repeat(massaged_one_run, runs_per_test))
all_runs = itertools.chain.from_iterable(runs_sequence)
if args.quiet_success:
jobset.message('START', 'Running tests quietly, only failing tests will be reported', do_newline=True)
num_test_failures, resultset = jobset.run(
all_runs, check_cancelled, newline_on_success=newline_on_success,
travis=args.travis, infinite_runs=infinite_runs, maxjobs=args.jobs,
stop_on_failure=args.stop_on_failure,
add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port},
quiet_success=args.quiet_success)
if resultset:
for k, v in sorted(resultset.items()):
num_runs, num_failures = _calculate_num_runs_failures(v)
if num_failures > 0:
if num_failures == num_runs: # what about infinite_runs???
jobset.message('FAILED', k, do_newline=True)
else:
jobset.message(
'FLAKE', '%s [%d/%d runs flaked]' % (k, num_failures, num_runs),
do_newline=True)
finally:
for antagonist in antagonists:
antagonist.kill()
if xml_report and resultset:
report_utils.render_junit_xml_report(resultset, xml_report,
suite_name=args.report_suite_name)
number_failures, _ = jobset.run(
post_tests_steps, maxjobs=1, stop_on_failure=True,
newline_on_success=newline_on_success, travis=args.travis)
out = []
if number_failures:
out.append(BuildAndRunError.POST_TEST)
if num_test_failures:
out.append(BuildAndRunError.TEST)
return out
if forever:
success = True
while True:
dw = watch_dirs.DirWatcher(['src', 'include', 'test', 'examples'])
initial_time = dw.most_recent_change()
have_files_changed = lambda: dw.most_recent_change() != initial_time
previous_success = success
    errors = _build_and_run(check_cancelled=have_files_changed,
                            newline_on_success=False,
                            build_only=args.build_only)
    success = not errors
if not previous_success and not errors:
jobset.message('SUCCESS',
'All tests are now passing properly',
do_newline=True)
jobset.message('IDLE', 'No change detected')
while not have_files_changed():
time.sleep(1)
else:
errors = _build_and_run(check_cancelled=lambda: False,
newline_on_success=args.newline_on_success,
xml_report=args.xml_report,
build_only=args.build_only)
if not errors:
jobset.message('SUCCESS', 'All tests passed', do_newline=True)
else:
jobset.message('FAILED', 'Some tests failed', do_newline=True)
exit_code = 0
if BuildAndRunError.BUILD in errors:
exit_code |= 1
if BuildAndRunError.TEST in errors:
exit_code |= 2
if BuildAndRunError.POST_TEST in errors:
exit_code |= 4
sys.exit(exit_code)
|
bsd-3-clause
|
dorbarker/cgmlst_autocreate
|
json2csv.py
|
1
|
2163
|
import argparse
import csv
import os
import json
def arguments():
parser = argparse.ArgumentParser()
    parser.add_argument('-j', '--jsons', help='Path to JSON directory')
    parser.add_argument('-o', '--out', help='CSV output file path.')
    parser.add_argument('-t', '--test', help='Name of the MIST test')
return parser.parse_args()
def load_json(filepath):
with open(filepath, 'r') as f:
data = json.load(f)
return data
def allele_calls(genes):
    '''Read a MIST JSON and gather the allele calls from it.
    Marks each gene as missing (0), truncated (-1), or a previously
    unobserved allele (?). The latter case is an indication that you
    should run update_definitions.py.
'''
calls = {}
for gene in genes:
if genes[gene]['BlastResults'] is None:
calls[gene] = '0'
elif genes[gene]['IsContigTruncation']:
calls[gene] = '-1'
elif genes[gene]['CorrectMarkerMatch'] is False:
calls[gene] = '?'
else:
calls[gene] = genes[gene]['MarkerCall']
return calls
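# A small illustrative mapping (hypothetical gene records, not real output):
# a gene with no BLAST hit yields '0', a truncated hit '-1', a hit that is
# not a known allele '?', and an exact match its MarkerCall, e.g.
#
#   {'aroE': '0', 'glnA': '-1', 'gltA': '?', 'pgm': '12'}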
def write_csv(results, outpath):
'''Writes results as a CSV file'''
# enforce alphanumeric ordering
genome_order = sorted(results.keys())
gene_order = sorted(results[genome_order[0]].keys())
header = ['genomes'] + gene_order
with open(outpath, 'w') as f:
out = csv.writer(f)
out.writerow(header)
for genome in genome_order:
line = [genome]
for gene in gene_order:
line.append(results[genome][gene])
out.writerow(line)
def main():
args = arguments()
jsons = [os.path.join(args.jsons, x) for x in os.listdir(args.jsons)]
results = {}
for j in jsons:
data = load_json(j)
strain = data['Results'][0]['Strain']
genes = data['Results'][0]['TestResults'][args.test]
results[strain] = allele_calls(genes)
write_csv(results, args.out)
if __name__ == '__main__':
main()
|
gpl-3.0
|
tedunderwood/GenreProject
|
python/workshop/bagofwords.py
|
2
|
6896
|
# bagofwords.py
#
# The BagOfWords class implements individual volumes as ordered
# lists of features.
#
import numpy as np
import pandas as pd
from pandas import Series, DataFrame
def all_nonalphanumeric(astring):
nonalphanum = True
for character in astring:
if character.isalpha() or character.isdigit():
nonalphanum = False
break
return nonalphanum
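# Doctest-style sketch of the helper above (illustration only):
#
#   all_nonalphanumeric('!!!')   -> True   (pure punctuation)
#   all_nonalphanumeric("can't") -> False  (contains letters)
#   all_nonalphanumeric('1848')  -> False  (contains digits)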
class BagOfWords:
def __init__(self, filepath, volID, include_punctuation):
''' Construct a BagOfWords.
volID is a string label for the volume.
include_punctuation is a boolean.
'''
self.volID = volID
with open(filepath, encoding = 'utf-8') as f:
filelines = f.readlines()
self.rawcounts = dict()
self.totalcount = 0
for line in filelines:
line = line.rstrip()
fields = line.split('\t')
if len(fields) != 2:
print("Illegal line length in " + filepath)
print(line)
continue
else:
tokentype = fields[0]
count = fields[1]
try:
intcount = int(count)
if include_punctuation or not all_nonalphanumeric(tokentype):
self.rawcounts[tokentype] = intcount
self.totalcount += intcount
except ValueError:
print("Cannot parse count " + count + " as integer.")
continue
self.numrawcounts = len(self.rawcounts)
def selectfeatures(self, featurelist):
''' A BagOfWords is created with merely a dictionary of raw token counts.
One could call this a sparse table. It has no entries where features are
missing.
We need to organize these as an ordered series of features, which includes
only the features we have chosen to use in the current model, and has zeroes for
missing values.
'''
self.featurelist = featurelist
self.numfeatures = len(featurelist)
self.features = Series(self.rawcounts, index = featurelist, dtype = 'float64')
# Pandas has the nice feature of building a series from a dictionary if it's
# provided an index of values. So this effectively builds a series of entries
# ordered by the keys in 'featurelist,' with NaN in places where rawcounts
# had no corresponding key.
self.features[self.features.isnull()] = 0
# This replaces NaN with zero, since missing words are effectively words with
# count == 0.
def normalizefrequencies(self):
''' Simply divides all frequencies by the total token count for this volume.
'''
self.features = self.features / self.totalcount
def standardizefrequencies(self, standardizer):
''' Convert features to z-scores by centering them on the means and
scaling them by standard deviation.
standardizer = an object of class StandardizingVector, presumably created
either on the corpus that contains this volume, or on the training corpus
that created the model we are about to use on this volume.
'''
assert len(self.features) == len(standardizer.means)
self.features = (self.features - standardizer.means) / standardizer.stdevs
class StandardizingVector:
''' An object that computes the means and standard deviations of features
across a corpus of volumes. These statistics can then be used to standardize
the feature vectors in volumes.
'''
def __init__(self, listofvolumes, featurelist):
numvolumes = len(listofvolumes)
numfeatures = len(featurelist)
# First a simple sanity check. We are talking about volumes with
# the same number of features, right?
for avolume in listofvolumes:
assert avolume.numfeatures == numfeatures
# And how about a spot check to make sure the lists are really the same?
for ourfeature, itsfeature in zip(featurelist, listofvolumes[0].featurelist):
assert ourfeature == itsfeature
# Okay, we're good. Initialize some pandas series.
means = list()
stdevs = list()
for afeature in featurelist:
featuredistribution = np.zeros(numvolumes)
# For each feature, create an array of possible values by polling volumes.
for volidx, avolume in enumerate(listofvolumes):
featuredistribution[volidx] = avolume.features[afeature]
# Then calculate mean and standard deviation for this feature.
thismean = np.mean(featuredistribution)
thisstd = np.std(featuredistribution)
if thisstd == 0:
print("Problematic standard deviation of zero for feature " + afeature)
thisstd = 0.0000001
# Cheesy hack is my middle name.
means.append(thismean)
stdevs.append(thisstd)
self.means = Series(means, index = featurelist)
self.stdevs = Series(stdevs, index = featurelist)
self.features = featurelist
# Because we're going to need the list of features to apply this model
# to other volumes.
# Done.
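# A minimal usage sketch for the classes above; 'vols' and 'features' are
# hypothetical names for a list of BagOfWords objects and a chosen
# vocabulary:
#
#   for vol in vols:
#       vol.selectfeatures(features)
#       vol.normalizefrequencies()
#   standardizer = StandardizingVector(vols, features)
#   for vol in vols:
#       vol.standardizefrequencies(standardizer)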
class WordVector:
''' A WordVector is just like a BagOfWords, except that it has
a simpler constructor — it just accepts a list of tokens.
In Java, you could write multiple constructors for one class.
In Python, I'd have to rewrite the constructor inelegantly to make
these a single class. So. Two classes.
'''
def __init__(self, listofwords):
''' Construct a WordVector from a list.
'''
self.rawcounts = dict()
self.totalcount = 0
for word in listofwords:
self.totalcount += 1
if word in self.rawcounts:
self.rawcounts[word] += 1
else:
self.rawcounts[word] = 1
self.numrawcounts = len(self.rawcounts)
def selectfeatures(self, featurelist):
''' A WordVector is created with merely a dictionary of raw token counts.
One could call this a sparse table. It has no entries where features are
missing.
We need to organize these as an ordered series of features, which includes
only the features we have chosen to use in the current model, and has zeroes for
missing values.
'''
self.featurelist = featurelist
self.numfeatures = len(featurelist)
self.features = Series(self.rawcounts, index = featurelist, dtype = 'float64')
# Pandas has the nice feature of building a series from a dictionary if it's
# provided an index of values. So this effectively builds a series of entries
# ordered by the keys in 'featurelist,' with NaN in places where rawcounts
# had no corresponding key.
self.features[self.features.isnull()] = 0
# This replaces NaN with zero, since missing words are effectively words with
# count == 0.
def normalizefrequencies(self):
''' Simply divides all frequencies by the total token count for this volume.
'''
self.features = self.features / self.totalcount
def standardizefrequencies(self, standardizer):
''' Convert features to z-scores by centering them on the means and
scaling them by standard deviation.
standardizer = an object of class StandardizingVector, presumably created
either on the corpus that contains this volume, or on the training corpus
that created the model we are about to use on this volume.
'''
assert len(self.features) == len(standardizer.means)
self.features = (self.features - standardizer.means) / standardizer.stdevs
|
mit
|
brucetsao/arduino-ameba
|
arduino-core/src/processing/app/i18n/python/requests/cookies.py
|
209
|
14024
|
# -*- coding: utf-8 -*-
"""
Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import collections
from .compat import cookielib, urlparse, Morsel
try:
import threading
# grr, pyflakes: this fixes "redefinition of unused 'threading'"
threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
"""Wraps a `requests.Request` to mimic a `urllib2.Request`.
The code in `cookielib.CookieJar` expects this interface in order to correctly
manage cookie policies, i.e., determine whether a cookie can be set, given the
domains of the request and the cookie.
The original request object is read-only. The client is responsible for collecting
the new headers via `get_new_headers()` and interpreting them appropriately. You
probably want `get_cookie_header`, defined below.
"""
def __init__(self, request):
self._r = request
self._new_headers = {}
self.type = urlparse(self._r.url).scheme
def get_type(self):
return self.type
def get_host(self):
return urlparse(self._r.url).netloc
def get_origin_req_host(self):
return self.get_host()
def get_full_url(self):
return self._r.url
def is_unverifiable(self):
return True
def has_header(self, name):
return name in self._r.headers or name in self._new_headers
def get_header(self, name, default=None):
return self._r.headers.get(name, self._new_headers.get(name, default))
def add_header(self, key, val):
"""cookielib has no legitimate use for this method; add it back if you find one."""
raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
def add_unredirected_header(self, name, value):
self._new_headers[name] = value
def get_new_headers(self):
return self._new_headers
@property
def unverifiable(self):
return self.is_unverifiable()
class MockResponse(object):
"""Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
...what? Basically, expose the parsed HTTP headers from the server response
the way `cookielib` expects to see them.
"""
def __init__(self, headers):
"""Make a MockResponse for `cookielib` to read.
:param headers: a httplib.HTTPMessage or analogous carrying the headers
"""
self._headers = headers
def info(self):
return self._headers
def getheaders(self, name):
return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
"""Extract the cookies from the response into a CookieJar.
:param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
:param request: our own requests.Request object
:param response: urllib3.HTTPResponse object
"""
# the _original_response field is the wrapped httplib.HTTPResponse object,
req = MockRequest(request)
# pull out the HTTPMessage with the headers and put it in the mock:
res = MockResponse(response._original_response.msg)
jar.extract_cookies(res, req)
def get_cookie_header(jar, request):
"""Produce an appropriate Cookie header string to be sent with `request`, or None."""
r = MockRequest(request)
jar.add_cookie_header(r)
return r.get_new_headers().get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
"""Unsets a cookie by name, by default over all domains and paths.
Wraps CookieJar.clear() and is O(n).
"""
clearables = []
for cookie in cookiejar:
if cookie.name == name:
if domain is None or domain == cookie.domain:
if path is None or path == cookie.path:
clearables.append((cookie.domain, cookie.path, cookie.name))
for domain, path, name in clearables:
cookiejar.clear(domain, path, name)
class CookieConflictError(RuntimeError):
"""There are two cookies that meet the criteria specified in the cookie jar.
Use .get and .set and include domain and path args in order to be more specific."""
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict interface.
This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations.
Don't use the dict interface internally; it's just for compatibility
with external client code. All `requests` code should work out of the box
with externally provided instances of CookieJar, e.g., LWPCookieJar and
FileCookieJar.
Caution: dictionary operations that are normally O(1) may be O(n).
Unlike a regular CookieJar, this class is pickleable.
"""
def get(self, name, default=None, domain=None, path=None):
"""Dict-like get() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains. Caution: operation is O(n), not O(1)."""
try:
return self._find_no_duplicates(name, domain, path)
except KeyError:
return default
def set(self, name, value, **kwargs):
"""Dict-like set() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains."""
# support client code that unsets cookies by assignment of a None value:
if value is None:
remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
return
if isinstance(value, Morsel):
c = morsel_to_cookie(value)
else:
c = create_cookie(name, value, **kwargs)
self.set_cookie(c)
return c
def keys(self):
"""Dict-like keys() that returns a list of names of cookies from the jar.
See values() and items()."""
keys = []
for cookie in iter(self):
keys.append(cookie.name)
return keys
def values(self):
"""Dict-like values() that returns a list of values of cookies from the jar.
See keys() and items()."""
values = []
for cookie in iter(self):
values.append(cookie.value)
return values
def items(self):
"""Dict-like items() that returns a list of name-value tuples from the jar.
See keys() and values(). Allows client-code to call "dict(RequestsCookieJar)"
and get a vanilla python dict of key value pairs."""
items = []
for cookie in iter(self):
items.append((cookie.name, cookie.value))
return items
def list_domains(self):
"""Utility method to list all the domains in the jar."""
domains = []
for cookie in iter(self):
if cookie.domain not in domains:
domains.append(cookie.domain)
return domains
def list_paths(self):
"""Utility method to list all the paths in the jar."""
paths = []
for cookie in iter(self):
if cookie.path not in paths:
paths.append(cookie.path)
return paths
def multiple_domains(self):
"""Returns True if there are multiple domains in the jar.
Returns False otherwise."""
domains = []
for cookie in iter(self):
if cookie.domain is not None and cookie.domain in domains:
return True
domains.append(cookie.domain)
return False # there is only one domain in jar
def get_dict(self, domain=None, path=None):
"""Takes as an argument an optional domain and path and returns a plain old
Python dict of name-value pairs of cookies that meet the requirements."""
dictionary = {}
for cookie in iter(self):
if (domain is None or cookie.domain == domain) and (path is None
or cookie.path == path):
dictionary[cookie.name] = cookie.value
return dictionary
def __getitem__(self, name):
"""Dict-like __getitem__() for compatibility with client code. Throws exception
if there are more than one cookie with name. In that case, use the more
explicit get() method instead. Caution: operation is O(n), not O(1)."""
return self._find_no_duplicates(name)
def __setitem__(self, name, value):
"""Dict-like __setitem__ for compatibility with client code. Throws exception
if there is already a cookie of that name in the jar. In that case, use the more
explicit set() method instead."""
self.set(name, value)
def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
remove_cookie_by_name(self, name)
def update(self, other):
"""Updates this jar with cookies from another CookieJar or dict-like"""
if isinstance(other, cookielib.CookieJar):
for cookie in other:
self.set_cookie(cookie)
else:
super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None):
"""Requests uses this method internally to get cookie values. Takes as args name
and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
_find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
if there are conflicting cookies."""
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
return cookie.value
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None):
"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally.
Takes as args name and optional domain and path. Returns a cookie.value.
Throws KeyError if cookie is not found and CookieConflictError if there are
multiple cookies that match name and optionally domain and path."""
toReturn = None
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
if toReturn is not None: # if there are multiple cookies that meet passed in criteria
raise CookieConflictError('There are multiple cookies with name, %r' % (name))
toReturn = cookie.value # we will eventually return this as long as no cookie conflict
if toReturn:
return toReturn
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def __getstate__(self):
"""Unlike a normal CookieJar, this class is pickleable."""
state = self.__dict__.copy()
# remove the unpickleable RLock object
state.pop('_cookies_lock')
return state
def __setstate__(self, state):
"""Unlike a normal CookieJar, this class is pickleable."""
self.__dict__.update(state)
if '_cookies_lock' not in self.__dict__:
self._cookies_lock = threading.RLock()
def copy(self):
"""Return a copy of this RequestsCookieJar."""
new_cj = RequestsCookieJar()
new_cj.update(self)
return new_cj
def create_cookie(name, value, **kwargs):
"""Make a cookie from underspecified parameters.
By default, the pair of `name` and `value` will be set for the domain ''
and sent on every request (this is sometimes called a "supercookie").
"""
result = dict(
version=0,
name=name,
value=value,
port=None,
domain='',
path='/',
secure=False,
expires=None,
discard=True,
comment=None,
comment_url=None,
rest={'HttpOnly': None},
rfc2109=False,)
badargs = set(kwargs) - set(result)
if badargs:
err = 'create_cookie() got unexpected keyword arguments: %s'
raise TypeError(err % list(badargs))
result.update(kwargs)
result['port_specified'] = bool(result['port'])
result['domain_specified'] = bool(result['domain'])
result['domain_initial_dot'] = result['domain'].startswith('.')
result['path_specified'] = bool(result['path'])
return cookielib.Cookie(**result)
def morsel_to_cookie(morsel):
"""Convert a Morsel object into a Cookie containing the one k/v pair."""
c = create_cookie(
name=morsel.key,
value=morsel.value,
version=morsel['version'] or 0,
port=None,
port_specified=False,
domain=morsel['domain'],
domain_specified=bool(morsel['domain']),
domain_initial_dot=morsel['domain'].startswith('.'),
path=morsel['path'],
path_specified=bool(morsel['path']),
secure=bool(morsel['secure']),
expires=morsel['max-age'] or morsel['expires'],
discard=False,
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
rest={'HttpOnly': morsel['httponly']},
rfc2109=False,)
return c
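# Hedged sketch (added; not part of requests): converting a stdlib Morsel
# into a cookielib Cookie with morsel_to_cookie() above. The key and value
# are invented; a fresh Morsel carries empty defaults for the reserved
# attributes that morsel_to_cookie() reads.
def _demo_morsel_to_cookie():
    m = Morsel()
    m.set('token', 'xyz', 'xyz')  # key, value, coded_value
    return morsel_to_cookie(m)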
def cookiejar_from_dict(cookie_dict, cookiejar=None):
"""Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
"""
if cookiejar is None:
cookiejar = RequestsCookieJar()
if cookie_dict is not None:
for name in cookie_dict:
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
return cookiejar
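# --- Hedged usage sketch (added; not part of requests) ---
# Exercises the dict-like surface of RequestsCookieJar defined above.
# Cookie names, values and the domain are invented for illustration.
def _demo_requests_cookiejar():
    jar = cookiejar_from_dict({'session': 'abc123'})
    jar.set('theme', 'dark', domain='example.com', path='/')
    assert jar['session'] == 'abc123'          # dict-style lookup, O(n)
    return jar.get_dict(domain='example.com')  # -> {'theme': 'dark'}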
|
lgpl-2.1
|
Jaiz909/youtube-dl
|
youtube_dl/extractor/npo.py
|
9
|
12703
|
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
fix_xml_ampersands,
parse_duration,
qualities,
strip_jsonp,
unified_strdate,
url_basename,
)
class NPOBaseIE(InfoExtractor):
def _get_token(self, video_id):
token_page = self._download_webpage(
'http://ida.omroep.nl/npoplayer/i.js',
video_id, note='Downloading token')
return self._search_regex(
r'npoplayer\.token = "(.+?)"', token_page, 'token')
class NPOIE(NPOBaseIE):
IE_NAME = 'npo.nl'
_VALID_URL = r'https?://(?:www\.)?npo\.nl/(?!live|radio)[^/]+/[^/]+/(?P<id>[^/?]+)'
_TESTS = [
{
'url': 'http://www.npo.nl/nieuwsuur/22-06-2014/VPWON_1220719',
'md5': '4b3f9c429157ec4775f2c9cb7b911016',
'info_dict': {
'id': 'VPWON_1220719',
'ext': 'm4v',
'title': 'Nieuwsuur',
'description': 'Dagelijks tussen tien en elf: nieuws, sport en achtergronden.',
'upload_date': '20140622',
},
},
{
'url': 'http://www.npo.nl/de-mega-mike-mega-thomas-show/27-02-2009/VARA_101191800',
'md5': 'da50a5787dbfc1603c4ad80f31c5120b',
'info_dict': {
'id': 'VARA_101191800',
'ext': 'm4v',
'title': 'De Mega Mike & Mega Thomas show',
'description': 'md5:3b74c97fc9d6901d5a665aac0e5400f4',
'upload_date': '20090227',
'duration': 2400,
},
},
{
'url': 'http://www.npo.nl/tegenlicht/25-02-2013/VPWON_1169289',
'md5': 'f8065e4e5a7824068ed3c7e783178f2c',
'info_dict': {
'id': 'VPWON_1169289',
'ext': 'm4v',
'title': 'Tegenlicht',
'description': 'md5:d6476bceb17a8c103c76c3b708f05dd1',
'upload_date': '20130225',
'duration': 3000,
},
},
{
'url': 'http://www.npo.nl/de-nieuwe-mens-deel-1/21-07-2010/WO_VPRO_043706',
'info_dict': {
'id': 'WO_VPRO_043706',
'ext': 'wmv',
'title': 'De nieuwe mens - Deel 1',
'description': 'md5:518ae51ba1293ffb80d8d8ce90b74e4b',
'duration': 4680,
},
'params': {
# mplayer mms download
'skip_download': True,
}
},
# non asf in streams
{
'url': 'http://www.npo.nl/hoe-gaat-europa-verder-na-parijs/10-01-2015/WO_NOS_762771',
'md5': 'b3da13de374cbe2d5332a7e910bef97f',
'info_dict': {
'id': 'WO_NOS_762771',
'ext': 'mp4',
'title': 'Hoe gaat Europa verder na Parijs?',
},
},
]
def _real_extract(self, url):
video_id = self._match_id(url)
return self._get_info(video_id)
def _get_info(self, video_id):
metadata = self._download_json(
'http://e.omroep.nl/metadata/aflevering/%s' % video_id,
video_id,
# We have to remove the javascript callback
transform_source=strip_jsonp,
)
token = self._get_token(video_id)
formats = []
pubopties = metadata.get('pubopties')
if pubopties:
quality = qualities(['adaptive', 'wmv_sb', 'h264_sb', 'wmv_bb', 'h264_bb', 'wvc1_std', 'h264_std'])
for format_id in pubopties:
format_info = self._download_json(
'http://ida.omroep.nl/odi/?prid=%s&puboptions=%s&adaptive=yes&token=%s'
% (video_id, format_id, token),
video_id, 'Downloading %s JSON' % format_id)
if format_info.get('error_code', 0) or format_info.get('errorcode', 0):
continue
streams = format_info.get('streams')
if streams:
video_info = self._download_json(
streams[0] + '&type=json',
video_id, 'Downloading %s stream JSON' % format_id)
else:
video_info = format_info
video_url = video_info.get('url')
if not video_url:
continue
if format_id == 'adaptive':
formats.extend(self._extract_m3u8_formats(video_url, video_id))
else:
formats.append({
'url': video_url,
'format_id': format_id,
'quality': quality(format_id),
})
streams = metadata.get('streams')
if streams:
for i, stream in enumerate(streams):
stream_url = stream.get('url')
if not stream_url:
continue
if '.asf' not in stream_url:
formats.append({
'url': stream_url,
'quality': stream.get('kwaliteit'),
})
continue
asx = self._download_xml(
stream_url, video_id,
'Downloading stream %d ASX playlist' % i,
transform_source=fix_xml_ampersands)
ref = asx.find('./ENTRY/Ref')
if ref is None:
continue
video_url = ref.get('href')
if not video_url:
continue
formats.append({
'url': video_url,
'ext': stream.get('formaat', 'asf'),
'quality': stream.get('kwaliteit'),
})
self._sort_formats(formats)
subtitles = {}
if metadata.get('tt888') == 'ja':
subtitles['nl'] = [{
'ext': 'vtt',
'url': 'http://e.omroep.nl/tt888/%s' % video_id,
}]
return {
'id': video_id,
'title': metadata['titel'],
'description': metadata['info'],
'thumbnail': metadata.get('images', [{'url': None}])[-1]['url'],
'upload_date': unified_strdate(metadata.get('gidsdatum')),
'duration': parse_duration(metadata.get('tijdsduur')),
'formats': formats,
'subtitles': subtitles,
}
class NPOLiveIE(NPOBaseIE):
IE_NAME = 'npo.nl:live'
_VALID_URL = r'https?://(?:www\.)?npo\.nl/live/(?P<id>.+)'
_TEST = {
'url': 'http://www.npo.nl/live/npo-1',
'info_dict': {
'id': 'LI_NEDERLAND1_136692',
'display_id': 'npo-1',
'ext': 'mp4',
'title': 're:^Nederland 1 [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'description': 'Livestream',
'is_live': True,
},
'params': {
'skip_download': True,
}
}
def _real_extract(self, url):
display_id = self._match_id(url)
webpage = self._download_webpage(url, display_id)
live_id = self._search_regex(
r'data-prid="([^"]+)"', webpage, 'live id')
metadata = self._download_json(
'http://e.omroep.nl/metadata/%s' % live_id,
display_id, transform_source=strip_jsonp)
token = self._get_token(display_id)
formats = []
streams = metadata.get('streams')
if streams:
for stream in streams:
stream_type = stream.get('type').lower()
# smooth streaming is not supported
if stream_type in ['ss', 'ms']:
continue
stream_info = self._download_json(
'http://ida.omroep.nl/aapi/?stream=%s&token=%s&type=jsonp'
% (stream.get('url'), token),
display_id, 'Downloading %s JSON' % stream_type)
if stream_info.get('error_code', 0) or stream_info.get('errorcode', 0):
continue
stream_url = self._download_json(
stream_info['stream'], display_id,
'Downloading %s URL' % stream_type,
'Unable to download %s URL' % stream_type,
transform_source=strip_jsonp, fatal=False)
if not stream_url:
continue
if stream_type == 'hds':
f4m_formats = self._extract_f4m_formats(stream_url, display_id)
# the f4m downloader downloads only a piece of a live stream
for f4m_format in f4m_formats:
f4m_format['preference'] = -1
formats.extend(f4m_formats)
elif stream_type == 'hls':
formats.extend(self._extract_m3u8_formats(stream_url, display_id, 'mp4'))
else:
formats.append({
'url': stream_url,
'preference': -10,
})
self._sort_formats(formats)
return {
'id': live_id,
'display_id': display_id,
'title': self._live_title(metadata['titel']),
'description': metadata['info'],
'thumbnail': metadata.get('images', [{'url': None}])[-1]['url'],
'formats': formats,
'is_live': True,
}
class NPORadioIE(InfoExtractor):
IE_NAME = 'npo.nl:radio'
_VALID_URL = r'https?://(?:www\.)?npo\.nl/radio/(?P<id>[^/]+)/?$'
_TEST = {
'url': 'http://www.npo.nl/radio/radio-1',
'info_dict': {
'id': 'radio-1',
'ext': 'mp3',
'title': 're:^NPO Radio 1 [0-9]{4}-[0-9]{2}-[0-9]{2} [0-9]{2}:[0-9]{2}$',
'is_live': True,
},
'params': {
'skip_download': True,
}
}
@staticmethod
def _html_get_attribute_regex(attribute):
return r'{0}\s*=\s*\'([^\']+)\''.format(attribute)
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
title = self._html_search_regex(
self._html_get_attribute_regex('data-channel'), webpage, 'title')
stream = self._parse_json(
self._html_search_regex(self._html_get_attribute_regex('data-streams'), webpage, 'data-streams'),
video_id)
codec = stream.get('codec')
return {
'id': video_id,
'url': stream['url'],
'title': self._live_title(title),
'acodec': codec,
'ext': codec,
'is_live': True,
}
class NPORadioFragmentIE(InfoExtractor):
IE_NAME = 'npo.nl:radio:fragment'
_VALID_URL = r'https?://(?:www\.)?npo\.nl/radio/[^/]+/fragment/(?P<id>\d+)'
_TEST = {
'url': 'http://www.npo.nl/radio/radio-5/fragment/174356',
'md5': 'dd8cc470dad764d0fdc70a9a1e2d18c2',
'info_dict': {
'id': '174356',
'ext': 'mp3',
'title': 'Jubileumconcert Willeke Alberti',
},
}
def _real_extract(self, url):
audio_id = self._match_id(url)
webpage = self._download_webpage(url, audio_id)
title = self._html_search_regex(
r'href="/radio/[^/]+/fragment/%s" title="([^"]+)"' % audio_id,
webpage, 'title')
audio_url = self._search_regex(
r"data-streams='([^']+)'", webpage, 'audio url')
return {
'id': audio_id,
'url': audio_url,
'title': title,
}
class TegenlichtVproIE(NPOIE):
IE_NAME = 'tegenlicht.vpro.nl'
_VALID_URL = r'https?://tegenlicht\.vpro\.nl/afleveringen/.*?'
_TESTS = [
{
'url': 'http://tegenlicht.vpro.nl/afleveringen/2012-2013/de-toekomst-komt-uit-afrika.html',
'md5': 'f8065e4e5a7824068ed3c7e783178f2c',
'info_dict': {
'id': 'VPWON_1169289',
'ext': 'm4v',
'title': 'Tegenlicht',
'description': 'md5:d6476bceb17a8c103c76c3b708f05dd1',
'upload_date': '20130225',
},
},
]
def _real_extract(self, url):
name = url_basename(url)
webpage = self._download_webpage(url, name)
urn = self._html_search_meta('mediaurn', webpage)
info_page = self._download_json(
'http://rs.vpro.nl/v2/api/media/%s.json' % urn, name)
return self._get_info(info_page['mid'])
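# Hedged sketch (added; not part of youtube-dl): what the qualities()
# helper used in NPOIE._get_info returns -- the rank of a format id within
# the preference list (or -1 when unknown), so later entries sort higher.
def _demo_qualities():
    rank = qualities(['wmv_sb', 'h264_sb', 'h264_std'])
    return rank('wmv_sb'), rank('h264_std'), rank('other')  # (0, 2, -1)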
|
unlicense
|
YISION/yision.github.io
|
randomforest-决策树与随机森林/f.py
|
1
|
2724
|
#encoding=utf8
from sklearn.ensemble import RandomForestClassifier
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# Load the dataset and define the response variable and predictors
data=pd.read_csv('data.csv')
#data=pd.read_csv('data_p.csv')
df=pd.DataFrame(data)
df=df.dropna()  # drop rows with missing values
y=df.BAD
x=pd.concat([df.LOAN,df.MORTDUE,df.VALUE,df.YOJ,df.DEROG,df.DELINQ,df.CLAGE,df.NINQ,df.CLNO],axis=1)
#x=pd.concat([df.LOAN,df.MORTDUE,df.VALUE,df.YOJ,df.DEROG,df.DELINQ,df.CLAGE,df.NINQ,df.CLNO,df.Resn_HomeImp,df.Resn_DebtCon,df.Job_Mgr,df.Job_Office,df.Job_Self,df.Job_ProfExe,df.Job_Sales,df.Job_Other],axis=1)
print df.head()
# Split into training and test sets
from sklearn.cross_validation import train_test_split
x_train,x_test,y_train,y_test=train_test_split(x,y,test_size=0.4,random_state=1)
# Fit a decision tree
from sklearn import tree, cross_validation
clf=tree.DecisionTreeClassifier(criterion='entropy',min_samples_leaf=8)
#print (clf)
print clf.fit(x_train,y_train)
# Evaluate on the test set
print("Decision tree test accuracy: {:.16f}".format(clf.score(x_test,y_test)))
print("Decision tree train accuracy: {:.16f}".format(clf.score(x_train,y_train)))
# Plot feature importances
feature_importance=clf.feature_importances_
important_features=x_train.columns.values[0::]
feature_importance=100.0*(feature_importance/feature_importance.max())
sorted_idx=np.argsort(feature_importance)[::-1]
pos=np.arange(sorted_idx.shape[0])+.5
plt.title('Feature Importance')
plt.barh(pos,feature_importance[sorted_idx[::-1]],color='r',align='center')
plt.yticks(pos,important_features)
plt.xlabel('Relative Importance')
plt.draw()
plt.show()
# Draw the decision tree
import pydot,StringIO
dot_data = StringIO.StringIO()
tree.export_graphviz(clf, out_file=dot_data, feature_names=['LOAN','MORTDUE','VALUE','YOJ','DEROG','DELINQ','CLAGE','NINQ','CLNO'])
graph = pydot.graph_from_dot_data(dot_data.getvalue())
graph.write_png('tree.png')
from IPython.core.display import Image
Image(filename='tree.png')
# Cross-validate the model
score1=cross_validation.cross_val_score(clf, x, y,cv=10)
print score1
# Random forest
clf2=RandomForestClassifier(n_estimators=1000,criterion='entropy',min_samples_leaf=8,random_state=1,n_jobs=5)
#print (clf2)
print clf2.fit(x_train,y_train)
# Evaluate on the test set
print("Random forest test accuracy: {:.16f}".format(clf2.score(x_test,y_test)))
print("Random forest train accuracy: {:.16f}".format(clf2.score(x_train,y_train)))
# Compare the two models via cross-validation
score2=cross_validation.cross_val_score(clf2,x,y,cv=10)
print score2
print ("决策树交叉验证:")
print score1.mean()
print ("随机森林交叉验证:")
print score2.mean()
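# Hedged addition (not in the original script): summarize each
# cross-validation run as mean +/- standard deviation for easier comparison.
print ("Decision tree CV: %.4f +/- %.4f" % (score1.mean(), score1.std()))
print ("Random forest CV: %.4f +/- %.4f" % (score2.mean(), score2.std()))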
|
mit
|
Markus-Goetz/CDS-Invenio-Authorlist
|
modules/websearch/lib/websearch_external_collections_templates.py
|
6
|
7128
|
# -*- coding: utf-8 -*-
## This file is part of Invenio.
## Copyright (C) 2006, 2007, 2008, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Template for the external collections search."""
__revision__ = "$Id$"
import cgi
from invenio.config import CFG_SITE_LANG
from invenio.messages import gettext_set_language
from invenio.urlutils import create_html_link
class Template:
"""Template class for the external collection search. To be loaded with template.load()"""
def __init__(self):
pass
def external_collection_seealso_box(self, lang, links,
prolog_start='<table class="externalcollectionsbox"><tr><th colspan="2" class="externalcollectionsboxheader">',
prolog_end='</th></tr><tr><td class="externalcollectionsboxbody">',
column_separator='</td><td class="externalcollectionsboxbody">',
link_separator= '<br />', epilog='</td></tr></table>'):
"""Creates the box that proposes links to other useful search engines like Google.
lang: string - The language to display in
links: list of string - List of links to display in the box
prolog_start, prolog_end, column_separator, link_separator, epilog: strings -
default HTML code for the specified position in the box"""
_ = gettext_set_language(lang)
out = ""
if links:
out += '<a name="externalcollectionsbox"></a>'
out += prolog_start
out += _("Haven't found what you were looking for? Try your search on other servers:")
out += prolog_end
nb_out_links_in_one_column = len(links)/2 + len(links) % 2
out += link_separator.join(links[:nb_out_links_in_one_column])
out += column_separator
out += link_separator.join(links[nb_out_links_in_one_column:])
out += epilog
return out
def external_collection_overview(self, lang=CFG_SITE_LANG, engine_list=()):
"""Prints results overview box with links to particular collections below.
lang: The language to display
engine_list: The external engines to be used"""
if len(engine_list) < 1:
return ""
_ = gettext_set_language(lang)
out = """
<table class="externalcollectionsresultsbox">
<thead>
<tr>
<th class="externalcollectionsresultsboxheader"><strong>%s</strong></th>
</tr>
</thead>
<tbody>
<tr>
<td class="externalcollectionsresultsboxbody"> """ % _("External collections results overview:")
for engine in engine_list:
internal_name = get_link_name(engine.name)
name = _(engine.name)
out += """<strong><a href="#%(internal_name)s">%(name)s</a></strong><br />""" % locals()
out += """
</td>
</tr>
</tbody>
</table>
"""
return out
def print_info_line(req,
html_external_engine_name_box,
html_external_engine_nb_results_box,
html_external_engine_nb_seconds_box):
"""Print on req an information line about results of an external collection search."""
req.write('<table class="externalcollectionsresultsbox"><tr>')
req.write('<td class="externalcollectionsresultsboxheader">')
req.write('<big><strong>' + \
html_external_engine_name_box + \
'</strong></big>')
req.write(' ')
req.write(html_external_engine_nb_results_box)
req.write('</td><td class="externalcollectionsresultsboxheader" width="20%" align="right">')
req.write('<small>' + \
html_external_engine_nb_seconds_box + \
'</small>')
req.write('</td></tr></table><br />')
def print_timeout(req, lang, engine, name, url):
"""Print info line for timeout."""
_ = gettext_set_language(lang)
req.write('<a name="%s"></a>' % get_link_name(engine.name))
print_info_line(req,
create_html_link(url, {}, name, {}, False, False),
'',
_('Search timed out.'))
message = _("The external search engine has not responded in time. You can check its results here:")
req.write(message + ' ' + create_html_link(url, {}, name, {}, False, False) + '<br />')
def get_link_name(name):
"""Return a hash string for the string name."""
return hex(abs(name.__hash__()))
def print_results(req, lang, pagegetter, infos, current_time):
"""Print results of a given search engine.
current_time is actually the duration of the request's execution, expressed in seconds.
"""
_ = gettext_set_language(lang)
url = infos[0]
engine = infos[1]
user_url = infos[2]
internal_name = get_link_name(engine.name)
name = _(engine.name)
base_url = engine.base_url
results = engine.parser.parse_and_get_results(pagegetter.data)
html_tit = make_url(name, base_url)
num = format_number(engine.parser.parse_num_results())
if num:
if num == '0':
html_num = _('No results found.')
html_sec = ''
else:
html_num = '<strong>' + \
make_url(_('%s results found') % num, user_url or url) + \
'</strong>'
html_sec = '(' + _('%s seconds') % ('%2.2f' % current_time) + ')'
else:
html_num = _('No results found.')
html_sec = ''
req.write('<a name="%(internal_name)s"></a>' % locals())
print_info_line(req,
html_tit,
html_num,
html_sec)
for result in results:
req.write(result.html + '<br />')
if not results:
req.write(_('No results found.') + '<br />')
def make_url(name, url):
if url:
return '<a href="' + cgi.escape(url) + '">' + name + '</a>'
else:
return name
def format_number(num, separator=','):
"""Format a number by separating thousands with a separator (by default a comma)
>>> format_number(10)
'10'
>>> format_number(10000)
'10,000'
>>> format_number(' 000213212424249 ', '.')
'213.212.424.249'
"""
result = ""
try:
num = int(num)
except (TypeError, ValueError):
return None
if num == 0:
return '0'
while num > 0:
part = num % 1000
num = num / 1000
result = "%03d" % part + separator + result
return result.strip('0').strip(separator)
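# Hedged sketch (added; not part of Invenio): exercising the small helpers
# above without a live request object. The engine name is invented.
def _demo_helpers():
    assert make_url('CERN', 'http://cern.ch') == '<a href="http://cern.ch">CERN</a>'
    assert format_number(1234567) == '1,234,567'
    return get_link_name('Google')  # stable hex string used as anchor name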
|
gpl-2.0
|
ayumilong/rethinkdb
|
test/rql_test/connections/http_support/flask/testsuite/helpers.py
|
405
|
21973
|
# -*- coding: utf-8 -*-
"""
flask.testsuite.helpers
~~~~~~~~~~~~~~~~~~~~~~~
Various helpers.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import flask
import unittest
from logging import StreamHandler
from flask.testsuite import FlaskTestCase, catch_warnings, catch_stderr
from werkzeug.http import parse_cache_control_header, parse_options_header
from flask._compat import StringIO, text_type
def has_encoding(name):
try:
import codecs
codecs.lookup(name)
return True
except LookupError:
return False
class JSONTestCase(FlaskTestCase):
def test_json_bad_requests(self):
app = flask.Flask(__name__)
@app.route('/json', methods=['POST'])
def return_json():
return flask.jsonify(foo=text_type(flask.request.get_json()))
c = app.test_client()
rv = c.post('/json', data='malformed', content_type='application/json')
self.assert_equal(rv.status_code, 400)
def test_json_body_encoding(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
return flask.request.get_json()
c = app.test_client()
resp = c.get('/', data=u'"Hällo Wörld"'.encode('iso-8859-15'),
content_type='application/json; charset=iso-8859-15')
self.assert_equal(resp.data, u'Hällo Wörld'.encode('utf-8'))
def test_jsonify(self):
d = dict(a=23, b=42, c=[1, 2, 3])
app = flask.Flask(__name__)
@app.route('/kw')
def return_kwargs():
return flask.jsonify(**d)
@app.route('/dict')
def return_dict():
return flask.jsonify(d)
c = app.test_client()
for url in '/kw', '/dict':
rv = c.get(url)
self.assert_equal(rv.mimetype, 'application/json')
self.assert_equal(flask.json.loads(rv.data), d)
def test_json_as_unicode(self):
app = flask.Flask(__name__)
app.config['JSON_AS_ASCII'] = True
with app.app_context():
rv = flask.json.dumps(u'\N{SNOWMAN}')
self.assert_equal(rv, '"\\u2603"')
app.config['JSON_AS_ASCII'] = False
with app.app_context():
rv = flask.json.dumps(u'\N{SNOWMAN}')
self.assert_equal(rv, u'"\u2603"')
def test_json_attr(self):
app = flask.Flask(__name__)
@app.route('/add', methods=['POST'])
def add():
json = flask.request.get_json()
return text_type(json['a'] + json['b'])
c = app.test_client()
rv = c.post('/add', data=flask.json.dumps({'a': 1, 'b': 2}),
content_type='application/json')
self.assert_equal(rv.data, b'3')
def test_template_escaping(self):
app = flask.Flask(__name__)
render = flask.render_template_string
with app.test_request_context():
rv = flask.json.htmlsafe_dumps('</script>')
self.assert_equal(rv, u'"\\u003c/script\\u003e"')
self.assert_equal(type(rv), text_type)
rv = render('{{ "</script>"|tojson }}')
self.assert_equal(rv, '"\\u003c/script\\u003e"')
rv = render('{{ "<\0/script>"|tojson }}')
self.assert_equal(rv, '"\\u003c\\u0000/script\\u003e"')
rv = render('{{ "<!--<script>"|tojson }}')
self.assert_equal(rv, '"\\u003c!--\\u003cscript\\u003e"')
rv = render('{{ "&"|tojson }}')
self.assert_equal(rv, '"\\u0026"')
rv = render('{{ "\'"|tojson }}')
self.assert_equal(rv, '"\\u0027"')
rv = render("<a ng-data='{{ data|tojson }}'></a>",
data={'x': ["foo", "bar", "baz'"]})
self.assert_equal(rv,
'<a ng-data=\'{"x": ["foo", "bar", "baz\\u0027"]}\'></a>')
def test_json_customization(self):
class X(object):
def __init__(self, val):
self.val = val
class MyEncoder(flask.json.JSONEncoder):
def default(self, o):
if isinstance(o, X):
return '<%d>' % o.val
return flask.json.JSONEncoder.default(self, o)
class MyDecoder(flask.json.JSONDecoder):
def __init__(self, *args, **kwargs):
kwargs.setdefault('object_hook', self.object_hook)
flask.json.JSONDecoder.__init__(self, *args, **kwargs)
def object_hook(self, obj):
if len(obj) == 1 and '_foo' in obj:
return X(obj['_foo'])
return obj
app = flask.Flask(__name__)
app.testing = True
app.json_encoder = MyEncoder
app.json_decoder = MyDecoder
@app.route('/', methods=['POST'])
def index():
return flask.json.dumps(flask.request.get_json()['x'])
c = app.test_client()
rv = c.post('/', data=flask.json.dumps({
'x': {'_foo': 42}
}), content_type='application/json')
self.assertEqual(rv.data, b'"<42>"')
def test_modified_url_encoding(self):
class ModifiedRequest(flask.Request):
url_charset = 'euc-kr'
app = flask.Flask(__name__)
app.testing = True
app.request_class = ModifiedRequest
app.url_map.charset = 'euc-kr'
@app.route('/')
def index():
return flask.request.args['foo']
rv = app.test_client().get(u'/?foo=정상처리'.encode('euc-kr'))
self.assert_equal(rv.status_code, 200)
self.assert_equal(rv.data, u'정상처리'.encode('utf-8'))
if not has_encoding('euc-kr'):
test_modified_url_encoding = None
def test_json_key_sorting(self):
app = flask.Flask(__name__)
app.testing = True
self.assert_equal(app.config['JSON_SORT_KEYS'], True)
d = dict.fromkeys(range(20), 'foo')
@app.route('/')
def index():
return flask.jsonify(values=d)
c = app.test_client()
rv = c.get('/')
lines = [x.strip() for x in rv.data.strip().decode('utf-8').splitlines()]
self.assert_equal(lines, [
'{',
'"values": {',
'"0": "foo",',
'"1": "foo",',
'"2": "foo",',
'"3": "foo",',
'"4": "foo",',
'"5": "foo",',
'"6": "foo",',
'"7": "foo",',
'"8": "foo",',
'"9": "foo",',
'"10": "foo",',
'"11": "foo",',
'"12": "foo",',
'"13": "foo",',
'"14": "foo",',
'"15": "foo",',
'"16": "foo",',
'"17": "foo",',
'"18": "foo",',
'"19": "foo"',
'}',
'}'
])
class SendfileTestCase(FlaskTestCase):
def test_send_file_regular(self):
app = flask.Flask(__name__)
with app.test_request_context():
rv = flask.send_file('static/index.html')
self.assert_true(rv.direct_passthrough)
self.assert_equal(rv.mimetype, 'text/html')
with app.open_resource('static/index.html') as f:
rv.direct_passthrough = False
self.assert_equal(rv.data, f.read())
rv.close()
def test_send_file_xsendfile(self):
app = flask.Flask(__name__)
app.use_x_sendfile = True
with app.test_request_context():
rv = flask.send_file('static/index.html')
self.assert_true(rv.direct_passthrough)
self.assert_in('x-sendfile', rv.headers)
self.assert_equal(rv.headers['x-sendfile'],
os.path.join(app.root_path, 'static/index.html'))
self.assert_equal(rv.mimetype, 'text/html')
rv.close()
def test_send_file_object(self):
app = flask.Flask(__name__)
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f)
rv.direct_passthrough = False
with app.open_resource('static/index.html') as f:
self.assert_equal(rv.data, f.read())
self.assert_equal(rv.mimetype, 'text/html')
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
app.use_x_sendfile = True
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f)
self.assert_equal(rv.mimetype, 'text/html')
self.assert_in('x-sendfile', rv.headers)
self.assert_equal(rv.headers['x-sendfile'],
os.path.join(app.root_path, 'static/index.html'))
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
app.use_x_sendfile = False
with app.test_request_context():
with catch_warnings() as captured:
f = StringIO('Test')
rv = flask.send_file(f)
rv.direct_passthrough = False
self.assert_equal(rv.data, b'Test')
self.assert_equal(rv.mimetype, 'application/octet-stream')
rv.close()
# etags
self.assert_equal(len(captured), 1)
with catch_warnings() as captured:
f = StringIO('Test')
rv = flask.send_file(f, mimetype='text/plain')
rv.direct_passthrough = False
self.assert_equal(rv.data, b'Test')
self.assert_equal(rv.mimetype, 'text/plain')
rv.close()
# etags
self.assert_equal(len(captured), 1)
app.use_x_sendfile = True
with catch_warnings() as captured:
with app.test_request_context():
f = StringIO('Test')
rv = flask.send_file(f)
self.assert_not_in('x-sendfile', rv.headers)
rv.close()
# etags
self.assert_equal(len(captured), 1)
def test_attachment(self):
app = flask.Flask(__name__)
with catch_warnings() as captured:
with app.test_request_context():
f = open(os.path.join(app.root_path, 'static/index.html'))
rv = flask.send_file(f, as_attachment=True)
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
rv.close()
# mimetypes + etag
self.assert_equal(len(captured), 2)
with app.test_request_context():
self.assert_equal(options['filename'], 'index.html')
rv = flask.send_file('static/index.html', as_attachment=True)
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
self.assert_equal(options['filename'], 'index.html')
rv.close()
with app.test_request_context():
rv = flask.send_file(StringIO('Test'), as_attachment=True,
attachment_filename='index.txt',
add_etags=False)
self.assert_equal(rv.mimetype, 'text/plain')
value, options = parse_options_header(rv.headers['Content-Disposition'])
self.assert_equal(value, 'attachment')
self.assert_equal(options['filename'], 'index.txt')
rv.close()
def test_static_file(self):
app = flask.Flask(__name__)
# default cache timeout is 12 hours
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 12 * 60 * 60)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 12 * 60 * 60)
rv.close()
app.config['SEND_FILE_MAX_AGE_DEFAULT'] = 3600
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 3600)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 3600)
rv.close()
class StaticFileApp(flask.Flask):
def get_send_file_max_age(self, filename):
return 10
app = StaticFileApp(__name__)
with app.test_request_context():
# Test with static file handler.
rv = app.send_static_file('index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 10)
rv.close()
# Test again with direct use of send_file utility.
rv = flask.send_file('static/index.html')
cc = parse_cache_control_header(rv.headers['Cache-Control'])
self.assert_equal(cc.max_age, 10)
rv.close()
class LoggingTestCase(FlaskTestCase):
def test_logger_cache(self):
app = flask.Flask(__name__)
logger1 = app.logger
self.assert_true(app.logger is logger1)
self.assert_equal(logger1.name, __name__)
app.logger_name = __name__ + '/test_logger_cache'
self.assert_true(app.logger is not logger1)
def test_debug_log(self):
app = flask.Flask(__name__)
app.debug = True
@app.route('/')
def index():
app.logger.warning('the standard library is dead')
app.logger.debug('this is a debug statement')
return ''
@app.route('/exc')
def exc():
1 // 0
with app.test_client() as c:
with catch_stderr() as err:
c.get('/')
out = err.getvalue()
self.assert_in('WARNING in helpers [', out)
self.assert_in(os.path.basename(__file__.rsplit('.', 1)[0] + '.py'), out)
self.assert_in('the standard library is dead', out)
self.assert_in('this is a debug statement', out)
with catch_stderr() as err:
try:
c.get('/exc')
except ZeroDivisionError:
pass
else:
self.assert_true(False, 'debug log ate the exception')
def test_debug_log_override(self):
app = flask.Flask(__name__)
app.debug = True
app.logger_name = 'flask_tests/test_debug_log_override'
app.logger.level = 10
self.assert_equal(app.logger.level, 10)
def test_exception_logging(self):
out = StringIO()
app = flask.Flask(__name__)
app.logger_name = 'flask_tests/test_exception_logging'
app.logger.addHandler(StreamHandler(out))
@app.route('/')
def index():
1 // 0
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 500)
self.assert_in(b'Internal Server Error', rv.data)
err = out.getvalue()
self.assert_in('Exception on / [GET]', err)
self.assert_in('Traceback (most recent call last):', err)
self.assert_in('1 // 0', err)
self.assert_in('ZeroDivisionError:', err)
def test_processor_exceptions(self):
app = flask.Flask(__name__)
@app.before_request
def before_request():
if trigger == 'before':
1 // 0
@app.after_request
def after_request(response):
if trigger == 'after':
1 // 0
return response
@app.route('/')
def index():
return 'Foo'
@app.errorhandler(500)
def internal_server_error(e):
return 'Hello Server Error', 500
for trigger in 'before', 'after':
rv = app.test_client().get('/')
self.assert_equal(rv.status_code, 500)
self.assert_equal(rv.data, b'Hello Server Error')
def test_url_for_with_anchor(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_equal(flask.url_for('index', _anchor='x y'),
'/#x%20y')
def test_url_for_with_scheme(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_equal(flask.url_for('index',
_external=True,
_scheme='https'),
'https://localhost/')
def test_url_for_with_scheme_not_external(self):
app = flask.Flask(__name__)
@app.route('/')
def index():
return '42'
with app.test_request_context():
self.assert_raises(ValueError,
flask.url_for,
'index',
_scheme='https')
def test_url_with_method(self):
from flask.views import MethodView
app = flask.Flask(__name__)
class MyView(MethodView):
def get(self, id=None):
if id is None:
return 'List'
return 'Get %d' % id
def post(self):
return 'Create'
myview = MyView.as_view('myview')
app.add_url_rule('/myview/', methods=['GET'],
view_func=myview)
app.add_url_rule('/myview/<int:id>', methods=['GET'],
view_func=myview)
app.add_url_rule('/myview/create', methods=['POST'],
view_func=myview)
with app.test_request_context():
self.assert_equal(flask.url_for('myview', _method='GET'),
'/myview/')
self.assert_equal(flask.url_for('myview', id=42, _method='GET'),
'/myview/42')
self.assert_equal(flask.url_for('myview', _method='POST'),
'/myview/create')
class NoImportsTestCase(FlaskTestCase):
"""Test Flasks are created without import.
Avoiding ``__import__`` helps create Flask instances where there are errors
at import time. Those runtime errors will be apparent to the user soon
enough, but tools which build Flask instances meta-programmatically benefit
from a Flask which does not ``__import__``. Instead of importing to
retrieve file paths or metadata on a module or package, use the pkgutil and
imp modules in the Python standard library.
"""
def test_name_with_import_error(self):
try:
flask.Flask('importerror')
except NotImplementedError:
self.fail('Flask(import_name) is importing import_name.')
class StreamingTestCase(FlaskTestCase):
def test_streaming_with_context(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
def generate():
yield 'Hello '
yield flask.request.args['name']
yield '!'
return flask.Response(flask.stream_with_context(generate()))
c = app.test_client()
rv = c.get('/?name=World')
self.assertEqual(rv.data, b'Hello World!')
def test_streaming_with_context_as_decorator(self):
app = flask.Flask(__name__)
app.testing = True
@app.route('/')
def index():
@flask.stream_with_context
def generate():
yield 'Hello '
yield flask.request.args['name']
yield '!'
return flask.Response(generate())
c = app.test_client()
rv = c.get('/?name=World')
self.assertEqual(rv.data, b'Hello World!')
def test_streaming_with_context_and_custom_close(self):
app = flask.Flask(__name__)
app.testing = True
called = []
class Wrapper(object):
def __init__(self, gen):
self._gen = gen
def __iter__(self):
return self
def close(self):
called.append(42)
def __next__(self):
return next(self._gen)
next = __next__
@app.route('/')
def index():
def generate():
yield 'Hello '
yield flask.request.args['name']
yield '!'
return flask.Response(flask.stream_with_context(
Wrapper(generate())))
c = app.test_client()
rv = c.get('/?name=World')
self.assertEqual(rv.data, b'Hello World!')
self.assertEqual(called, [42])
def suite():
suite = unittest.TestSuite()
if flask.json_available:
suite.addTest(unittest.makeSuite(JSONTestCase))
suite.addTest(unittest.makeSuite(SendfileTestCase))
suite.addTest(unittest.makeSuite(LoggingTestCase))
suite.addTest(unittest.makeSuite(NoImportsTestCase))
suite.addTest(unittest.makeSuite(StreamingTestCase))
return suite
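if __name__ == '__main__':
    # Hedged addition (not in the original file): allow running this
    # module's suite directly with the stdlib test runner.
    unittest.main(defaultTest='suite')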
|
agpl-3.0
|
llhe/tensorflow
|
tensorflow/contrib/keras/python/keras/datasets/cifar.py
|
86
|
1542
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities used by the CIFAR10 and CIFAR100 datasets.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from six.moves import cPickle
def load_batch(fpath, label_key='labels'):
"""Internal utility for parsing CIFAR data.
Arguments:
fpath: path the file to parse.
label_key: key for label data in the retrieve
dictionary.
Returns:
A tuple `(data, labels)`.
"""
f = open(fpath, 'rb')
if sys.version_info < (3,):
d = cPickle.load(f)
else:
d = cPickle.load(f, encoding='bytes')
# decode utf8
d_decoded = {}
for k, v in d.items():
d_decoded[k.decode('utf8')] = v
d = d_decoded
f.close()
data = d['data']
labels = d[label_key]
data = data.reshape(data.shape[0], 3, 32, 32)
return data, labels
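# Hedged usage sketch (added; not part of the module). The path below is a
# hypothetical location of one extracted CIFAR-10 batch file.
def _demo_load_batch():
  data, labels = load_batch('cifar-10-batches-py/data_batch_1')
  # load_batch returns channels_first data of shape (10000, 3, 32, 32);
  # transpose to channels_last for plotting with matplotlib and friends.
  data = data.transpose(0, 2, 3, 1)
  return data.shape, len(labels)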
|
apache-2.0
|
Wopple/fimbulvetr
|
src/client/fx.py
|
1
|
3637
|
import os
import sys
import pygame
from common.constants import *
from client.constants import *
from common import boundint
from common.util.rect import Rect
class FX(object):
def __init__(self, inPos, inFacing, inType):
self.preciseLoc = inPos
self.facingRight = inFacing
self.frames = []
self.setType(inType)
self.frameNum = 0
self.subframeNum = 0
self.removeFlag = False
self.setImage()
def update(self):
self.subframeNum += 1
if self.subframeNum >= self.frames[self.frameNum].length:
self.subframeNum = 0
self.frameNum += 1
if self.frameNum == len(self.frames):
self.removeFlag = True
else:
self.setImage()
self.rect.topleft = self.getRectPos()
def draw(self, screen, inOffset):
screen.blit(self.image, add_points(self.rect.topleft, inOffset))
def setImage(self):
f = self.frames[self.frameNum]
inImage = f.image
o = f.offset
size = inImage.get_size()
if self.facingRight:
offset = o
self.image = inImage
else:
offset = (-o[0] + size[0], o[1])
self.image = pygame.transform.flip(inImage, True, False)
self.offset = offset
self.rect = Rect(self.getRectPos(), size)
def getRectPos(self):
return ( int(self.preciseLoc[0]) - self.offset[0],
int(self.preciseLoc[1]) - self.offset[1] )
def setType(self, t):
if t == 'pow':
f = [ [0, 3],
[1, 1],
[2, 1],
[3, 1] ]
elif t == 'side':
f = [ [7, 3],
[4, 1],
[5, 1],
[6, 1] ]
elif t == 'block':
f = [ [8, 3],
[9, 1],
[10, 1],
[11, 1] ]
elif t == 'grab':
f = [ [12, 4],
[13, 3]]
elif t == 'dust':
f = [ [14, 3],
[15, 2],
[16, 2],
[17, 2],
[18, 2] ]
elif t == 'shockwave':
f = [ [19, 2],
[20, 1],
[21, 1],
[22, 1],
[23, 1] ]
elif t == 'airelementshockwave':
f = [ [24, 2],
[25, 1],
[26, 1],
[27, 1],
[28, 1] ]
elif t == 'runicexplosion':
f = [ [29, 2],
[30, 1],
[31, 2],
[32, 2],
[33, 2] ]
elif t == 'runicflame1':
f = [ [35, 3],
[36, 1],
[37, 1],
[38, 1] ]
elif t == 'runicflame2':
f = [ [39, 3],
[40, 2],
[41, 1]]
elif t == 'runicflame3':
f = [ [42, 3],
[43, 2],
[44, 1]]
elif t == 'fullshockwave':
f = [ [45, 3],
[46, 2],
[47, 2],
[48, 2],
[49, 2],
[50, 2] ]
for i in f:
self.frames.append(Frame(FX_IMAGES[i[0]][0],
FX_IMAGES[i[0]][1], i[1]))
class Frame(object):
def __init__(self, inImage, inOffset, inLength):
self.image = inImage
self.offset = inOffset
self.length = inLength
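# Hedged sketch (added; not part of the game code): stepping an existing FX
# until it flags itself for removal, mirroring what the per-tick game loop
# does with update().
def _demo_fx_lifetime(fx):
    ticks = 0
    while not fx.removeFlag:
        fx.update()
        ticks += 1
    return ticks  # equals the sum of Frame.length values for the effect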
|
bsd-3-clause
|
troya2/pjsip
|
pjsip-apps/src/pygui/application.py
|
24
|
15342
|
# $Id: application.py 4798 2014-03-19 21:20:17Z bennylp $
#
# pjsua Python GUI Demo
#
# Copyright (C)2013 Teluu Inc. (http://www.teluu.com)
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import sys
if sys.version_info[0] >= 3: # Python 3
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox as msgbox
else:
import Tkinter as tk
import tkMessageBox as msgbox
import ttk
import pjsua2 as pj
import log
import accountsetting
import account
import buddy
import endpoint
import settings
import os
import traceback
# You may try to enable pjsua worker thread by setting USE_THREADS below to True *and*
# recreate the swig module with adding -threads option to swig (uncomment USE_THREADS
# in swig/python/Makefile). In my experiment this would crash Python as reported in:
# http://lists.pjsip.org/pipermail/pjsip_lists.pjsip.org/2014-March/017223.html
USE_THREADS = False
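# Hedged aside (added; not part of the demo app): the menu-building code in
# Application binds loop variables with a lambda default argument, e.g.
# cmd = lambda arg=label: self._onAccContextMenu(arg). Without the default,
# every lambda would see only the *last* loop value:
def _demo_late_binding():
    bound = [lambda i=i: i for i in range(3)]
    unbound = [lambda: i for i in range(3)]
    return [f() for f in bound], [f() for f in unbound]  # ([0, 1, 2], [2, 2, 2])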
class Application(ttk.Frame):
"""
The Application main frame.
"""
def __init__(self):
global USE_THREADS
ttk.Frame.__init__(self, name='application', width=300, height=500)
self.pack(expand='yes', fill='both')
self.master.title('pjsua2 Demo')
self.master.geometry('500x500+100+100')
# Logger
self.logger = log.Logger()
# Accounts
self.accList = []
# GUI variables
self.showLogWindow = tk.IntVar(value=0)
self.quitting = False
# Construct GUI
self._createWidgets()
# Log window
self.logWindow = log.LogWindow(self)
self._onMenuShowHideLogWindow()
# Instantiate endpoint
self.ep = endpoint.Endpoint()
self.ep.libCreate()
# Default config
self.appConfig = settings.AppConfig()
if USE_THREADS:
self.appConfig.epConfig.uaConfig.threadCnt = 1
self.appConfig.epConfig.uaConfig.mainThreadOnly = False
else:
self.appConfig.epConfig.uaConfig.threadCnt = 0
self.appConfig.epConfig.uaConfig.mainThreadOnly = True
self.appConfig.epConfig.logConfig.writer = self.logger
self.appConfig.epConfig.logConfig.filename = "pygui.log"
self.appConfig.epConfig.logConfig.fileFlags = pj.PJ_O_APPEND
self.appConfig.epConfig.logConfig.level = 5
self.appConfig.epConfig.logConfig.consoleLevel = 5
def saveConfig(self, filename='pygui.js'):
# Save disabled accounts since they are not listed in self.accList
disabled_accs = [ac for ac in self.appConfig.accounts if not ac.enabled]
self.appConfig.accounts = []
# Get account configs from active accounts
for acc in self.accList:
acfg = settings.AccConfig()
acfg.enabled = True
acfg.config = acc.cfg
for bud in acc.buddyList:
acfg.buddyConfigs.append(bud.cfg)
self.appConfig.accounts.append(acfg)
# Put back disabled accounts
self.appConfig.accounts.extend(disabled_accs)
# Save
self.appConfig.saveFile(filename)
def start(self, cfg_file='pygui.js'):
global USE_THREADS
# Load config
if cfg_file and os.path.exists(cfg_file):
self.appConfig.loadFile(cfg_file)
if USE_THREADS:
self.appConfig.epConfig.uaConfig.threadCnt = 1
self.appConfig.epConfig.uaConfig.mainThreadOnly = False
else:
self.appConfig.epConfig.uaConfig.threadCnt = 0
self.appConfig.epConfig.uaConfig.mainThreadOnly = True
self.appConfig.epConfig.logConfig.writer = self.logger
self.appConfig.epConfig.logConfig.level = 5
self.appConfig.epConfig.logConfig.consoleLevel = 5
# Initialize library
self.appConfig.epConfig.uaConfig.userAgent = "pygui-" + self.ep.libVersion().full;
self.ep.libInit(self.appConfig.epConfig)
self.master.title('pjsua2 Demo version ' + self.ep.libVersion().full)
# Create transports
if self.appConfig.udp.enabled:
self.ep.transportCreate(self.appConfig.udp.type, self.appConfig.udp.config)
if self.appConfig.tcp.enabled:
self.ep.transportCreate(self.appConfig.tcp.type, self.appConfig.tcp.config)
if self.appConfig.tls.enabled:
self.ep.transportCreate(self.appConfig.tls.type, self.appConfig.tls.config)
# Add accounts
for cfg in self.appConfig.accounts:
if cfg.enabled:
self._createAcc(cfg.config)
acc = self.accList[-1]
for buddy_cfg in cfg.buddyConfigs:
self._createBuddy(acc, buddy_cfg)
# Start library
self.ep.libStart()
# Start polling
if not USE_THREADS:
self._onTimer()
def updateAccount(self, acc):
if acc.deleting:
return # ignore
iid = str(acc.randId)
text = acc.cfg.idUri
status = acc.statusText()
values = (status,)
if self.tv.exists(iid):
self.tv.item(iid, text=text, values=values)
else:
self.tv.insert('', 'end', iid, open=True, text=text, values=values)
def updateBuddy(self, bud):
iid = 'buddy' + str(bud.randId)
text = bud.cfg.uri
status = bud.statusText()
values = (status,)
if self.tv.exists(iid):
self.tv.item(iid, text=text, values=values)
else:
self.tv.insert(str(bud.account.randId), 'end', iid, open=True, text=text, values=values)
def _createAcc(self, acc_cfg):
acc = account.Account(self)
acc.cfg = acc_cfg
self.accList.append(acc)
self.updateAccount(acc)
acc.create(acc.cfg)
acc.cfgChanged = False
self.updateAccount(acc)
def _createBuddy(self, acc, buddy_cfg):
bud = buddy.Buddy(self)
bud.cfg = buddy_cfg
bud.account = acc
bud.create(acc, bud.cfg)
self.updateBuddy(bud)
acc.buddyList.append(bud)
def _createWidgets(self):
self._createAppMenu()
# Main pane, a Treeview
self.tv = ttk.Treeview(self, columns=('Status'), show='tree')
self.tv.pack(side='top', fill='both', expand='yes', padx=5, pady=5)
self._createContextMenu()
# Handle close event
self.master.protocol("WM_DELETE_WINDOW", self._onClose)
def _createAppMenu(self):
# Main menu bar
top = self.winfo_toplevel()
self.menubar = tk.Menu()
top.configure(menu=self.menubar)
# File menu
file_menu = tk.Menu(self.menubar, tearoff=False)
self.menubar.add_cascade(label="File", menu=file_menu)
file_menu.add_command(label="Add account..", command=self._onMenuAddAccount)
file_menu.add_checkbutton(label="Show/hide log window", command=self._onMenuShowHideLogWindow, variable=self.showLogWindow)
file_menu.add_separator()
file_menu.add_command(label="Settings...", command=self._onMenuSettings)
file_menu.add_command(label="Save Settings", command=self._onMenuSaveSettings)
file_menu.add_separator()
file_menu.add_command(label="Quit", command=self._onMenuQuit)
# Window menu
self.window_menu = tk.Menu(self.menubar, tearoff=False)
self.menubar.add_cascade(label="Window", menu=self.window_menu)
# Help menu
help_menu = tk.Menu(self.menubar, tearoff=False)
self.menubar.add_cascade(label="Help", menu=help_menu)
help_menu.add_command(label="About", underline=2, command=self._onMenuAbout)
def _showChatWindow(self, chat_inst):
chat_inst.showWindow()
def updateWindowMenu(self):
# Chat windows
self.window_menu.delete(0, tk.END)
for acc in self.accList:
for c in acc.chatList:
cmd = lambda arg=c: self._showChatWindow(arg)
self.window_menu.add_command(label=c.title, command=cmd)
def _createContextMenu(self):
top = self.winfo_toplevel()
# Create Account context menu
self.accMenu = tk.Menu(top, tearoff=False)
# Labels, must match with _onAccContextMenu()
labels = ['Unregister', 'Reregister', 'Add buddy...', '-',
'Online', 'Invisible', 'Away', 'Busy', '-',
'Settings...', '-',
'Delete...']
for label in labels:
if label=='-':
self.accMenu.add_separator()
else:
cmd = lambda arg=label: self._onAccContextMenu(arg)
self.accMenu.add_command(label=label, command=cmd)
# Create Buddy context menu
# Labels, must match with _onBuddyContextMenu()
self.buddyMenu = tk.Menu(top, tearoff=False)
labels = ['Audio call', 'Send instant message', '-',
'Subscribe', 'Unsubscribe', '-',
'Settings...', '-',
'Delete...']
for label in labels:
if label=='-':
self.buddyMenu.add_separator()
else:
cmd = lambda arg=label: self._onBuddyContextMenu(arg)
self.buddyMenu.add_command(label=label, command=cmd)
if (top.tk.call('tk', 'windowingsystem')=='aqua'):
self.tv.bind('<2>', self._onTvRightClick)
self.tv.bind('<Control-1>', self._onTvRightClick)
else:
self.tv.bind('<3>', self._onTvRightClick)
self.tv.bind('<Double-Button-1>', self._onTvDoubleClick)
def _getSelectedAccount(self):
items = self.tv.selection()
if not items:
return None
try:
iid = int(items[0])
except:
return None
accs = [acc for acc in self.accList if acc.randId==iid]
if not accs:
return None
return accs[0]
def _getSelectedBuddy(self):
items = self.tv.selection()
if not items:
return None
try:
iid = int(items[0][5:])
iid_parent = int(self.tv.parent(items[0]))
except ValueError:
return None
accs = [acc for acc in self.accList if acc.randId==iid_parent]
if not accs:
return None
buds = [b for b in accs[0].buddyList if b.randId==iid]
if not buds:
return None
return buds[0]
def _onTvRightClick(self, event):
iid = self.tv.identify_row(event.y)
#iid = self.tv.identify('item', event.x, event.y)
if iid:
self.tv.selection_set( (iid,) )
acc = self._getSelectedAccount()
if acc:
self.accMenu.post(event.x_root, event.y_root)
else:
# A buddy is selected
self.buddyMenu.post(event.x_root, event.y_root)
def _onTvDoubleClick(self, event):
iid = self.tv.identify_row(event.y)
if iid:
self.tv.selection_set( (iid,) )
acc = self._getSelectedAccount()
if acc:
self.cfgChanged = False
dlg = accountsetting.Dialog(self.master, acc.cfg)
if dlg.doModal():
self.updateAccount(acc)
acc.modify(acc.cfg)
else:
bud = self._getSelectedBuddy()
if not bud:
return
acc = bud.account
chat = acc.findChat(bud.cfg.uri)
if not chat:
chat = acc.newChat(bud.cfg.uri)
chat.showWindow()
def _onAccContextMenu(self, label):
acc = self._getSelectedAccount()
if not acc:
return
if label=='Unregister':
acc.setRegistration(False)
elif label=='Reregister':
acc.setRegistration(True)
elif label=='Online':
ps = pj.PresenceStatus()
ps.status = pj.PJSUA_BUDDY_STATUS_ONLINE
acc.setOnlineStatus(ps)
elif label=='Invisible':
ps = pj.PresenceStatus()
ps.status = pj.PJSUA_BUDDY_STATUS_OFFLINE
acc.setOnlineStatus(ps)
elif label=='Away':
ps = pj.PresenceStatus()
ps.status = pj.PJSUA_BUDDY_STATUS_ONLINE
ps.activity = pj.PJRPID_ACTIVITY_AWAY
ps.note = "Away"
acc.setOnlineStatus(ps)
elif label=='Busy':
ps = pj.PresenceStatus()
ps.status = pj.PJSUA_BUDDY_STATUS_ONLINE
ps.activity = pj.PJRPID_ACTIVITY_BUSY
ps.note = "Busy"
acc.setOnlineStatus(ps)
elif label=='Settings...':
self.cfgChanged = False
dlg = accountsetting.Dialog(self.master, acc.cfg)
if dlg.doModal():
self.updateAccount(acc)
acc.modify(acc.cfg)
elif label=='Delete...':
msg = "Do you really want to delete account '%s'?" % acc.cfg.idUri
if msgbox.askquestion('Delete account?', msg, default=msgbox.NO) != u'yes':
return
iid = str(acc.randId)
self.accList.remove(acc)
acc.setRegistration(False)
acc.deleting = True
del acc
self.tv.delete( (iid,) )
elif label=='Add buddy...':
cfg = pj.BuddyConfig()
dlg = buddy.SettingDialog(self.master, cfg)
if dlg.doModal():
self._createBuddy(acc, cfg)
else:
assert not ("Unknown menu " + label)
def _onBuddyContextMenu(self, label):
bud = self._getSelectedBuddy()
if not bud:
return
acc = bud.account
if label=='Audio call':
chat = acc.findChat(bud.cfg.uri)
if not chat: chat = acc.newChat(bud.cfg.uri)
chat.showWindow()
chat.startCall()
elif label=='Send instant message':
chat = acc.findChat(bud.cfg.uri)
if not chat: chat = acc.newChat(bud.cfg.uri)
chat.showWindow(True)
elif label=='Subscribe':
bud.subscribePresence(True)
elif label=='Unsubscribe':
bud.subscribePresence(False)
elif label=='Settings...':
subs = bud.cfg.subscribe
uri = bud.cfg.uri
dlg = buddy.SettingDialog(self.master, bud.cfg)
if dlg.doModal():
self.updateBuddy(bud)
# URI updated?
if uri != bud.cfg.uri:
cfg = bud.cfg
# del old
iid = 'buddy' + str(bud.randId)
acc.buddyList.remove(bud)
del bud
self.tv.delete( (iid,) )
# add new
self._createBuddy(acc, cfg)
# presence subscribe setting updated
elif subs != bud.cfg.subscribe:
bud.subscribePresence(bud.cfg.subscribe)
elif label=='Delete...':
msg = "Do you really want to delete buddy '%s'?" % bud.cfg.uri
if msgbox.askquestion('Delete buddy?', msg, default=msgbox.NO) != u'yes':
return
iid = 'buddy' + str(bud.randId)
acc.buddyList.remove(bud)
del bud
self.tv.delete( (iid,) )
else:
assert not ("Unknown menu " + label)
def _onTimer(self):
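# poll the pjsua2 event loop (blocking up to 10 ms) and reschedule every 50 ms on the Tk loop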
if not self.quitting:
self.ep.libHandleEvents(10)
if not self.quitting:
self.master.after(50, self._onTimer)
def _onClose(self):
self.saveConfig()
self.quitting = True
self.ep.libDestroy()
self.ep = None
self.update()
self.quit()
def _onMenuAddAccount(self):
cfg = pj.AccountConfig()
dlg = accountsetting.Dialog(self.master, cfg)
if dlg.doModal():
self._createAcc(cfg)
def _onMenuShowHideLogWindow(self):
if self.showLogWindow.get():
self.logWindow.deiconify()
else:
self.logWindow.withdraw()
def _onMenuSettings(self):
dlg = settings.Dialog(self, self.appConfig)
if dlg.doModal():
msgbox.showinfo(self.master.title(), 'You need to restart for new settings to take effect')
def _onMenuSaveSettings(self):
self.saveConfig()
def _onMenuQuit(self):
self._onClose()
def _onMenuAbout(self):
msgbox.showinfo(self.master.title(), 'About')
class ExceptionCatcher:
"""Custom Tk exception catcher, mainly to display more information
from pj.Error exceptions.
"""
def __init__(self, func, subst, widget):
self.func = func
self.subst = subst
self.widget = widget
def __call__(self, *args):
try:
if self.subst:
args = self.subst(*args)
return self.func(*args)
except pj.Error, error:
print 'Exception:'
print ' ', error.info()
print 'Traceback:'
traceback.print_stack()
log.writeLog2(1, 'Exception: ' + error.info() + '\n')
except Exception, error:
print 'Exception:'
print ' ', str(error)
print 'Traceback:'
traceback.print_stack()
log.writeLog2(1, 'Exception: ' + str(error) + '\n')
def main():
#tk.CallWrapper = ExceptionCatcher
app = Application()
app.start()
app.mainloop()
if __name__ == '__main__':
main()
|
gpl-2.0
|
rockyzhang/zhangyanhit-python-for-android-mips
|
python-modules/twisted/twisted/words/xish/xmlstream.py
|
54
|
8534
|
# -*- test-case-name: twisted.words.test.test_xmlstream -*-
#
# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
XML Stream processing.
An XML Stream is defined as a connection over which two XML documents are
exchanged during the lifetime of the connection, one for each direction. The
unit of interaction is a direct child element of the root element (stanza).
The most prominent use of XML Streams is Jabber, but this module is generically
usable. See Twisted Words for Jabber specific protocol support.
Maintainer: Ralph Meijer
"""
from twisted.python import failure
from twisted.internet import protocol
from twisted.words.xish import domish, utility
STREAM_CONNECTED_EVENT = intern("//event/stream/connected")
STREAM_START_EVENT = intern("//event/stream/start")
STREAM_END_EVENT = intern("//event/stream/end")
STREAM_ERROR_EVENT = intern("//event/stream/error")
class XmlStream(protocol.Protocol, utility.EventDispatcher):
""" Generic Streaming XML protocol handler.
This protocol handler will parse incoming data as XML and dispatch events
accordingly. Incoming stanzas can be handled by registering observers using
XPath-like expressions that are matched against each stanza. See
L{utility.EventDispatcher} for details.
"""
def __init__(self):
utility.EventDispatcher.__init__(self)
self.stream = None
self.rawDataOutFn = None
self.rawDataInFn = None
def _initializeStream(self):
""" Sets up XML Parser. """
self.stream = domish.elementStream()
self.stream.DocumentStartEvent = self.onDocumentStart
self.stream.ElementEvent = self.onElement
self.stream.DocumentEndEvent = self.onDocumentEnd
### --------------------------------------------------------------
###
### Protocol events
###
### --------------------------------------------------------------
def connectionMade(self):
""" Called when a connection is made.
Sets up the XML parser and dispatches the L{STREAM_CONNECTED_EVENT}
event indicating the connection has been established.
"""
self._initializeStream()
self.dispatch(self, STREAM_CONNECTED_EVENT)
def dataReceived(self, data):
""" Called whenever data is received.
Passes the data to the XML parser. This can result in calls to the
DOM handlers. If a parse error occurs, the L{STREAM_ERROR_EVENT} event
is dispatched to allow for cleanup actions, followed by dropping the
connection.
"""
try:
if self.rawDataInFn:
self.rawDataInFn(data)
self.stream.parse(data)
except domish.ParserError:
self.dispatch(failure.Failure(), STREAM_ERROR_EVENT)
self.transport.loseConnection()
def connectionLost(self, reason):
""" Called when the connection is shut down.
Dispatches the L{STREAM_END_EVENT}.
"""
self.dispatch(self, STREAM_END_EVENT)
self.stream = None
### --------------------------------------------------------------
###
### DOM events
###
### --------------------------------------------------------------
def onDocumentStart(self, rootElement):
""" Called whenever the start tag of a root element has been received.
Dispatches the L{STREAM_START_EVENT}.
"""
self.dispatch(self, STREAM_START_EVENT)
def onElement(self, element):
""" Called whenever a direct child element of the root element has
been received.
Dispatches the received element.
"""
self.dispatch(element)
def onDocumentEnd(self):
""" Called whenever the end tag of the root element has been received.
Closes the connection. This causes C{connectionLost} to be called.
"""
self.transport.loseConnection()
def setDispatchFn(self, fn):
""" Set another function to handle elements. """
self.stream.ElementEvent = fn
def resetDispatchFn(self):
""" Set the default function (C{onElement}) to handle elements. """
self.stream.ElementEvent = self.onElement
def send(self, obj):
""" Send data over the stream.
Sends the given C{obj} over the connection. C{obj} may be instances of
L{domish.Element}, L{unicode} and L{str}. The first two will be
properly serialized and/or encoded. L{str} objects must be in UTF-8
encoding.
Note: because it is easy to make mistakes in maintaining a properly
encoded L{str} object, it is advised to use L{unicode} objects
everywhere when dealing with XML Streams.
@param obj: Object to be sent over the stream.
@type obj: L{domish.Element}, L{unicode} or L{str}
"""
if domish.IElement.providedBy(obj):
obj = obj.toXml()
if isinstance(obj, unicode):
obj = obj.encode('utf-8')
if self.rawDataOutFn:
self.rawDataOutFn(obj)
self.transport.write(obj)
class BootstrapMixin(object):
"""
XmlStream factory mixin to install bootstrap event observers.
This mixin is for factories providing
L{IProtocolFactory<twisted.internet.interfaces.IProtocolFactory>} to make
sure bootstrap event observers are set up on protocols, before incoming
data is processed. Such protocols typically derive from
L{utility.EventDispatcher}, like L{XmlStream}.
You can set up bootstrap event observers using C{addBootstrap}. The
C{event} and C{fn} parameters correspond with the C{event} and
C{observerfn} arguments to L{utility.EventDispatcher.addObserver}.
@since: 8.2.
@ivar bootstraps: The list of registered bootstrap event observers.
@type bootstraps: C{list}
"""
def __init__(self):
self.bootstraps = []
def installBootstraps(self, dispatcher):
"""
Install registered bootstrap observers.
@param dispatcher: Event dispatcher to add the observers to.
@type dispatcher: L{utility.EventDispatcher}
"""
for event, fn in self.bootstraps:
dispatcher.addObserver(event, fn)
def addBootstrap(self, event, fn):
"""
Add a bootstrap event handler.
@param event: The event to register an observer for.
@type event: C{str} or L{xpath.XPathQuery}
@param fn: The observer callable to be registered.
"""
self.bootstraps.append((event, fn))
def removeBootstrap(self, event, fn):
"""
Remove a bootstrap event handler.
@param event: The event the observer is registered for.
@type event: C{str} or L{xpath.XPathQuery}
@param fn: The registered observer callable.
"""
self.bootstraps.remove((event, fn))
class XmlStreamFactoryMixin(BootstrapMixin):
"""
XmlStream factory mixin that takes care of event handlers.
All positional and keyword arguments passed to create this factory are
passed on as-is to the protocol.
@ivar args: Positional arguments passed to the protocol upon instantiation.
@type args: C{tuple}.
@ivar kwargs: Keyword arguments passed to the protocol upon instantiation.
@type kwargs: C{dict}.
"""
def __init__(self, *args, **kwargs):
BootstrapMixin.__init__(self)
self.args = args
self.kwargs = kwargs
def buildProtocol(self, addr):
"""
Create an instance of XmlStream.
The returned instance will have bootstrap event observers registered
and will proceed to handle input on an incoming connection.
"""
xs = self.protocol(*self.args, **self.kwargs)
xs.factory = self
self.installBootstraps(xs)
return xs
class XmlStreamFactory(XmlStreamFactoryMixin,
protocol.ReconnectingClientFactory):
"""
Factory for XmlStream protocol objects as a reconnection client.
"""
protocol = XmlStream
def buildProtocol(self, addr):
"""
Create a protocol instance.
Overrides L{XmlStreamFactoryMixin.buildProtocol} to work with
a L{ReconnectingClientFactory}. As this is called once a connection
has been established, the delay for reconnection attempts is reset
here, so that reconnects after a later connection loss start fresh.
"""
self.resetDelay()
return XmlStreamFactoryMixin.buildProtocol(self, addr)
|
apache-2.0
|
YuriGural/erpnext
|
erpnext/schools/doctype/student_applicant/student_applicant.py
|
22
|
1107
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
class StudentApplicant(Document):
def autoname(self):
from frappe.model.naming import set_name_by_naming_series
if self.student_admission:
naming_series = frappe.db.get_value('Student Admission', self.student_admission,
'naming_series_for_student_applicant')
if naming_series:
self.naming_series = naming_series
set_name_by_naming_series(self)
def validate(self):
self.title = " ".join(filter(None, [self.first_name, self.middle_name, self.last_name]))
def on_update_after_submit(self):
student = frappe.get_list("Student", filters= {"student_applicant": self.name})
if student:
frappe.throw(_("Cannot change status as student {0} is linked with student application {1}").format(student[0].name, self.name))
def on_payment_authorized(self, *args, **kwargs):
self.db_set('paid', 1)
|
gpl-3.0
|
mbareta/edx-platform-ft
|
cms/djangoapps/contentstore/management/commands/migrate_to_split.py
|
185
|
2174
|
"""
Django management command to migrate a course from the old Mongo modulestore
to the new split-Mongo modulestore.
"""
from django.core.management.base import BaseCommand, CommandError
from django.contrib.auth.models import User
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.split_migrator import SplitMigrator
from opaque_keys.edx.keys import CourseKey
from opaque_keys import InvalidKeyError
from xmodule.modulestore import ModuleStoreEnum
from contentstore.management.commands.utils import user_from_str
class Command(BaseCommand):
"""
Migrate a course from old-Mongo to split-Mongo. It reuses the old course id except where overridden.
"""
help = "Migrate a course from old-Mongo to split-Mongo. The new org, course, and run will default to the old one unless overridden"
args = "course_key email <new org> <new course> <new run>"
def parse_args(self, *args):
"""
Return a 5-tuple of passed in values for (course_key, user, org, course, run).
"""
if len(args) < 2:
raise CommandError(
"migrate_to_split requires at least two arguments: "
"a course_key and a user identifier (email or ID)"
)
try:
course_key = CourseKey.from_string(args[0])
except InvalidKeyError:
raise CommandError("Invalid location string")
try:
user = user_from_str(args[1])
except User.DoesNotExist:
raise CommandError("No user found identified by {}".format(args[1]))
org = course = run = None
try:
org = args[2]
course = args[3]
run = args[4]
except IndexError:
pass
return course_key, user.id, org, course, run
def handle(self, *args, **options):
course_key, user, org, course, run = self.parse_args(*args)
migrator = SplitMigrator(
source_modulestore=modulestore(),
split_modulestore=modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split),
)
migrator.migrate_mongo_course(course_key, user, org, course, run)
|
agpl-3.0
|
sowe9385/qiime
|
tests/test_plot_semivariogram.py
|
15
|
12517
|
#!/usr/bin/env python
__author__ = "Antonio Gonzalez Pena"
__copyright__ = "Copyright 2011, The QIIME Project"
__credits__ = ["Antonio Gonzalez Pena"]
__license__ = "GPL"
__version__ = "1.9.1-dev"
__maintainer__ = "Antonio Gonzalez Pena"
__email__ = "antgonza@gmail.com"
from qiime.plot_semivariogram import hist_bins, fit_semivariogram
from unittest import TestCase, main
from numpy.testing import assert_almost_equal
from numpy import asarray
class FunctionTests(TestCase):
"""Tests of top-level functions"""
def test_hist_bins(self):
""" test hist_bins """
x = asarray(
[3.,
4.12310563,
4.24264069,
4.47213595,
5.,
5.,
5.,
5.,
5.38516481,
5.65685425,
6.40312424,
6.40312424,
6.70820393,
7.,
7.07106781,
7.07106781,
7.28010989,
7.81024968,
8.,
8.06225775,
8.06225775,
8.24621125,
9.,
9.48683298,
9.48683298,
9.89949494,
9.89949494,
10.,
10.04987562,
10.04987562])
bins = [2.0, 5.0, 7.5, 10.0, 11.0]
hist_res = [0., 8., 9., 11., 2.]
vals, hist = hist_bins(bins, x)
assert_almost_equal(vals, bins)
assert_almost_equal(hist, hist_res)
def test_reorder_samples(self):
""" test that regural and irregular order give the same results """
model = "linear"
# Test normal order
x_lbl = ['s1', 's2', 's3', 's4', 's5', 's6']
x = asarray(
[[0.0,
1.0,
2.0,
3.0,
4.0,
5.0],
[0.0,
0.0,
6.0,
7.0,
8.0,
9.0],
[0.0,
0.0,
0.0,
10.0,
11.0,
12.0],
[0.0,
0.0,
0.0,
0.0,
13.0,
14.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
15.0]])
y_lbl = ['s1', 's2', 's3', 's4', 's5', 's6']
y = asarray(
[[0.0,
1.0,
2.0,
3.0,
4.0,
5.0],
[0.0,
0.0,
6.0,
7.0,
8.0,
9.0],
[0.0,
0.0,
0.0,
10.0,
11.0,
12.0],
[0.0,
0.0,
0.0,
0.0,
13.0,
14.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
15.0]])
vals_exp = [0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 6.0, 7.0]
x_vals, y_vals, x_fit, y_fit, func_text = fit_semivariogram(
(x_lbl, x), (x_lbl, x), model, [])
assert_almost_equal(x_vals, vals_exp)
assert_almost_equal(y_vals, vals_exp)
assert_almost_equal(x_fit, vals_exp)
assert_almost_equal(y_fit, vals_exp)
# Test altered
model = "linear"
# order = [5, 1, 3, 4, 0, 2]
x_lbl = ['s6', 's2', 's4', 's5', 's1', 's3']
x = asarray(
[[0.0,
0.0,
0.0,
0.0,
0.0,
0.0],
[9.0,
0.0,
7.0,
8.0,
0.0,
6.0],
[14.0,
0.0,
0.0,
13.0,
0.0,
0.0],
[15.0,
0.0,
0.0,
0.0,
0.0,
0.0],
[5.0,
1.0,
3.0,
4.0,
0.0,
2.0],
[12.0,
0.0,
10.0,
11.0,
0.0,
0.0]])
y_lbl = ['s1', 's2', 's3', 's4', 's5', 's6']
y = asarray(
[[0.0,
1.0,
2.0,
3.0,
4.0,
5.0],
[0.0,
0.0,
6.0,
7.0,
8.0,
9.0],
[0.0,
0.0,
0.0,
10.0,
11.0,
12.0],
[0.0,
0.0,
0.0,
0.0,
13.0,
14.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
15.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
0.0]])
vals_exp = [
1.,
2.,
3.,
4.,
5.,
6.,
7.,
8.,
9.,
10.,
11.,
12.,
13.,
14.,
15.]
x_vals, y_vals, x_fit, y_fit, func_text = fit_semivariogram(
(x_lbl, x), (y_lbl, y), model, [])
assert_almost_equal(x_vals, vals_exp)
assert_almost_equal(y_vals, vals_exp)
assert_almost_equal(x_fit, vals_exp)
assert_almost_equal(y_fit, vals_exp)
def test_models_semivariograms(self):
""" test the semivariogram fitting models """
# All models should return the same x_vals, y_vals, x_fit
# because we are using the same x
x_lbl = ['s1', 's2', 's3', 's4', 's5', 's6']
x = asarray(
[[0.0,
1.0,
2.0,
3.0,
4.0,
5.0],
[0.0,
0.0,
6.0,
7.0,
8.0,
9.0],
[0.0,
0.0,
0.0,
10.0,
11.0,
12.0],
[0.0,
0.0,
0.0,
0.0,
13.0,
14.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
15.0]])
vals_exp = [0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 6.0, 7.0]
model = "nugget"
y_lbl = ['s1', 's2', 's3', 's4', 's5', 's6']
y = asarray(
[[0.0,
5.0,
5.0,
5.0,
5.0,
5.0],
[0.0,
0.0,
5.0,
5.0,
5.0,
5.0],
[0.0,
0.0,
0.0,
5.0,
5.0,
5.0],
[0.0,
0.0,
0.0,
0.0,
5.0,
5.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
5.0]])
y_vals_exp = [2.3000000143667378] * (len(x) * 2)
x_vals, y_vals, x_fit, y_fit, func_text = fit_semivariogram(
(x_lbl, x), (x_lbl, x), model, [])
assert_almost_equal(x_vals, vals_exp)
assert_almost_equal(y_vals, vals_exp)
assert_almost_equal(x_fit, vals_exp)
assert_almost_equal(y_fit, y_vals_exp)
model = "exponential"
y_lbl = ['s1', 's2', 's3', 's4', 's5', 's6']
y = asarray(
[[0.0,
1.0,
22.0,
33.0,
44.0,
55.0],
[0.0,
0.0,
66.0,
77.0,
88.0,
99.0],
[0.0,
0.0,
0.0,
1010.0,
1111.0,
1212.0],
[0.0,
0.0,
0.0,
0.0,
1313.0,
1414.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
1515.0]])
x_vals_exp = [0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 6.0, 7.0]
y_vals_exp = [0.0, 0.0, 0.0, 0.0, 1.0, 22.0, 33.0, 44.0, 66.0, 77.0]
x_fit_exp = [0.0, 0.0, 0.0, 0.0, 1.0, 2.0, 3.0, 4.0, 6.0, 7.0]
y_fit_exp = [-1.481486808707005, -1.481486808707005, -1.481486808707005,
-1.481486808707005, 9.72783464904061, 20.937152199747878,
32.14646584698613, 43.355775583612704, 65.7743833464588,
76.983681369107]
x_vals, y_vals, x_fit, y_fit, func_text = fit_semivariogram(
(x_lbl, x), (y_lbl, y), model, [])
assert_almost_equal(x_vals, x_vals_exp)
assert_almost_equal(y_vals, y_vals_exp)
assert_almost_equal(x_fit, x_fit_exp)
assert_almost_equal(y_fit, y_fit_exp, decimal=2)
model = "gaussian"
y_lbl = ['s1', 's2', 's3', 's4', 's5', 's6']
y = asarray(
[[0.0,
1.0,
22.0,
33.0,
44.0,
55.0],
[0.0,
0.0,
66.0,
77.0,
88.0,
99.0],
[0.0,
0.0,
0.0,
1010.0,
1111.0,
1212.0],
[0.0,
0.0,
0.0,
0.0,
1313.0,
1414.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
1515.0]])
y_vals_exp = [0.17373844, 0.17373844, 0.17373844, 0.17373844,
0.54915099, 1.5597716, 2.91606171, 4.2880578,
6.24509872, 6.74690541]
x_vals, y_vals, x_fit, y_fit, func_text = fit_semivariogram(
(x_lbl, x), (x_lbl, x), model, [])
assert_almost_equal(x_vals, vals_exp)
assert_almost_equal(y_vals, vals_exp)
assert_almost_equal(x_fit, vals_exp)
assert_almost_equal(y_fit, y_vals_exp, decimal=2)
model = "periodic"
y_lbl = ['s1', 's2', 's3', 's4', 's5', 's6']
y = asarray(
[[0.0,
1.0,
22.0,
33.0,
44.0,
55.0],
[0.0,
0.0,
66.0,
77.0,
88.0,
99.0],
[0.0,
0.0,
0.0,
1010.0,
1111.0,
1212.0],
[0.0,
0.0,
0.0,
0.0,
1313.0,
1414.0],
[0.0,
0.0,
0.0,
0.0,
0.0,
1515.0]])
y_vals_exp = [0.2324873886681871, 0.2324873886681871,
0.2324873886681871, 0.2324873886681871,
0.5528698895985695, 1.4508010363573784,
2.7491053124879112, 4.191607473962063,
6.39840364731269, 6.727263101495738]
x_vals, y_vals, x_fit, y_fit, func_text = fit_semivariogram(
(x_lbl, x), (x_lbl, x), model, [])
assert_almost_equal(x_vals, vals_exp)
assert_almost_equal(y_vals, vals_exp)
assert_almost_equal(x_fit, vals_exp)
assert_almost_equal(y_fit, y_vals_exp, decimal=2)
model = "linear"
y_lbl = x_lbl
y = x
x_vals, y_vals, x_fit, y_fit, func_text = fit_semivariogram(
(x_lbl, x), (x_lbl, x), model, [])
assert_almost_equal(x_vals, vals_exp)
assert_almost_equal(y_vals, vals_exp)
assert_almost_equal(x_fit, vals_exp)
assert_almost_equal(y_fit, vals_exp, decimal=2)
# run tests if called from command line
if __name__ == '__main__':
main()
|
gpl-2.0
|
deniszh/graphite-web
|
webapp/graphite/node.py
|
4
|
1061
|
class Node(object):
__slots__ = ('name', 'path', 'local', 'is_leaf')
def __init__(self, path):
self.path = path
self.name = path.split('.')[-1]
self.local = True
self.is_leaf = False
def __repr__(self):
return '<%s[%x]: %s>' % (self.__class__.__name__, id(self), self.path)
class BranchNode(Node):
pass
class LeafNode(Node):
__slots__ = ('reader', )
def __init__(self, path, reader):
Node.__init__(self, path)
self.reader = reader
self.is_leaf = True
def fetch(self, startTime, endTime, now=None, requestContext=None):
try:
result = self.reader.fetch(startTime, endTime, now, requestContext)
except TypeError:
# Support for legacy 3rd-party readers.
result = self.reader.fetch(startTime, endTime)
return result
@property
def intervals(self):
return self.reader.get_intervals()
def __repr__(self):
return '<LeafNode[%x]: %s (%s)>' % (id(self), self.path, self.reader)
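# A minimal usage sketch ('MyReader' is hypothetical; any object exposing
# fetch(startTime, endTime, ...) and get_intervals() will do):
#   node = LeafNode('servers.web1.load', MyReader())
#   result = node.fetch(startTime, endTime)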
|
apache-2.0
|
joshuaspence/ThesisCode
|
MATLAB/Lib/matlab_bgl-4.0.1/libmbgl/boost1.36/libs/python/pyste/src/Pyste/pyste.py
|
54
|
14022
|
# Copyright Bruno da Silva de Oliveira 2003. Use, modification and
# distribution is subject to the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
"""
Pyste version %s
Usage:
pyste [options] interface-files
where options are:
--module=<name> The name of the module that will be generated;
defaults to the first interface filename, without
the extension.
-I <path> Add an include path
-D <symbol> Define symbol
--multiple Create various cpps, instead of only one
(useful during development)
--out=<name> Specify output filename (default: <module>.cpp)
in --multiple mode, this will be a directory
--no-using Do not declare "using namespace boost";
use explicit declarations instead
--pyste-ns=<name> Set the namespace where new types will be declared;
default is the empty namespace
--debug Writes the xml for each file parsed in the current
directory
--cache-dir=<dir> Directory for cache files (speeds up future runs)
--only-create-cache Recreates all caches (doesn't generate code).
--generate-main Generates the _main.cpp file (in multiple mode)
--file-list A file with one pyste file per line. Use as a
substitute for passing the files in the command
line.
--gccxml-path=<path> Path to gccxml executable (default: gccxml)
--no-default-include Do not use INCLUDE environment variable for include
files to pass along gccxml.
-h, --help Print this help and exit
-v, --version Print version information
"""
import sys
import os
import getopt
import exporters
import SingleCodeUnit
import MultipleCodeUnit
import infos
import exporterutils
import settings
import gc
import sys
from policies import *
from CppParser import CppParser, CppParserError
import time
import declarations
__version__ = '0.9.30'
def RecursiveIncludes(include):
'Return a list containing the include dir and all its subdirectories'
dirs = [include]
def visit(arg, dir, names):
# ignore CVS dirs
if os.path.split(dir)[1] != 'CVS':
dirs.append(dir)
os.path.walk(include, visit, None)
return dirs
def GetDefaultIncludes():
if 'INCLUDE' in os.environ:
include = os.environ['INCLUDE']
return include.split(os.pathsep)
else:
return []
def ProcessIncludes(includes):
if sys.platform == 'win32':
index = 0
for include in includes:
includes[index] = include.replace('\\', '/')
index += 1
def ReadFileList(filename):
f = file(filename)
files = []
try:
for line in f:
line = line.strip()
if line:
files.append(line)
finally:
f.close()
return files
def ParseArguments():
def Usage():
print __doc__ % __version__
sys.exit(1)
try:
options, files = getopt.getopt(
sys.argv[1:],
'R:I:D:vh',
['module=', 'multiple', 'out=', 'no-using', 'pyste-ns=', 'debug', 'cache-dir=',
'only-create-cache', 'version', 'generate-main', 'file-list=', 'help',
'gccxml-path=', 'no-default-include'])
except getopt.GetoptError, e:
print
print 'ERROR:', e
Usage()
default_includes = GetDefaultIncludes()
includes = []
defines = []
module = None
out = None
multiple = False
cache_dir = None
create_cache = False
generate_main = False
gccxml_path = 'gccxml'
for opt, value in options:
if opt == '-I':
includes.append(value)
elif opt == '-D':
defines.append(value)
elif opt == '-R':
includes.extend(RecursiveIncludes(value))
elif opt == '--module':
module = value
elif opt == '--out':
out = value
elif opt == '--no-using':
settings.namespaces.python = 'boost::python::'
settings.USING_BOOST_NS = False
elif opt == '--pyste-ns':
settings.namespaces.pyste = value + '::'
elif opt == '--debug':
settings.DEBUG = True
elif opt == '--multiple':
multiple = True
elif opt == '--cache-dir':
cache_dir = value
elif opt == '--only-create-cache':
create_cache = True
elif opt == '--file-list':
files += ReadFileList(value)
elif opt in ['-h', '--help']:
Usage()
elif opt in ['-v', '--version']:
print 'Pyste version %s' % __version__
sys.exit(2)
elif opt == '--generate-main':
generate_main = True
elif opt == '--gccxml-path':
gccxml_path = value
elif opt == '--no-default-include':
default_includes = []
else:
print 'Unknown option:', opt
Usage()
includes[0:0] = default_includes
if not files:
Usage()
if not module:
module = os.path.splitext(os.path.basename(files[0]))[0]
if not out:
out = module
if not multiple:
out += '.cpp'
for file in files:
d = os.path.dirname(os.path.abspath(file))
if d not in sys.path:
sys.path.append(d)
if create_cache and not cache_dir:
print 'Error: Use --cache-dir to indicate where to create the cache files!'
Usage()
sys.exit(3)
if generate_main and not multiple:
print 'Error: --generate-main only valid in multiple mode.'
Usage()
sys.exit(3)
ProcessIncludes(includes)
return includes, defines, module, out, files, multiple, cache_dir, create_cache, \
generate_main, gccxml_path
def PCHInclude(*headers):
code = '\n'.join(['#include <%s>' % x for x in headers])
infos.CodeInfo(code, 'pchinclude')
def CreateContext():
'create the context where an interface file will be executed'
context = {}
context['Import'] = Import
# infos
context['Function'] = infos.FunctionInfo
context['Class'] = infos.ClassInfo
context['Include'] = lambda header: infos.CodeInfo('#include <%s>\n' % header, 'include')
context['PCHInclude'] = PCHInclude
context['Template'] = infos.ClassTemplateInfo
context['Enum'] = infos.EnumInfo
context['AllFromHeader'] = infos.HeaderInfo
context['Var'] = infos.VarInfo
# functions
context['rename'] = infos.rename
context['set_policy'] = infos.set_policy
context['exclude'] = infos.exclude
context['set_wrapper'] = infos.set_wrapper
context['use_shared_ptr'] = infos.use_shared_ptr
context['use_auto_ptr'] = infos.use_auto_ptr
context['holder'] = infos.holder
context['add_method'] = infos.add_method
context['final'] = infos.final
context['export_values'] = infos.export_values
# policies
context['return_internal_reference'] = return_internal_reference
context['with_custodian_and_ward'] = with_custodian_and_ward
context['return_value_policy'] = return_value_policy
context['reference_existing_object'] = reference_existing_object
context['copy_const_reference'] = copy_const_reference
context['copy_non_const_reference'] = copy_non_const_reference
context['return_opaque_pointer'] = return_opaque_pointer
context['manage_new_object'] = manage_new_object
context['return_by_value'] = return_by_value
context['return_self'] = return_self
# utils
context['Wrapper'] = exporterutils.FunctionWrapper
context['declaration_code'] = lambda code: infos.CodeInfo(code, 'declaration-outside')
context['module_code'] = lambda code: infos.CodeInfo(code, 'module')
context['class_code'] = infos.class_code
return context
def Begin():
# parse arguments
includes, defines, module, out, interfaces, multiple, cache_dir, create_cache, generate_main, gccxml_path = ParseArguments()
# run pyste scripts
for interface in interfaces:
ExecuteInterface(interface)
# create the parser
parser = CppParser(includes, defines, cache_dir, declarations.version, gccxml_path)
try:
if not create_cache:
if not generate_main:
return GenerateCode(parser, module, out, interfaces, multiple)
else:
return GenerateMain(module, out, OrderInterfaces(interfaces))
else:
return CreateCaches(parser)
finally:
parser.Close()
def CreateCaches(parser):
# There is one cache file per interface so we organize the headers
# by interfaces. For each interface collect the tails from the
# exporters sharing the same header.
tails = JoinTails(exporters.exporters)
# now for each interface file take each header, and using the tail
# get the declarations and cache them.
for interface, header in tails:
tail = tails[(interface, header)]
declarations = parser.ParseWithGCCXML(header, tail)
cachefile = parser.CreateCache(header, interface, tail, declarations)
print 'Cached', cachefile
return 0
_imported_count = {} # interface => count
def ExecuteInterface(interface):
old_interface = exporters.current_interface
if not os.path.exists(interface):
if old_interface and os.path.exists(old_interface):
d = os.path.dirname(old_interface)
interface = os.path.join(d, interface)
if not os.path.exists(interface):
raise IOError, "Cannot find interface file %s."%interface
_imported_count[interface] = _imported_count.get(interface, 0) + 1
exporters.current_interface = interface
context = CreateContext()
context['INTERFACE_FILE'] = os.path.abspath(interface)
execfile(interface, context)
exporters.current_interface = old_interface
def Import(interface):
exporters.importing = True
ExecuteInterface(interface)
exporters.importing = False
def JoinTails(exports):
'''Returns a dict of {(interface, header): tail}, where tail is the
joining of all tails of all exports for the header.
'''
tails = {}
for export in exports:
interface = export.interface_file
header = export.Header()
tail = export.Tail() or ''
if (interface, header) in tails:
all_tails = tails[(interface,header)]
all_tails += '\n' + tail
tails[(interface, header)] = all_tails
else:
tails[(interface, header)] = tail
return tails
def OrderInterfaces(interfaces):
interfaces_order = [(_imported_count[x], x) for x in interfaces]
interfaces_order.sort()
interfaces_order.reverse()
return [x for _, x in interfaces_order]
def GenerateMain(module, out, interfaces):
codeunit = MultipleCodeUnit.MultipleCodeUnit(module, out)
codeunit.GenerateMain(interfaces)
return 0
def GenerateCode(parser, module, out, interfaces, multiple):
# prepare to generate the wrapper code
if multiple:
codeunit = MultipleCodeUnit.MultipleCodeUnit(module, out)
else:
codeunit = SingleCodeUnit.SingleCodeUnit(module, out)
# stop referencing the exporters here
exports = exporters.exporters
exporters.exporters = None
exported_names = dict([(x.Name(), None) for x in exports])
# order the exports
order = {}
for export in exports:
if export.interface_file in order:
order[export.interface_file].append(export)
else:
order[export.interface_file] = [export]
exports = []
interfaces_order = OrderInterfaces(interfaces)
for interface in interfaces_order:
exports.extend(order[interface])
del order
del interfaces_order
# now generate the code in the correct order
#print exported_names
tails = JoinTails(exports)
for i in xrange(len(exports)):
export = exports[i]
interface = export.interface_file
header = export.Header()
if header:
tail = tails[(interface, header)]
declarations, parsed_header = parser.Parse(header, interface, tail)
else:
declarations = []
parsed_header = None
ExpandTypedefs(declarations, exported_names)
export.SetDeclarations(declarations)
export.SetParsedHeader(parsed_header)
if multiple:
codeunit.SetCurrent(export.interface_file, export.Name())
export.GenerateCode(codeunit, exported_names)
# force collect of cyclic references
exports[i] = None
del declarations
del export
gc.collect()
# finally save the code unit
codeunit.Save()
if not multiple:
print 'Module %s generated' % module
return 0
def ExpandTypedefs(decls, exported_names):
'''Check if the names in exported_names are a typedef, and add the real class
name in the dict.
'''
for name in exported_names.keys():
for decl in decls:
if isinstance(decl, declarations.Typedef):
exported_names[decl.type.FullName()] = None
def UsePsyco():
'Tries to use psyco if possible'
try:
import psyco
psyco.profile()
except: pass
def main():
start = time.clock()
UsePsyco()
status = Begin()
print '%0.2f seconds' % (time.clock()-start)
sys.exit(status)
if __name__ == '__main__':
main()
|
gpl-3.0
|