| repo_name | path | copies | size | content | license |
|---|---|---|---|---|---|
DobbinFox/emacs.d | elpa/elpy-20161125.56/elpy/tests/test_jedibackend.py | 13 | 10253 | """Tests for the elpy.jedibackend module."""
import sys
import unittest
import jedi
import mock
from elpy import jedibackend
from elpy import rpc
from elpy.tests import compat
from elpy.tests.support import BackendTestCase
from elpy.tests.support import RPCGetCompletionsTests
from elpy.tests.support import RPCGetCompletionDocstringTests
from elpy.tests.support import RPCGetCompletionLocationTests
from elpy.tests.support import RPCGetDocstringTests
from elpy.tests.support import RPCGetDefinitionTests
from elpy.tests.support import RPCGetCalltipTests
from elpy.tests.support import RPCGetUsagesTests
class JediBackendTestCase(BackendTestCase):
def setUp(self):
super(JediBackendTestCase, self).setUp()
self.backend = jedibackend.JediBackend(self.project_root)
class TestInit(JediBackendTestCase):
def test_should_have_jedi_as_name(self):
self.assertEqual(self.backend.name, "jedi")
class TestRPCGetCompletions(RPCGetCompletionsTests,
JediBackendTestCase):
pass
class TestRPCGetCompletionDocstring(RPCGetCompletionDocstringTests,
JediBackendTestCase):
pass
class TestRPCGetCompletionLocation(RPCGetCompletionLocationTests,
JediBackendTestCase):
pass
class TestRPCGetDocstring(RPCGetDocstringTests,
JediBackendTestCase):
JSON_LOADS_DOCSTRING = (
'loads(s, encoding=None, cls=None, '
'object_hook=None, parse_float=None,'
)
def check_docstring(self, docstring):
lines = docstring.splitlines()
self.assertEqual(lines[0], 'Documentation for json.loads:')
self.assertEqual(lines[2], self.JSON_LOADS_DOCSTRING)
class TestRPCGetDefinition(RPCGetDefinitionTests,
JediBackendTestCase):
@mock.patch("jedi.Script")
def test_should_not_fail_if_module_path_is_none(self, Script):
"""Do not fail if loc.module_path is None.
This can happen under some circumstances I am unsure about.
See #537 for the issue that reported this.
"""
locations = [
mock.Mock(module_path=None)
]
script = Script.return_value
script.goto_definitions.return_value = locations
script.goto_assignments.return_value = locations
location = self.rpc("", "", 0)
self.assertIsNone(location)
class TestRPCGetCalltip(RPCGetCalltipTests,
JediBackendTestCase):
KEYS_CALLTIP = {'index': 0,
'params': [''],
'name': u'keys'}
if sys.version_info >= (3, 5):
RADIX_CALLTIP = {'index': 0,
'params': ['10'],
'name': u'radix'}
else:
RADIX_CALLTIP = {'index': None,
'params': [],
'name': u'radix'}
ADD_CALLTIP = {'index': 0,
'params': [u'a', u'b'],
'name': u'add'}
if compat.PYTHON3:
THREAD_CALLTIP = {"name": "Thread",
"params": ["group=None",
"target=None",
"name=None",
"args=()",
"kwargs=None",
"daemon=None"],
"index": 0}
else:
THREAD_CALLTIP = {"name": "Thread",
"params": ["group=None",
"target=None",
"name=None",
"args=()",
"kwargs=None",
"verbose=None"],
"index": 0}
def test_should_not_fail_with_get_subscope_by_name(self):
# Bug #677 / jedi#628
source = (
u"my_lambda = lambda x: x+1\n"
u"my_lambda(1)"
)
filename = self.project_file("project.py", source)
offset = 37
sigs = self.backend.rpc_get_calltip(filename, source, offset)
if sigs is not None:
sigs[0].index
class TestRPCGetUsages(RPCGetUsagesTests,
JediBackendTestCase):
def test_should_not_fail_for_missing_module(self):
# This causes use.module_path to be None
source = "import sys\n\nsys.path.\n" # insert()"
offset = 21
filename = self.project_file("project.py", source)
self.rpc(filename, source, offset)
class TestPosToLinecol(unittest.TestCase):
def test_should_handle_beginning_of_string(self):
self.assertEqual(jedibackend.pos_to_linecol("foo", 0),
(1, 0))
def test_should_handle_end_of_line(self):
self.assertEqual(jedibackend.pos_to_linecol("foo\nbar\nbaz\nqux", 9),
(3, 1))
def test_should_handle_end_of_string(self):
self.assertEqual(jedibackend.pos_to_linecol("foo\nbar\nbaz\nqux", 14),
(4, 2))
class TestLinecolToPos(unittest.TestCase):
def test_should_handle_beginning_of_string(self):
self.assertEqual(jedibackend.linecol_to_pos("foo", 1, 0),
0)
def test_should_handle_end_of_string(self):
self.assertEqual(jedibackend.linecol_to_pos("foo\nbar\nbaz\nqux",
3, 1),
9)
def test_should_return_offset(self):
self.assertEqual(jedibackend.linecol_to_pos("foo\nbar\nbaz\nqux",
4, 2),
14)
def test_should_fail_for_line_past_text(self):
self.assertRaises(ValueError,
jedibackend.linecol_to_pos, "foo\n", 3, 1)
def test_should_fail_for_column_past_text(self):
self.assertRaises(ValueError,
jedibackend.linecol_to_pos, "foo\n", 1, 10)
class TestRunWithDebug(unittest.TestCase):
@mock.patch('jedi.Script')
def test_should_call_method(self, Script):
Script.return_value.test_method.return_value = "test-result"
result = jedibackend.run_with_debug(jedi, 'test_method', 1, 2, arg=3)
Script.assert_called_with(1, 2, arg=3)
self.assertEqual(result, 'test-result')
@mock.patch('jedi.Script')
def test_should_re_raise(self, Script):
Script.side_effect = RuntimeError
with self.assertRaises(RuntimeError):
jedibackend.run_with_debug(jedi, 'test_method', 1, 2, arg=3,
re_raise=(RuntimeError,))
@mock.patch('jedi.Script')
@mock.patch('jedi.set_debug_function')
def test_should_keep_debug_info(self, set_debug_function, Script):
Script.side_effect = RuntimeError
try:
jedibackend.run_with_debug(jedi, 'test_method', 1, 2, arg=3)
except rpc.Fault as e:
self.assertGreaterEqual(e.code, 400)
self.assertIsNotNone(e.data)
self.assertIn("traceback", e.data)
jedi_debug_info = e.data["jedi_debug_info"]
self.assertIsNotNone(jedi_debug_info)
self.assertEqual(jedi_debug_info["script_args"],
"1, 2, arg=3")
self.assertEqual(jedi_debug_info["source"], None)
self.assertEqual(jedi_debug_info["method"], "test_method")
self.assertEqual(jedi_debug_info["debug_info"], [])
else:
self.fail("Fault not thrown")
@mock.patch('jedi.Script')
@mock.patch('jedi.set_debug_function')
def test_should_keep_error_text(self, set_debug_function, Script):
Script.side_effect = RuntimeError
try:
jedibackend.run_with_debug(jedi, 'test_method', 1, 2, arg=3)
except rpc.Fault as e:
self.assertEqual(str(e), str(RuntimeError()))
self.assertEqual(e.message, str(RuntimeError()))
else:
self.fail("Fault not thrown")
@mock.patch('jedi.Script')
@mock.patch('jedi.set_debug_function')
def test_should_handle_source_special(self, set_debug_function, Script):
Script.side_effect = RuntimeError
try:
jedibackend.run_with_debug(jedi, 'test_method', source="foo")
except rpc.Fault as e:
self.assertEqual(e.data["jedi_debug_info"]["script_args"],
"source=source")
self.assertEqual(e.data["jedi_debug_info"]["source"], "foo")
else:
self.fail("Fault not thrown")
@mock.patch('jedi.Script')
@mock.patch('jedi.set_debug_function')
def test_should_set_debug_info(self, set_debug_function, Script):
the_debug_function = [None]
def my_set_debug_function(debug_function, **kwargs):
the_debug_function[0] = debug_function
def my_script(*args, **kwargs):
the_debug_function[0](jedi.debug.NOTICE, "Notice")
the_debug_function[0](jedi.debug.WARNING, "Warning")
the_debug_function[0]("other", "Other")
raise RuntimeError
set_debug_function.side_effect = my_set_debug_function
Script.return_value.test_method = my_script
try:
jedibackend.run_with_debug(jedi, 'test_method', source="foo")
except rpc.Fault as e:
self.assertEqual(e.data["jedi_debug_info"]["debug_info"],
["[N] Notice",
"[W] Warning",
"[?] Other"])
else:
self.fail("Fault not thrown")
@mock.patch('jedi.set_debug_function')
@mock.patch('jedi.Script')
def test_should_not_fail_with_bad_data(self, Script, set_debug_function):
import jedi.debug
def set_debug(function, speed=True):
if function is not None:
function(jedi.debug.NOTICE, u"\xab")
set_debug_function.side_effect = set_debug
Script.return_value.test_method.side_effect = Exception
with self.assertRaises(rpc.Fault):
jedibackend.run_with_debug(jedi, 'test_method', 1, 2, arg=3)
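# Editorial summary (not part of the upstream test file): per the tests
# above, run_with_debug() wraps backend errors in an rpc.Fault whose .data
# payload has this shape (values mirror the assertions):
#
#   {'traceback': '<formatted traceback>',
#    'jedi_debug_info': {'script_args': '1, 2, arg=3',
#                        'source': None,          # or the source= kwarg
#                        'method': 'test_method',
#                        'debug_info': ['[N] Notice', '[W] Warning',
#                                       '[?] Other']}}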
| mit |
dronefly/dronefly.github.io | flask/lib/python2.7/site-packages/jinja2/meta.py | 336 | 4198 | # -*- coding: utf-8 -*-
"""
jinja2.meta
~~~~~~~~~~~
This module implements various functions that expose information about
templates that might be interesting for various kinds of applications.
:copyright: (c) 2010 by the Jinja Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from jinja2 import nodes
from jinja2.compiler import CodeGenerator
from jinja2._compat import string_types
class TrackingCodeGenerator(CodeGenerator):
"""We abuse the code generator for introspection."""
def __init__(self, environment):
CodeGenerator.__init__(self, environment, '<introspection>',
'<introspection>')
self.undeclared_identifiers = set()
def write(self, x):
"""Don't write."""
def pull_locals(self, frame):
"""Remember all undeclared identifiers."""
self.undeclared_identifiers.update(frame.identifiers.undeclared)
def find_undeclared_variables(ast):
"""Returns a set of all variables in the AST that will be looked up from
the context at runtime. Because at compile time it's not known which
variables will be used depending on the path the execution takes at
runtime, all variables are returned.
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% set foo = 42 %}{{ bar + foo }}')
>>> meta.find_undeclared_variables(ast) == set(['bar'])
True
.. admonition:: Implementation
Internally the code generator is used for finding undeclared variables.
This is good to know because the code generator might raise a
:exc:`TemplateAssertionError` during compilation and as a matter of
fact this function can currently raise that exception as well.
"""
codegen = TrackingCodeGenerator(ast.environment)
codegen.visit(ast)
return codegen.undeclared_identifiers
def find_referenced_templates(ast):
"""Finds all the referenced templates from the AST. This will return an
iterator over all the hardcoded template extensions, inclusions and
imports. If dynamic inheritance or inclusion is used, `None` will be
yielded.
>>> from jinja2 import Environment, meta
>>> env = Environment()
>>> ast = env.parse('{% extends "layout.html" %}{% include helper %}')
>>> list(meta.find_referenced_templates(ast))
['layout.html', None]
This function is useful for dependency tracking, for example to rebuild
parts of the website after a layout template has changed.
"""
for node in ast.find_all((nodes.Extends, nodes.FromImport, nodes.Import,
nodes.Include)):
if not isinstance(node.template, nodes.Const):
# a tuple with some non consts in there
if isinstance(node.template, (nodes.Tuple, nodes.List)):
for template_name in node.template.items:
# something const, only yield the strings and ignore
# non-string consts that really just make no sense
if isinstance(template_name, nodes.Const):
if isinstance(template_name.value, string_types):
yield template_name.value
# something dynamic in there
else:
yield None
# something dynamic we don't know about here
else:
yield None
continue
# constant is a basestring, direct template name
if isinstance(node.template.value, string_types):
yield node.template.value
# a tuple or list (latter *should* not happen) made of consts,
# yield the consts that are strings. We could warn here for
# non string values
elif isinstance(node, nodes.Include) and \
isinstance(node.template.value, (tuple, list)):
for template_name in node.template.value:
if isinstance(template_name, string_types):
yield template_name
# something else we don't care about, we could warn here
else:
yield None
| apache-2.0 |
aimanaiman/supernomadfriendsquad | node_modules/node-gyp/gyp/pylib/gyp/ninja_syntax.py | 2485 | 5536 | # This file comes from
# https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py
# Do not edit! Edit the upstream one instead.
"""Python module for generating .ninja files.
Note that this is emphatically not a required piece of Ninja; it's
just a helpful utility for build-file-generation systems that already
use Python.
"""
import textwrap
import re
def escape_path(word):
return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:')
class Writer(object):
def __init__(self, output, width=78):
self.output = output
self.width = width
def newline(self):
self.output.write('\n')
def comment(self, text):
for line in textwrap.wrap(text, self.width - 2):
self.output.write('# ' + line + '\n')
def variable(self, key, value, indent=0):
if value is None:
return
if isinstance(value, list):
value = ' '.join(filter(None, value)) # Filter out empty strings.
self._line('%s = %s' % (key, value), indent)
def pool(self, name, depth):
self._line('pool %s' % name)
self.variable('depth', depth, indent=1)
def rule(self, name, command, description=None, depfile=None,
generator=False, pool=None, restat=False, rspfile=None,
rspfile_content=None, deps=None):
self._line('rule %s' % name)
self.variable('command', command, indent=1)
if description:
self.variable('description', description, indent=1)
if depfile:
self.variable('depfile', depfile, indent=1)
if generator:
self.variable('generator', '1', indent=1)
if pool:
self.variable('pool', pool, indent=1)
if restat:
self.variable('restat', '1', indent=1)
if rspfile:
self.variable('rspfile', rspfile, indent=1)
if rspfile_content:
self.variable('rspfile_content', rspfile_content, indent=1)
if deps:
self.variable('deps', deps, indent=1)
def build(self, outputs, rule, inputs=None, implicit=None, order_only=None,
variables=None):
outputs = self._as_list(outputs)
all_inputs = self._as_list(inputs)[:]
out_outputs = list(map(escape_path, outputs))
all_inputs = list(map(escape_path, all_inputs))
if implicit:
implicit = map(escape_path, self._as_list(implicit))
all_inputs.append('|')
all_inputs.extend(implicit)
if order_only:
order_only = map(escape_path, self._as_list(order_only))
all_inputs.append('||')
all_inputs.extend(order_only)
self._line('build %s: %s' % (' '.join(out_outputs),
' '.join([rule] + all_inputs)))
if variables:
if isinstance(variables, dict):
iterator = iter(variables.items())
else:
iterator = iter(variables)
for key, val in iterator:
self.variable(key, val, indent=1)
return outputs
def include(self, path):
self._line('include %s' % path)
def subninja(self, path):
self._line('subninja %s' % path)
def default(self, paths):
self._line('default %s' % ' '.join(self._as_list(paths)))
def _count_dollars_before_index(self, s, i):
"""Returns the number of '$' characters right in front of s[i]."""
dollar_count = 0
dollar_index = i - 1
while dollar_index > 0 and s[dollar_index] == '$':
dollar_count += 1
dollar_index -= 1
return dollar_count
def _line(self, text, indent=0):
"""Write 'text' word-wrapped at self.width characters."""
leading_space = ' ' * indent
while len(leading_space) + len(text) > self.width:
# The text is too wide; wrap if possible.
# Find the rightmost space that would obey our width constraint and
# that's not an escaped space.
available_space = self.width - len(leading_space) - len(' $')
space = available_space
while True:
space = text.rfind(' ', 0, space)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# No such space; just use the first unescaped space we can find.
space = available_space - 1
while True:
space = text.find(' ', space + 1)
if space < 0 or \
self._count_dollars_before_index(text, space) % 2 == 0:
break
if space < 0:
# Give up on breaking.
break
self.output.write(leading_space + text[0:space] + ' $\n')
text = text[space+1:]
# Subsequent lines are continuations, so indent them.
leading_space = ' ' * (indent+2)
self.output.write(leading_space + text + '\n')
def _as_list(self, input):
if input is None:
return []
if isinstance(input, list):
return input
return [input]
def escape(string):
"""Escape a string such that it can be embedded into a Ninja file without
further interpretation."""
assert '\n' not in string, 'Ninja syntax does not allow newlines'
# We only have one special metacharacter: '$'.
return string.replace('$', '$$')
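# ---------------------------------------------------------------------------
# Illustrative usage sketch (an editorial addition, not upstream code): how
# the Writer above emits a rule and a build edge. The rule name, command and
# file names are assumptions chosen for the example.
# ---------------------------------------------------------------------------
def _demo_writer():
    try:
        from StringIO import StringIO  # Python 2
    except ImportError:
        from io import StringIO       # Python 3
    buf = StringIO()
    w = Writer(buf)
    w.comment('demo build file generated with ninja_syntax')
    w.rule('cc', command='gcc -c $in -o $out', description='CC $out')
    w.build('foo.o', 'cc', inputs='foo.c')
    return buf.getvalue()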
| mit |
huaweiswitch/neutron | neutron/plugins/cisco/cfg_agent/device_drivers/driver_mgr.py | 7 | 4410 | # Copyright 2014 Cisco Systems, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.openstack.common import excutils
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.plugins.cisco.cfg_agent import cfg_exceptions
LOG = logging.getLogger(__name__)
class DeviceDriverManager(object):
"""This class acts as a manager for device drivers.
The device driver manager maintains the relationship between the
different neutron logical resources (eg: routers, firewalls, vpns etc.) and
where they are hosted. For configuring a logical resource (router) in a
hosting device, a corresponding device driver object is used.
Device drivers encapsulate the necessary configuration information to
configure a logical resource (eg: routers, firewalls, vpns etc.) on a
hosting device (eg: CSR1kv).
The device driver class loads one driver object per hosting device.
The loaded drivers are cached in memory, so when a request is made to
get driver object for the same hosting device and resource (like router),
the existing driver object is reused.
This class is used by the service helper classes.
"""
def __init__(self):
self._drivers = {}
self._hosting_device_routing_drivers_binding = {}
def get_driver(self, resource_id):
try:
return self._drivers[resource_id]
except KeyError:
with excutils.save_and_reraise_exception(reraise=False):
raise cfg_exceptions.DriverNotFound(id=resource_id)
def set_driver(self, resource):
"""Set the driver for a neutron resource.
:param resource: Neutron resource in dict format. Expected keys:
{ 'id': <value>
'hosting_device': { 'id': <value>, }
'router_type': {'cfg_agent_driver': <value>, }
}
:return driver : driver object
"""
try:
resource_id = resource['id']
hosting_device = resource['hosting_device']
hd_id = hosting_device['id']
if hd_id in self._hosting_device_routing_drivers_binding:
driver = self._hosting_device_routing_drivers_binding[hd_id]
self._drivers[resource_id] = driver
else:
driver_class = resource['router_type']['cfg_agent_driver']
driver = importutils.import_object(driver_class,
**hosting_device)
self._hosting_device_routing_drivers_binding[hd_id] = driver
self._drivers[resource_id] = driver
return driver
except ImportError:
with excutils.save_and_reraise_exception(reraise=False):
LOG.exception(_("Error loading cfg agent driver %(driver)s "
"for hosting device template "
"%(t_name)s(%(t_id)s)"),
{'driver': driver_class, 't_id': hd_id,
't_name': resource['name']})
raise cfg_exceptions.DriverNotExist(driver=driver_class)
except KeyError as e:
with excutils.save_and_reraise_exception(reraise=False):
raise cfg_exceptions.DriverNotSetForMissingParameter(e)
def remove_driver(self, resource_id):
"""Remove driver associated to a particular resource."""
if resource_id in self._drivers:
del self._drivers[resource_id]
def remove_driver_for_hosting_device(self, hd_id):
"""Remove driver associated to a particular hosting device."""
if hd_id in self._hosting_device_routing_drivers_binding:
del self._hosting_device_routing_drivers_binding[hd_id]
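# Illustrative usage sketch (editorial addition, not part of the upstream
# file): how a service helper might drive the manager above. The resource
# dict keys mirror the set_driver() docstring; the driver class path is a
# made-up assumption, so DriverNotExist is the expected outcome here.
def _example_usage():
    mgr = DeviceDriverManager()
    resource = {'id': 'router-1',
                'name': 'router-1',
                'hosting_device': {'id': 'hd-1'},
                'router_type': {'cfg_agent_driver': 'example.path.Driver'}}
    try:
        driver = mgr.set_driver(resource)   # loads and caches the driver
        assert mgr.get_driver('router-1') is driver
    except cfg_exceptions.DriverNotExist:
        pass                                # raised for unimportable paths
    finally:
        mgr.remove_driver('router-1')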
| apache-2.0 |
RebeccaWPerry/vispy | vispy/visuals/polygon.py | 20 | 3795 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
Simple polygon visual based on MeshVisual and LineVisual
"""
from __future__ import division
import numpy as np
from .visual import CompoundVisual
from .mesh import MeshVisual
from .line import LineVisual
from ..color import Color
from ..geometry import PolygonData
from ..gloo import set_state
class PolygonVisual(CompoundVisual):
"""
Displays a 2D polygon
Parameters
----------
pos : array
Set of vertices defining the polygon.
color : str | tuple | list of colors
Fill color of the polygon.
border_color : str | tuple | list of colors
Border color of the polygon.
border_width : int
Border width in pixels.
**kwargs : dict
Keyword arguments to pass to `PolygonVisual`.
"""
def __init__(self, pos=None, color='black',
border_color=None, border_width=1, **kwargs):
self._mesh = MeshVisual()
self._border = LineVisual()
self._pos = pos
self._color = Color(color)
self._border_width = border_width
self._border_color = Color(border_color)
self._update()
CompoundVisual.__init__(self, [self._mesh, self._border], **kwargs)
self._mesh.set_gl_state(polygon_offset_fill=True,
polygon_offset=(1, 1), cull_face=False)
self.freeze()
def _update(self):
if self._pos is None:
return
self.data = PolygonData(vertices=np.array(self._pos, dtype=np.float32))
if not self._color.is_blank:
pts, tris = self.data.triangulate()
set_state(polygon_offset_fill=False)
self._mesh.set_data(vertices=pts, faces=tris.astype(np.uint32),
color=self._color.rgba)
if not self._border_color.is_blank:
# Close border if it is not already.
border_pos = self._pos
if np.any(border_pos[0] != border_pos[1]):
border_pos = np.concatenate([border_pos, border_pos[:1]],
axis=0)
self._border.set_data(pos=border_pos,
color=self._border_color.rgba,
width=self._border_width,
connect='strip')
self._border.update()
@property
def pos(self):
""" The vertex position of the polygon.
"""
return self._pos
@pos.setter
def pos(self, pos):
self._pos = pos
self._update()
@property
def color(self):
""" The color of the polygon.
"""
return self._color
@color.setter
def color(self, color):
self._color = Color(color, clip=True)
self._update()
@property
def border_color(self):
""" The border color of the polygon.
"""
return self._border_color
@border_color.setter
def border_color(self, border_color):
self._border_color = Color(border_color)
self._update()
@property
def mesh(self):
"""The vispy.visuals.MeshVisual that is owned by the PolygonVisual.
It is used to fill in the polygon
"""
return self._mesh
@mesh.setter
def mesh(self, mesh):
self._mesh = mesh
self._update()
@property
def border(self):
"""The vispy.visuals.LineVisual that is owned by the PolygonVisual.
It is used to draw the border of the polygon
"""
return self._border
@border.setter
def border(self, border):
self._border = border
self._update()
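# Illustrative usage sketch (editorial addition, not upstream code): building
# the visual above from a triangle. The vertices and colors are arbitrary
# assumptions; in a real application the visual is attached to a scene/canvas.
def _example_polygon():
    pos = np.array([[0., 0.], [1., 0.], [0.5, 1.]], dtype=np.float32)
    return PolygonVisual(pos=pos, color='red',
                         border_color='black', border_width=2)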
| bsd-3-clause |
rooshilp/CMPUT410W15-project | testenv/lib/python2.7/site-packages/django/db/backends/mysql/schema.py | 53 | 2192 | from django.db.backends.schema import BaseDatabaseSchemaEditor
from django.db.models import NOT_PROVIDED
class DatabaseSchemaEditor(BaseDatabaseSchemaEditor):
sql_rename_table = "RENAME TABLE %(old_table)s TO %(new_table)s"
sql_alter_column_null = "MODIFY %(column)s %(type)s NULL"
sql_alter_column_not_null = "MODIFY %(column)s %(type)s NOT NULL"
sql_alter_column_type = "MODIFY %(column)s %(type)s"
sql_rename_column = "ALTER TABLE %(table)s CHANGE %(old_column)s %(new_column)s %(type)s"
sql_delete_unique = "ALTER TABLE %(table)s DROP INDEX %(name)s"
sql_create_fk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) REFERENCES %(to_table)s (%(to_column)s)"
sql_delete_fk = "ALTER TABLE %(table)s DROP FOREIGN KEY %(name)s"
sql_delete_index = "DROP INDEX %(name)s ON %(table)s"
sql_delete_pk = "ALTER TABLE %(table)s DROP PRIMARY KEY"
alter_string_set_null = 'MODIFY %(column)s %(type)s NULL;'
alter_string_drop_null = 'MODIFY %(column)s %(type)s NOT NULL;'
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
def quote_value(self, value):
# Inner import to allow module to fail to load gracefully
import MySQLdb.converters
return MySQLdb.escape(value, MySQLdb.converters.conversions)
def skip_default(self, field):
"""
MySQL doesn't accept default values for longtext and longblob
and implicitly treats these columns as nullable.
"""
return field.db_type(self.connection) in {'longtext', 'longblob'}
def add_field(self, model, field):
super(DatabaseSchemaEditor, self).add_field(model, field)
# Simulate the effect of a one-off default.
if self.skip_default(field) and field.default not in {None, NOT_PROVIDED}:
effective_default = self.effective_default(field)
self.execute('UPDATE %(table)s SET %(column)s = %%s' % {
'table': self.quote_name(model._meta.db_table),
'column': self.quote_name(field.column),
}, [effective_default])
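# Editorial note (not part of the upstream file): because MySQL ignores
# DEFAULT clauses on longtext/longblob, add_field() above follows the ALTER
# with a one-off UPDATE. Roughly, for a TextField with default='x' (table and
# column names are assumptions):
#
#   ALTER TABLE `app_model` ADD COLUMN `body` longtext NOT NULL;
#   UPDATE `app_model` SET `body` = 'x';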
| gpl-2.0 |
AIFDR/inasafe-django | django_project/realtime/migrations/0050_reporttemplate.py | 2 | 1641 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('realtime', '0049_auto_20180320_0406'),
]
operations = [
migrations.CreateModel(
name='ReportTemplate',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('timestamp', models.DateTimeField(help_text='The time the template uploaded.', verbose_name='Timestamp')),
('version', models.CharField(default=None, max_length=10, blank=True, help_text='Version number of the template.', null=True, verbose_name='Template version')),
('notes', models.CharField(default=None, max_length=255, blank=True, help_text='Notes of the report template.', null=True, verbose_name='Template Notes')),
('language', models.CharField(default=b'id', help_text='The language ID of the report', max_length=4, verbose_name='Language ID')),
('hazard', models.CharField(default=None, help_text='The hazard type of the template.', max_length=25, verbose_name='Hazard Type')),
('template_file', models.FileField(help_text='Template file formatted as qgis template file (*.qpt).', upload_to=b'', verbose_name='Template File')),
('owner', models.IntegerField(default=0, help_text='The owner/uploader of the template.', verbose_name='Owner')),
],
options={
'verbose_name_plural': 'Report Templates',
},
),
]
| bsd-2-clause |
Fireblend/chromium-crosswalk | tools/win/split_link/graph_dependencies.py | 145 | 2291 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import shutil
import subprocess
import sys
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
def main():
if len(sys.argv) != 2:
print 'usage: %s <output.html>' % sys.argv[0]
return 1
env = os.environ.copy()
env['GYP_GENERATORS'] = 'dump_dependency_json'
print 'Dumping dependencies...'
popen = subprocess.Popen(
['python', 'build/gyp_chromium'],
shell=True, env=env)
popen.communicate()
if popen.returncode != 0:
return popen.returncode
print 'Finding problems...'
popen = subprocess.Popen(
['python', 'tools/gyp-explain.py', '--dot',
'chrome.gyp:browser#', 'core.gyp:webcore#'],
stdout=subprocess.PIPE,
shell=True)
out, _ = popen.communicate()
if popen.returncode != 0:
return popen.returncode
# Break into pairs to uniq to make graph less of a mess.
print 'Simplifying...'
deduplicated = set()
lines = out.splitlines()[2:-1]
for line in lines:
line = line.strip('\r\n ;')
pairs = line.split(' -> ')
for i in range(len(pairs) - 1):
deduplicated.add('%s -> %s;' % (pairs[i], pairs[i + 1]))
graph = 'strict digraph {\n' + '\n'.join(sorted(deduplicated)) + '\n}'
print 'Writing report to %s...' % sys.argv[1]
path_count = len(out.splitlines())
with open(os.path.join(BASE_DIR, 'viz.js', 'viz.js')) as f:
viz_js = f.read()
with open(sys.argv[1], 'w') as f:
f.write(PREFIX % path_count)
f.write(graph)
f.write(SUFFIX % viz_js)
print 'Done.'
PREFIX = r'''<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<title>Undesirable Dependencies</title>
</head>
<body>
<h1>Undesirable Dependencies</h1>
<h2>browser → webcore</h2>
<h3>%d paths</h3>
<script type="text/vnd.graphviz" id="graph">
'''
SUFFIX = r'''
</script>
<script>%s</script>
<div id="output">Rendering...</div>
<script>
setTimeout(function() {
document.getElementById("output").innerHTML =
Viz(document.getElementById("graph").innerHTML, "svg");
}, 1);
</script>
</body>
</html>
'''
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause |
supernifty/mgsa | mgsa/analyze_bam.py | 1 | 2071 |
import argparse
import collections
import numpy
import sys
import bio
import config
parser = argparse.ArgumentParser(description='Analyze BAM')
parser.add_argument('bam', metavar='bam', help='bam file to analyze')
parser.add_argument('--buckets', metavar='buckets', type=int, default=10, help='number of buckets')
parser.add_argument('--max_sample', metavar='max_sample', type=int, default=-1, help='max number of samples in each group')
parser.add_argument('--skip', metavar='skip', type=int, default=0, help='skip the first reads')
args = parser.parse_args()
bam = bio.BamReaderExternal( config.BAM_TO_SAM, args.bam )
stats = bio.SamStats( bam, max_sample=args.max_sample, skip=args.skip )
# gc
buckets = numpy.linspace(0, 1, args.buckets + 1)
mapped_buckets = bio.bucket( filter( None, stats.mapped['gc'] ), buckets )
unmapped_buckets = bio.bucket( filter( None, stats.unmapped['gc'] ), buckets )
total_mapped = sum( mapped_buckets )
total_unmapped = sum( unmapped_buckets )
print '========== GC content =========='
print 'GC %%: %s' % '\t'.join( [ '%.2f' % bucket for bucket in buckets ] )
print 'mapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_mapped ) for x in mapped_buckets ] )
print 'unmapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_unmapped ) for x in unmapped_buckets ] )
# entropy
mapped_buckets = bio.bucket( stats.mapped['entropy'], buckets )
unmapped_buckets = bio.bucket( stats.unmapped['entropy'], buckets )
total_mapped = sum( mapped_buckets )
total_unmapped = sum( unmapped_buckets )
print '\n========== Entropy =========='
print 'Mapped: min: %.2f max: %.2f' % ( min( stats.mapped['entropy'] ), max( stats.mapped['entropy'] ) )
print 'Unmapped: min: %.2f max: %.2f' % ( min( stats.unmapped['entropy'] ), max( stats.unmapped['entropy'] ) )
print 'Entropy: %s' % '\t'.join( [ '%.2f' % bucket for bucket in buckets ] )
print 'mapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_mapped ) for x in mapped_buckets ] )
print 'unmapped: %s' % '\t'.join( [ '%.1f' % ( 100. * x / total_unmapped ) for x in unmapped_buckets ] )
| mit |
kenshay/ImageScripter | ProgramData/SystemFiles/Python/Lib/site-packages/urllib3/contrib/ntlmpool.py | 63 | 4459 | """
NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
from __future__ import absolute_import
from logging import getLogger
from ntlm import ntlm
from .. import HTTPSConnectionPool
from ..packages.six.moves.http_client import HTTPSConnection
log = getLogger(__name__)
class NTLMConnectionPool(HTTPSConnectionPool):
"""
Implements an NTLM authentication version of an urllib3 connection pool
"""
scheme = 'https'
def __init__(self, user, pw, authurl, *args, **kwargs):
"""
authurl is a random URL on the server that is protected by NTLM.
user is the Windows user, probably in the DOMAIN\\username format.
pw is the password for the user.
"""
super(NTLMConnectionPool, self).__init__(*args, **kwargs)
self.authurl = authurl
self.rawuser = user
user_parts = user.split('\\', 1)
self.domain = user_parts[0].upper()
self.user = user_parts[1]
self.pw = pw
def _new_conn(self):
# Performs the NTLM handshake that secures the connection. The socket
# must be kept open while requests are performed.
self.num_connections += 1
log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s',
self.num_connections, self.host, self.authurl)
headers = {'Connection': 'Keep-Alive'}
req_header = 'Authorization'
resp_header = 'www-authenticate'
conn = HTTPSConnection(host=self.host, port=self.port)
# Send negotiation message
headers[req_header] = (
'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
reshdr = dict(res.getheaders())
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', reshdr)
log.debug('Response data: %s [...]', res.read(100))
# Remove the reference to the socket, so that it can not be closed by
# the response object (we want to keep the socket open)
res.fp = None
# Server should respond with a challenge message
auth_header_values = reshdr[resp_header].split(', ')
auth_header_value = None
for s in auth_header_values:
if s[:5] == 'NTLM ':
auth_header_value = s[5:]
if auth_header_value is None:
raise Exception('Unexpected %s response header: %s' %
(resp_header, reshdr[resp_header]))
# Send authentication message
ServerChallenge, NegotiateFlags = \
ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
self.user,
self.domain,
self.pw,
NegotiateFlags)
headers[req_header] = 'NTLM %s' % auth_msg
log.debug('Request headers: %s', headers)
conn.request('GET', self.authurl, None, headers)
res = conn.getresponse()
log.debug('Response status: %s %s', res.status, res.reason)
log.debug('Response headers: %s', dict(res.getheaders()))
log.debug('Response data: %s [...]', res.read()[:100])
if res.status != 200:
if res.status == 401:
raise Exception('Server rejected request: wrong '
'username or password')
raise Exception('Wrong server response: %s %s' %
(res.status, res.reason))
res.fp = None
log.debug('Connection established')
return conn
def urlopen(self, method, url, body=None, headers=None, retries=3,
redirect=True, assert_same_host=True):
if headers is None:
headers = {}
headers['Connection'] = 'Keep-Alive'
return super(NTLMConnectionPool, self).urlopen(method, url, body,
headers, retries,
redirect,
assert_same_host)
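# Illustrative usage sketch (editorial addition, not upstream code): the
# domain, credentials, host and URL below are assumptions.
def _example_pool():
    pool = NTLMConnectionPool('DOMAIN\\user', 'secret',
                              authurl='/protected',
                              host='intranet.example', port=443)
    return pool.urlopen('GET', '/protected')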
| gpl-3.0 |
Venturi/cms | env/lib/python2.7/site-packages/cms/test_utils/project/customuserapp/south_migrations/0001_initial.py | 35 | 5693 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'User'
db.create_table(u'customuserapp_user', (
(u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('password', self.gf('django.db.models.fields.CharField')(max_length=128)),
('last_login', self.gf('django.db.models.fields.DateTimeField')(default=datetime.datetime.now)),
('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)),
('username', self.gf('django.db.models.fields.CharField')(unique=True, max_length=300)),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('is_staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('my_new_field', self.gf('django.db.models.fields.IntegerField')(default=42, null=True, blank=True)),
))
db.send_create_signal(u'customuserapp', ['User'])
# Adding M2M table for field groups on 'User'
m2m_table_name = db.shorten_name(u'customuserapp_user_groups')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('user', models.ForeignKey(orm[u'customuserapp.user'], null=False)),
('group', models.ForeignKey(orm[u'auth.group'], null=False))
))
db.create_unique(m2m_table_name, ['user_id', 'group_id'])
# Adding M2M table for field user_permissions on 'User'
m2m_table_name = db.shorten_name(u'customuserapp_user_user_permissions')
db.create_table(m2m_table_name, (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('user', models.ForeignKey(orm[u'customuserapp.user'], null=False)),
('permission', models.ForeignKey(orm[u'auth.permission'], null=False))
))
db.create_unique(m2m_table_name, ['user_id', 'permission_id'])
def backwards(self, orm):
# Deleting model 'User'
db.delete_table(u'customuserapp_user')
# Removing M2M table for field groups on 'User'
db.delete_table(db.shorten_name(u'customuserapp_user_groups'))
# Removing M2M table for field user_permissions on 'User'
db.delete_table(db.shorten_name(u'customuserapp_user_user_permissions'))
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'customuserapp.user': {
'Meta': {'object_name': 'User'},
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'my_new_field': ('django.db.models.fields.IntegerField', [], {'default': '42', 'null': 'True', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '300'})
}
}
complete_apps = ['customuserapp'] | gpl-2.0 |
cirrusone/phantom2 | src/qt/qtwebkit/Tools/CygwinDownloader/cygwin-downloader.py | 120 | 5513 | #!/usr/bin/env python
import os, random, sys, time, urllib
#
# Options
#
dry_run = len(sys.argv) > 1 and "--dry-run" in set(sys.argv[1:])
quiet = len(sys.argv) > 1 and "--quiet" in set(sys.argv[1:])
#
# Functions and constants
#
def download_progress_hook(block_count, block_size, total_blocks):
if quiet or random.random() > 0.5:
return
sys.stdout.write(".")
sys.stdout.flush()
def download_url_to_file(url, file, message):
if not quiet:
print message + " ",
if not dry_run:
dir = os.path.dirname(file)
if len(dir) and not os.path.exists(dir):
os.makedirs(dir)
urllib.urlretrieve(url, file, download_progress_hook)
if not quiet:
print
# This is mostly just the list of North America http mirrors from http://cygwin.com/mirrors.html,
# but a few have been removed that seemed unresponsive from Cupertino.
mirror_servers = ["http://cygwin.elite-systems.org/",
"http://mirror.mcs.anl.gov/cygwin/",
"http://cygwin.osuosl.org/",
"http://mirrors.kernel.org/sourceware/cygwin/",
"http://mirrors.xmission.com/cygwin/",
"http://sourceware.mirrors.tds.net/pub/sourceware.org/cygwin/"]
package_mirror_url = mirror_servers[random.choice(range(len(mirror_servers)))]
def download_package(package, message):
download_url_to_file(package_mirror_url + package["path"], package["path"], message)
required_packages = frozenset(["apache",
"bc",
"bison",
"curl",
"diffutils",
"e2fsprogs",
"emacs",
"flex",
"gcc",
"gperf",
"keychain",
"make",
"minires",
"nano",
"openssh",
"patch",
"perl",
"perl-libwin32",
"python",
"rebase",
"rsync",
"ruby",
"subversion",
"unzip",
"vim",
"zip"])
#
# Main
#
print "Using Cygwin mirror server " + package_mirror_url + " to download setup.ini..."
urllib.urlretrieve(package_mirror_url + "setup.ini", "setup.ini.orig")
downloaded_packages_file_path = "setup.ini.orig"
downloaded_packages_file = file(downloaded_packages_file_path, "r")
if not dry_run:
modified_packages_file = file("setup.ini", "w")
packages = {}
current_package = ''
for line in downloaded_packages_file.readlines():
if line[0] == "@":
current_package = line[2:-1]
packages[current_package] = {"name": current_package, "needs_download": False, "requires": [], "path": ""}
elif line[:10] == "category: ":
if current_package in required_packages:
line = "category: Base\n"
if "Base" in set(line[10:-1].split()):
packages[current_package]["needs_download"] = True
elif line[:10] == "requires: ":
packages[current_package]["requires"] = line[10:].split()
packages[current_package]["requires"].sort()
elif line[:9] == "install: " and not len(packages[current_package]["path"]):
end_of_path = line.find(" ", 9)
if end_of_path != -1:
packages[current_package]["path"] = line[9:end_of_path]
if not dry_run:
modified_packages_file.write(line)
downloaded_packages_file.close()
os.remove(downloaded_packages_file_path)
if not dry_run:
modified_packages_file.close()
names_to_download = set()
package_names = packages.keys()
package_names.sort()
def add_package_and_dependencies(name):
if name in names_to_download:
return
if not name in packages:
return
packages[name]["needs_download"] = True
names_to_download.add(name)
for dep in packages[name]["requires"]:
add_package_and_dependencies(dep)
for name in package_names:
if packages[name]["needs_download"]:
add_package_and_dependencies(name)
downloaded_so_far = 0
for name in package_names:
if packages[name]["needs_download"]:
downloaded_so_far += 1
download_package(packages[name], "Downloading package %3d of %3d (%s)" % (downloaded_so_far, len(names_to_download), name))
download_url_to_file("http://cygwin.com/setup.exe", "setup.exe", "Downloading setup.exe")
seconds_to_sleep = 10
print """
Finished downloading Cygwin. In %d seconds,
I will run setup.exe. Select the "Install
from Local Directory" option and browse to
"%s"
when asked for the "Local Package Directory".
""" % (seconds_to_sleep, os.getcwd())
while seconds_to_sleep > 0:
print "%d..." % seconds_to_sleep,
sys.stdout.flush()
time.sleep(1)
seconds_to_sleep -= 1
print
if not dry_run:
os.execl("setup.exe")
| bsd-3-clause |
minlexx/pyevemon | esi_client/models/get_characters_character_id_mail_labels_forbidden.py | 1 | 3097 | # coding: utf-8
"""
EVE Swagger Interface
An OpenAPI for EVE Online
OpenAPI spec version: 0.4.6
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class GetCharactersCharacterIdMailLabelsForbidden(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, error=None):
"""
GetCharactersCharacterIdMailLabelsForbidden - a model defined in Swagger
:param dict swaggerTypes: The key is attribute name
and the value is attribute type.
:param dict attributeMap: The key is attribute name
and the value is json key in definition.
"""
self.swagger_types = {
'error': 'str'
}
self.attribute_map = {
'error': 'error'
}
self._error = error
@property
def error(self):
"""
Gets the error of this GetCharactersCharacterIdMailLabelsForbidden.
Forbidden message
:return: The error of this GetCharactersCharacterIdMailLabelsForbidden.
:rtype: str
"""
return self._error
@error.setter
def error(self, error):
"""
Sets the error of this GetCharactersCharacterIdMailLabelsForbidden.
Forbidden message
:param error: The error of this GetCharactersCharacterIdMailLabelsForbidden.
:type: str
"""
self._error = error
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, GetCharactersCharacterIdMailLabelsForbidden):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
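# Illustrative usage sketch (editorial addition, not generated code):
#
#   m = GetCharactersCharacterIdMailLabelsForbidden(error='Forbidden')
#   m.to_dict()   # -> {'error': 'Forbidden'}
#   m.to_str()    # pformat of the dict above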
| gpl-3.0 |
jwlawson/tensorflow | tensorflow/python/saved_model/utils_impl.py | 48 | 3249 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""SavedModel utility functions implementation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.core.protobuf import meta_graph_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import sparse_tensor
# TensorInfo helpers.
def build_tensor_info(tensor):
"""Utility function to build TensorInfo proto.
Args:
tensor: Tensor or SparseTensor whose name, dtype and shape are used to
build the TensorInfo. For SparseTensors, the names of the three
constituent Tensors are used.
Returns:
A TensorInfo protocol buffer constructed based on the supplied argument.
"""
tensor_info = meta_graph_pb2.TensorInfo(
dtype=dtypes.as_dtype(tensor.dtype).as_datatype_enum,
tensor_shape=tensor.get_shape().as_proto())
if isinstance(tensor, sparse_tensor.SparseTensor):
tensor_info.coo_sparse.values_tensor_name = tensor.values.name
tensor_info.coo_sparse.indices_tensor_name = tensor.indices.name
tensor_info.coo_sparse.dense_shape_tensor_name = tensor.dense_shape.name
else:
tensor_info.name = tensor.name
return tensor_info
def get_tensor_from_tensor_info(tensor_info, graph=None, import_scope=None):
"""Returns the Tensor or SparseTensor described by a TensorInfo proto.
Args:
tensor_info: A TensorInfo proto describing a Tensor or SparseTensor.
graph: The tf.Graph in which tensors are looked up. If None, the
current default graph is used.
import_scope: If not None, names in `tensor_info` are prefixed with this
string before lookup.
Returns:
The Tensor or SparseTensor in `graph` described by `tensor_info`.
Raises:
KeyError: If `tensor_info` does not correspond to a tensor in `graph`.
ValueError: If `tensor_info` is malformed.
"""
graph = graph if graph is not None else ops.get_default_graph()
def _get_tensor(name):
return graph.get_tensor_by_name(
ops.prepend_name_scope(name, import_scope=import_scope))
encoding = tensor_info.WhichOneof("encoding")
if encoding == "name":
return _get_tensor(tensor_info.name)
elif encoding == "coo_sparse":
return sparse_tensor.SparseTensor(
_get_tensor(tensor_info.coo_sparse.indices_tensor_name),
_get_tensor(tensor_info.coo_sparse.values_tensor_name),
_get_tensor(tensor_info.coo_sparse.dense_shape_tensor_name))
else:
raise ValueError("Invalid TensorInfo.encoding: %s" % encoding)
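# Illustrative round-trip sketch (an editorial addition, not part of the
# upstream module): build a TensorInfo from a dense tensor and resolve it
# back within the same graph, assuming the TF 1.x graph-mode API used here.
def _example_round_trip():
  with ops.Graph().as_default() as graph:
    x = ops.convert_to_tensor([1.0, 2.0], name="x")   # dense tensor "x:0"
    info = build_tensor_info(x)                       # encoding == "name"
    return get_tensor_from_tensor_info(info, graph=graph) is x  # True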
| apache-2.0 |
jortel/gofer | test/unit/messaging/adapter/amqp/test_model.py | 1 | 5194 | # Copyright (c) 2014 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
from unittest import TestCase
from mock import Mock, patch
from gofer.devel import ipatch
with ipatch('amqp'):
from gofer.messaging.adapter.amqp.model import Exchange, BaseExchange
from gofer.messaging.adapter.amqp.model import Queue, BaseQueue
class TestExchange(TestCase):
def test_init(self):
name = 'test-exchange'
policy = 'direct'
# test
exchange = Exchange(name, policy=policy)
# validation
self.assertTrue(isinstance(exchange, BaseExchange))
self.assertEqual(exchange.name, name)
self.assertEqual(exchange.policy, policy)
self.assertEqual(exchange.auto_delete, False)
@patch('gofer.messaging.adapter.amqp.reliability.Connection.channel')
@patch('gofer.messaging.adapter.amqp.connection.RealConnection', Mock())
def test_declare(self, channel):
url = 'test-url'
# test
exchange = Exchange('test', policy='direct')
exchange.declare(url)
# validation
channel.return_value.exchange_declare.assert_called_once_with(
exchange.name,
exchange.policy,
durable=exchange.durable,
auto_delete=exchange.auto_delete)
@patch('gofer.messaging.adapter.amqp.reliability.Connection.channel')
@patch('gofer.messaging.adapter.amqp.connection.RealConnection', Mock())
def test_delete(self, channel):
url = 'test-url'
# test
exchange = Exchange('test')
exchange.delete(url)
# validation
channel.return_value.exchange_delete.assert_called_once_with(exchange.name, nowait=True)
@patch('gofer.messaging.adapter.amqp.reliability.Connection.channel')
@patch('gofer.messaging.adapter.amqp.connection.RealConnection', Mock())
def test_bind(self, channel):
url = 'test-url'
queue = BaseQueue('test-queue')
# test
exchange = Exchange('test-exchange')
exchange.bind(queue, url)
# validation
channel.return_value.queue_bind.assert_called_once_with(
queue.name,
exchange=exchange.name,
routing_key=queue.name)
@patch('gofer.messaging.adapter.amqp.reliability.Connection.channel')
@patch('gofer.messaging.adapter.amqp.connection.RealConnection', Mock())
def test_unbind(self, channel):
url = 'test-url'
queue = BaseQueue('test-queue')
# test
exchange = Exchange('test-exchange')
exchange.unbind(queue, url)
# validation
channel.return_value.queue_unbind.assert_called_once_with(
queue.name,
exchange=exchange.name,
routing_key=queue.name)
class TestQueue(TestCase):
def test_init(self):
name = 'test-queue'
queue = Queue(name)
self.assertEqual(queue.name, name)
self.assertTrue(isinstance(queue, BaseQueue))
self.assertEqual(queue.exclusive, False)
self.assertEqual(queue.auto_delete, False)
self.assertEqual(queue.expiration, 0)
@patch('gofer.messaging.adapter.amqp.reliability.Connection.channel')
@patch('gofer.messaging.adapter.amqp.connection.RealConnection', Mock())
def test_declare(self, channel):
url = 'test-url'
# test
queue = Queue('test')
queue.declare(url)
# validation
channel.return_value.queue_declare.assert_called_once_with(
queue.name,
durable=queue.durable,
exclusive=queue.exclusive,
auto_delete=queue.auto_delete,
arguments=None)
@patch('gofer.messaging.adapter.amqp.reliability.Connection.channel')
@patch('gofer.messaging.adapter.amqp.connection.RealConnection', Mock())
def test_declare_auto_delete(self, channel):
url = 'test-url'
# test
queue = Queue('test')
queue.auto_delete = True
queue.expiration = 10
queue.declare(url)
# validation
channel.return_value.queue_declare.assert_called_once_with(
queue.name,
durable=queue.durable,
exclusive=queue.exclusive,
auto_delete=queue.auto_delete,
arguments={'x-expires': queue.expiration * 1000})
@patch('gofer.messaging.adapter.amqp.reliability.Connection.channel')
@patch('gofer.messaging.adapter.amqp.connection.RealConnection', Mock())
def test_delete(self, channel):
url = 'test-url'
# test
queue = Queue('test')
queue.delete(url)
# validation
channel.return_value.queue_delete.assert_called_once_with(queue.name, nowait=True)
| lgpl-2.1 |
zayneanderson/aima-python | submissions/LaMartina/puzzles.py | 18 | 8106 | import search
import math
from copy import deepcopy
from math import(cos, pi)
# A sample map problem
sumner_map = search.UndirectedGraph(dict(
Portland=dict(Mitchellville=7, Fairfield=17, Cottontown=18),
Cottontown=dict(Portland=18),
Fairfield=dict(Mitchellville=21, Portland=17),
Mitchellville=dict(Portland=7, Fairfield=21),
))
#converts latitude to miles:
def latitude(lat):
return lat * 69
#converts longitude to miles:
def longitude(lat,long):
return long * 69 * math.cos(math.radians(lat))
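# Worked example (editorial addition): one degree of latitude is ~69 miles
# everywhere, while a degree of longitude shrinks with cos(latitude); near
# Kansas City (~39.1 N), longitude(39.1, 1) is roughly 69 * cos(39.1 deg),
# i.e. about 53.5 miles.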
kc_map = search.UndirectedGraph(dict(
KansasCity=dict(Independence=11,OverlandPark=12,Atchison=50,),
Independence=dict(Higginsville=46,LeesSummit=18,KansasCity=11),
Higginsville=dict(Warrensburg=22,Independence=46),
LeesSummit=dict(Warrensburg=39,Independence=18),
Warrensburg=dict(Sedalia=30,Higginsville=22,LeesSummit=39),
Sedalia=dict(Warsaw=35,Warrensburg=30),
Warsaw=dict(Clinton=30, Sedalia=35),
Clinton=dict(RichHill=51,Warsaw=30),
RichHill=dict(Ottawa=87,Clinton=51),
Ottawa=dict(OsageCity=32,RichHill=87),
OsageCity=dict(Ottawa=32),
OverlandPark=dict(Olathe=12,Lawrence=35,KansasCity=12,Holton=98),
Olathe=dict(Lawrence=30,OverlandPark=12),
Lawrence=dict(Topeka=28,OverlandPark=35,Olathe=30),
Atchison=dict(KansasCity=50),
Topeka=dict(Lawrence=28,StMarys=26),
StMarys=dict(Topeka=26,Holton=37),
Holton=dict(OverlandPark=98,StMarys=37),
))
kc_map.locations = dict(
KansasCity=(latitude(39.0997), longitude(39.0997,94.5786)),
Independence=(latitude(39.0911), longitude(39.0911,94.4155)),
Higginsville=(latitude(39.0725), longitude(39.0725,93.7172)),
LeesSummit=(latitude(38.9108), longitude(38.9108,94.3822)),
Warrensburg=(latitude(38.7628), longitude(38.7628,93.7360)),
Sedalia=(latitude(38.7045), longitude(38.7045,93.2283)),
Warsaw=(latitude(38.2431), longitude(38.2431,93.3819)),
Clinton= (latitude(38.3686), longitude(38.3686, 93.7783)),
RichHill= (latitude(38.0964), longitude(38.0964, 94.3611)),
Ottawa= (latitude(38.6158), longitude(38.6158, 95.2686)),
OsageCity= (latitude(39.0000), longitude(38.6339, 95.8258)),
OverlandPark= (latitude(38.9822), longitude(38.9822, 94.6708)),
Olathe=(latitude(38.8814), longitude(38.8814,94.8191)),
Lawrence=(latitude(38.9717), longitude(38.9717,95.2353)),
Atchison=(latitude(39.5631), longitude(39.5631,95.1216)),
Topeka=(latitude(39.0558), longitude(39.0558,95.6890)),
StMarys=(latitude(39.1942), longitude(39.1942,96.0711)),
Holton=(latitude(39.4653), longitude(39.4653,95.7364)),
)
sumner_puzzle = search.GraphProblem('Cottontown', 'Mitchellville', sumner_map)
sumner_puzzle.label = 'Sumner Map'
sumner_puzzle.description = '''
An abbreviated map of Sumner County, TN.
This map is unique, to the best of my knowledge.
'''
kcmapTopeka_puzzle = search.GraphProblem('OsageCity','Topeka', kc_map)
kcmapTopeka_puzzle.label = 'Kansas City Map'
kcmapTopeka_puzzle.description = '''
A map of the Kansas City area in the Missouri-Kansas Bistate Area.
'''
kcmapStMarys_puzzle = search.GraphProblem('KansasCity','StMarys', kc_map)
kcmapStMarys_puzzle.label = 'Kansas City Map'
kcmapStMarys_puzzle.description = '''
A map of the Kansas City area in the Missouri-Kansas Bistate Area.
'''
# A trivial Problem definition
class LightSwitch(search.Problem):
def actions(self, state):
return ['up', 'down']
def result(self, state, action):
if action == 'up':
return 'on'
else:
return 'off'
def goal_test(self, state):
return state == 'on'
def h(self, node):
state = node.state
if self.goal_test(state):
return 0
else:
return 1
switch_puzzle = LightSwitch('off')
switch_puzzle.label = 'Light Switch'
#version of the sixteen puzzle that is 2 by 2
# class SixteenPuzzle(search.Problem):
# def actions(self, state):
# return ['uR', 'uL','dR','dL','lD','rD','rU','rD']
#
# def result(self, state, action):
# newState = state
# if action == 'uR' or action == 'uL':
# newState[2] = state[1]
# newState[1] = state[2]
# if action == 'dR' or action == 'dL':
# newState[3] = state[4]
# newState[4] = state[3]
# if action == 'lD' or action == 'lU':
# newState[1] = state[3]
# newState[3] = state[1]
# if action == 'rD' or action == 'rU':
# newState[2] = state[4]
# newState[4] = state[2]
#
#
# def goal_test(self, state):
# return state == ['1','2','3','4']
#
# def h(self, node):
# state = node.state
# if self.goal_test(state):
# return 0
# else:
# return 1
#
class SixteenPuzzle(search.Problem):
# # def __init__(self, initial, goal=('1','2','3','4')):
# # self.initial = initial
# # self.goal = goal
#
def actions(self, state):
return ['uR', 'uL', 'dR', 'dL', 'lD', 'lU', 'rD', 'rU']
def result(self, state, action):
newState = deepcopy(state)
a,b,c,d = newState
if action == 'uR' or action == 'uL':
a, b = b, a
if action == 'dR' or action == 'dL':
c, d = d, c
if action == 'lD' or action == 'lU':
a, c = c, a
if action == 'rD' or action == 'rU':
b, d = d, b
newState = (a,b,c,d)
return newState
def goal_test(self, state):
return state == ('1','2','3','4')
def path_cost(self, c, state1, action, state2):
return c+1
def h(self, node):
state = node.state
if self.goal_test(state):
return 0
else:
return 1
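# Sanity-check sketch of the swap logic above (illustrative only; assumes the
# tuple-state convention from goal_test and that deepcopy is imported earlier
# in this file):
# p = SixteenPuzzle(('2', '1', '3', '4'))
# p.result(('2', '1', '3', '4'), 'uR') # -> ('1', '2', '3', '4')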
# class SixteenPuzzle(search.Problem):
# def actions(self, state):
# return ['uR', 'uL','dR','dL','lD','rD','rU','rD']
#
# def result(self, state, action):
# newState = deepcopy(state)
# if action == 'uR' or action == 'uL':
# newState[0] = state[1]
# newState[1] = state[0]
# if action == 'dR' or action == 'dL':
# newState[3] = state[2]
# newState[2] = state[3]
# if action == 'lD' or action == 'lU':
# newState[0] = state[2]
# newState[2] = state[0]
# if action == 'rD' or action == 'rU':
# newState[1] = state[3]
# newState[3] = state[1]
#
# return newState
# def goal_test(self, state):
# return state[0] == '1' and state[1] == '2' and state[2] == '3' and state[3] == '4'
#
# def h(self, node):
# state = node.state
# if self.goal_test(state):
# return 0
# else:
# return 1
# class SixteenPuzzle(search.Problem):
# def _init_(self,state):
# self.initial = state
# def actions(self, state):
# return ['uR', 'uL','dR','dL','lD','rD','rU','rD']
#
# def result(self, state, action):
# newState = state
# a = newState[0]
# b = newState[1]
# c = newState[2]
# d = newState[3]
# if action == 'uR' or action == 'uL':
# a = b
# b = a
# if action == 'dR' or action == 'dL':
# c = d
# d = c
# if action == 'lD' or action == 'lU':
# a = c
# c = a
# if action == 'rD' or action == 'rU':
# b = d
# d = b
# newState = a + b + c + d
# return newState
#
#
# def goal_test(self, state):
# return state == ('1234')
#
# def path_cost(self, c, state1, action, state2):
# return c+1
# def h(self, node):
# state = node.state
# if self.goal_test(state):
# return 0
# else:
# return 1
sixteen_puzzle = SixteenPuzzle(('2', '3', '4', '1'))  # tuple state, matching goal_test
sixteen_puzzle.label = 'Sixteen Puzzle'
myPuzzles = [
kcmapTopeka_puzzle,
kcmapStMarys_puzzle,
sumner_puzzle,
switch_puzzle,
#sixteen_puzzle,
] | mit |
dafrito/trac-mirror | trac/ticket/default_workflow.py | 1 | 21721 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2006-2009 Edgewall Software
# Copyright (C) 2006 Alec Thomas
# Copyright (C) 2007 Eli Carter
# Copyright (C) 2007 Christian Boos <cboos@edgewall.org>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
#
# Author: Eli Carter
import pkg_resources
from ConfigParser import RawConfigParser
from StringIO import StringIO
from genshi.builder import tag
from trac.config import Configuration, ConfigSection
from trac.core import *
from trac.env import IEnvironmentSetupParticipant
from trac.perm import PermissionSystem
from trac.ticket.api import ITicketActionController, TicketSystem
from trac.ticket.model import Resolution
from trac.util.text import obfuscate_email_address
from trac.util.translation import _, tag_, cleandoc_
from trac.web.chrome import Chrome, add_script, add_script_data
from trac.wiki.macros import WikiMacroBase
# -- Utilities for the ConfigurableTicketWorkflow
def parse_workflow_config(rawactions):
"""Given a list of options from [ticket-workflow]"""
actions = {}
for option, value in rawactions:
parts = option.split('.')
action = parts[0]
if action not in actions:
actions[action] = {'oldstates': '', 'newstate': ''}
if len(parts) == 1:
# Base name, of the syntax: old,states,here -> newstate
try:
oldstates, newstate = [x.strip() for x in value.split('->')]
except ValueError:
continue # Syntax error, a warning will be logged later
actions[action]['newstate'] = newstate
actions[action]['oldstates'] = oldstates
else:
action, attribute = option.split('.')
actions[action][attribute] = value
# Fill in the defaults for every action, and normalize them to the desired
# types
def as_list(key):
value = attributes.get(key, '')
return [item for item in (x.strip() for x in value.split(',')) if item]
for action, attributes in actions.items():
# Default the 'name' attribute to the name used in the ini file
if 'name' not in attributes:
attributes['name'] = action
# If not specified, an action is not the default.
attributes['default'] = int(attributes.get('default', 0))
# If operations are not specified, that means no operations
attributes['operations'] = as_list('operations')
# If no permissions are specified, then no permissions are needed
attributes['permissions'] = as_list('permissions')
# Normalize the oldstates
attributes['oldstates'] = as_list('oldstates')
return actions
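# Illustrative example (not from the Trac test suite): raw options such as
# [('resolve', 'new,assigned -> closed'),
# ('resolve.permissions', 'TICKET_MODIFY')]
# parse into
# {'resolve': {'name': 'resolve', 'oldstates': ['new', 'assigned'],
# 'newstate': 'closed', 'default': 0, 'operations': [],
# 'permissions': ['TICKET_MODIFY']}}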
def get_workflow_config(config):
"""Usually passed self.config, this will return the parsed ticket-workflow
section.
"""
raw_actions = list(config.options('ticket-workflow'))
actions = parse_workflow_config(raw_actions)
return actions
def load_workflow_config_snippet(config, filename):
"""Loads the ticket-workflow section from the given file (expected to be in
the 'workflows' tree) into the provided config.
"""
filename = pkg_resources.resource_filename('trac.ticket',
'workflows/%s' % filename)
new_config = Configuration(filename)
for name, value in new_config.options('ticket-workflow'):
config.set('ticket-workflow', name, value)
class ConfigurableTicketWorkflow(Component):
"""Ticket action controller which provides actions according to a
workflow defined in trac.ini.
The workflow is defined in the `[ticket-workflow]` section of the
[wiki:TracIni#ticket-workflow-section trac.ini] configuration file.
"""
ticket_workflow_section = ConfigSection('ticket-workflow',
"""The workflow for tickets is controlled by plugins. By default,
there's only a `ConfigurableTicketWorkflow` component in charge.
That component allows the workflow to be configured via this section
in the `trac.ini` file. See TracWorkflow for more details.
(''since 0.11'')""")
def __init__(self, *args, **kwargs):
self.actions = get_workflow_config(self.config)
if not '_reset' in self.actions:
# Special action that gets enabled if the current status no longer
# exists, as no other action can then change its state. (#5307)
self.actions['_reset'] = {
'default': 0,
'name': 'reset',
'newstate': 'new',
'oldstates': [], # Will not be invoked unless needed
'operations': ['reset_workflow'],
'permissions': []}
self.log.debug('Workflow actions at initialization: %s\n' %
str(self.actions))
for name, info in self.actions.iteritems():
if not info['newstate']:
self.log.warning("Ticket workflow action '%s' doesn't define "
"any transitions", name)
implements(ITicketActionController, IEnvironmentSetupParticipant)
# IEnvironmentSetupParticipant methods
def environment_created(self):
"""When an environment is created, we provide the basic-workflow,
unless a ticket-workflow section already exists.
"""
if not 'ticket-workflow' in self.config.sections():
load_workflow_config_snippet(self.config, 'basic-workflow.ini')
self.config.save()
self.actions = get_workflow_config(self.config)
def environment_needs_upgrade(self, db):
"""The environment needs an upgrade if there is no [ticket-workflow]
section in the config.
"""
return not list(self.config.options('ticket-workflow'))
def upgrade_environment(self, db):
"""Insert a [ticket-workflow] section using the original-workflow"""
load_workflow_config_snippet(self.config, 'original-workflow.ini')
self.config.save()
self.actions = get_workflow_config(self.config)
info_message = """
==== Upgrade Notice ====
The ticket Workflow is now configurable.
Your environment has been upgraded, but configured to use the original
workflow. It is recommended that you look at changing this configuration to use
basic-workflow.
Read TracWorkflow for more information (don't forget to 'wiki upgrade' as well)
"""
self.log.info(info_message.replace('\n', ' ').replace('==', ''))
print info_message
# ITicketActionController methods
def get_ticket_actions(self, req, ticket):
"""Returns a list of (weight, action) tuples that are valid for this
request and this ticket."""
# Get the list of actions that can be performed
# Determine the current status of this ticket. If this ticket is in
# the process of being modified, we need to base our information on the
# pre-modified state so that we don't try to do two (or more!) steps at
# once and get really confused.
status = ticket._old.get('status', ticket['status']) or 'new'
ticket_perm = req.perm(ticket.resource)
allowed_actions = []
for action_name, action_info in self.actions.items():
oldstates = action_info['oldstates']
if oldstates == ['*'] or status in oldstates:
# This action is valid in this state. Check permissions.
required_perms = action_info['permissions']
if self._is_action_allowed(ticket_perm, required_perms):
allowed_actions.append((action_info['default'],
action_name))
if not (status in ['new', 'closed'] or \
status in TicketSystem(self.env).get_all_status()) \
and 'TICKET_ADMIN' in ticket_perm:
# State no longer exists - add a 'reset' action if admin.
allowed_actions.append((0, '_reset'))
return allowed_actions
def _is_action_allowed(self, ticket_perm, required_perms):
if not required_perms:
return True
for permission in required_perms:
if permission in ticket_perm:
return True
return False
def get_all_status(self):
"""Return a list of all states described by the configuration.
"""
all_status = set()
for action_name, action_info in self.actions.items():
all_status.update(action_info['oldstates'])
all_status.add(action_info['newstate'])
all_status.discard('*')
all_status.discard('')
return all_status
def render_ticket_action_control(self, req, ticket, action):
self.log.debug('render_ticket_action_control: action "%s"' % action)
this_action = self.actions[action]
status = this_action['newstate']
operations = this_action['operations']
current_owner_or_empty = ticket._old.get('owner', ticket['owner'])
current_owner = current_owner_or_empty or '(none)'
if not (Chrome(self.env).show_email_addresses
or 'EMAIL_VIEW' in req.perm(ticket.resource)):
format_user = obfuscate_email_address
else:
format_user = lambda address: address
current_owner = format_user(current_owner)
control = [] # default to nothing
hints = []
if 'reset_workflow' in operations:
control.append(tag("from invalid state "))
hints.append(_("Current state no longer exists"))
if 'del_owner' in operations:
hints.append(_("The ticket will be disowned"))
if 'set_owner' in operations:
id = 'action_%s_reassign_owner' % action
selected_owner = req.args.get(id, req.authname)
if 'set_owner' in this_action:
owners = [x.strip() for x in
this_action['set_owner'].split(',')]
elif self.config.getbool('ticket', 'restrict_owner'):
perm = PermissionSystem(self.env)
owners = perm.get_users_with_permission('TICKET_MODIFY')
owners.sort()
else:
owners = None
if owners is None:
owner = req.args.get(id, req.authname)
control.append(tag_('to %(owner)s',
owner=tag.input(type='text', id=id,
name=id, value=owner)))
hints.append(_("The owner will be changed from "
"%(current_owner)s to the specified user",
current_owner=current_owner))
elif len(owners) == 1:
owner = tag.input(type='hidden', id=id, name=id,
value=owners[0])
formatted_owner = format_user(owners[0])
control.append(tag_('to %(owner)s ',
owner=tag(formatted_owner, owner)))
if ticket['owner'] != owners[0]:
hints.append(_("The owner will be changed from "
"%(current_owner)s to %(selected_owner)s",
current_owner=current_owner,
selected_owner=formatted_owner))
else:
control.append(tag_('to %(owner)s', owner=tag.select(
[tag.option(x, value=x,
selected=(x == selected_owner or None))
for x in owners],
id=id, name=id)))
hints.append(_("The owner will be changed from "
"%(current_owner)s to the selected user",
current_owner=current_owner))
elif 'set_owner_to_self' in operations and \
ticket._old.get('owner', ticket['owner']) != req.authname:
hints.append(_("The owner will be changed from %(current_owner)s "
"to %(authname)s", current_owner=current_owner,
authname=req.authname))
if 'set_resolution' in operations:
if 'set_resolution' in this_action:
resolutions = [x.strip() for x in
this_action['set_resolution'].split(',')]
else:
resolutions = [val.name for val in Resolution.select(self.env)]
if not resolutions:
raise TracError(_("Your workflow attempts to set a resolution "
"but none is defined (configuration issue, "
"please contact your Trac admin)."))
id = 'action_%s_resolve_resolution' % action
if len(resolutions) == 1:
resolution = tag.input(type='hidden', id=id, name=id,
value=resolutions[0])
control.append(tag_('as %(resolution)s',
resolution=tag(resolutions[0],
resolution)))
hints.append(_("The resolution will be set to %(name)s",
name=resolutions[0]))
else:
selected_option = req.args.get(id,
TicketSystem(self.env).default_resolution)
control.append(tag_('as %(resolution)s',
resolution=tag.select(
[tag.option(x, value=x,
selected=(x == selected_option or None))
for x in resolutions],
id=id, name=id)))
hints.append(_("The resolution will be set"))
if 'del_resolution' in operations:
hints.append(_("The resolution will be deleted"))
if 'leave_status' in operations:
control.append(_('as %(status)s ',
status= ticket._old.get('status',
ticket['status'])))
if len(operations) == 1:
hints.append(_("The owner will remain %(current_owner)s",
current_owner=current_owner)
if current_owner_or_empty else
_("The ticket will remain with no owner"))
else:
if status != '*':
hints.append(_("Next status will be '%(name)s'", name=status))
return (this_action['name'], tag(*control), '. '.join(hints) + '.'
if hints else '')
def get_ticket_changes(self, req, ticket, action):
this_action = self.actions[action]
# Enforce permissions
if not self._has_perms_for_action(req, this_action, ticket.resource):
# The user does not have any of the listed permissions, so we won't
# do anything.
return {}
updated = {}
# Status changes
status = this_action['newstate']
if status != '*':
updated['status'] = status
for operation in this_action['operations']:
if operation == 'reset_workflow':
updated['status'] = 'new'
elif operation == 'del_owner':
updated['owner'] = ''
elif operation == 'set_owner':
newowner = req.args.get('action_%s_reassign_owner' % action,
this_action.get('set_owner', '').strip())
# If there was already an owner, we get a list, [new, old],
# but if there wasn't we just get new.
if isinstance(newowner, list):
newowner = newowner[0]
updated['owner'] = newowner
elif operation == 'set_owner_to_self':
updated['owner'] = req.authname
elif operation == 'del_resolution':
updated['resolution'] = ''
elif operation == 'set_resolution':
newresolution = req.args.get('action_%s_resolve_resolution' % \
action,
this_action.get('set_resolution', '').strip())
updated['resolution'] = newresolution
# leave_status is just a no-op here, so we don't look for it.
return updated
def apply_action_side_effects(self, req, ticket, action):
pass
def _has_perms_for_action(self, req, action, resource):
required_perms = action['permissions']
if required_perms:
for permission in required_perms:
if permission in req.perm(resource):
break
else:
# The user does not have any of the listed permissions
return False
return True
# Public methods (for other ITicketActionControllers that want to use
# our config file and provide an operation for an action)
def get_actions_by_operation(self, operation):
"""Return a list of all actions with a given operation
(for use in the controller's get_all_status())
"""
actions = [(info['default'], action) for action, info
in self.actions.items()
if operation in info['operations']]
return actions
def get_actions_by_operation_for_req(self, req, ticket, operation):
"""Return list of all actions with a given operation that are valid
in the given state for the controller's get_ticket_actions().
If state='*' (the default), all actions with the given operation are
returned.
"""
# Be sure to look at the original status.
status = ticket._old.get('status', ticket['status'])
actions = [(info['default'], action) for action, info
in self.actions.items()
if operation in info['operations'] and
('*' in info['oldstates'] or
status in info['oldstates']) and
self._has_perms_for_action(req, info, ticket.resource)]
return actions
class WorkflowMacro(WikiMacroBase):
_domain = 'messages'
_description = cleandoc_(
"""Render a workflow graph.
This macro accepts a TracWorkflow configuration and renders the states
and transitions as a directed graph. If no parameters are given, the
current ticket workflow is rendered. In WikiProcessors mode the `width`
and `height` arguments can be specified.
(Defaults: `width = 800` and `height = 600`)
Examples:
{{{
[[Workflow()]]
[[Workflow(go = here -> there; return = there -> here)]]
{{{
#!Workflow width=700 height=700
leave = * -> *
leave.operations = leave_status
leave.default = 1
accept = new,assigned,accepted,reopened -> accepted
accept.permissions = TICKET_MODIFY
accept.operations = set_owner_to_self
resolve = new,assigned,accepted,reopened -> closed
resolve.permissions = TICKET_MODIFY
resolve.operations = set_resolution
reassign = new,assigned,accepted,reopened -> assigned
reassign.permissions = TICKET_MODIFY
reassign.operations = set_owner
reopen = closed -> reopened
reopen.permissions = TICKET_CREATE
reopen.operations = del_resolution
}}}
}}}
""")
def expand_macro(self, formatter, name, text, args):
if not text:
raw_actions = self.config.options('ticket-workflow')
else:
if args is None:
text = '\n'.join([line.lstrip() for line in text.split(';')])
if not '[ticket-workflow]' in text:
text = '[ticket-workflow]\n' + text
parser = RawConfigParser()
parser.readfp(StringIO(text))
raw_actions = list(parser.items('ticket-workflow'))
actions = parse_workflow_config(raw_actions)
states = list(set(
[state for action in actions.itervalues()
for state in action['oldstates']] +
[action['newstate'] for action in actions.itervalues()]))
action_names = actions.keys()
edges = []
for name, action in actions.items():
new_index = states.index(action['newstate'])
name_index = action_names.index(name)
for old_state in action['oldstates']:
old_index = states.index(old_state)
edges.append((old_index, new_index, name_index))
args = args or {}
graph = {'nodes': states, 'actions': action_names, 'edges': edges,
'width': args.get('width', 800),
'height': args.get('height', 600)}
graph_id = '%012x' % id(graph)
req = formatter.req
add_script(req, 'common/js/excanvas.js', ie_if='IE')
add_script(req, 'common/js/workflow_graph.js')
add_script_data(req, {'graph_%s' % graph_id: graph})
return tag.div(_("Enable JavaScript to display the workflow graph."),
class_='trac-workflow-graph system-message',
id='trac-workflow-graph-%s' % graph_id)
| bsd-3-clause |
pombredanne/commons | src/python/twitter/common/http/diagnostics.py | 14 | 2595 | # ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
import pstats
import sys
import threading
import traceback
try:
import cStringIO as StringIO
except ImportError:
import StringIO
try:
from twitter.common import app
HAS_APP = True
except ImportError:
HAS_APP = False
from .server import HttpServer, route
class DiagnosticsEndpoints(object):
"""
Export the thread stacks of the running process.
"""
UNHEALTHY = threading.Event()
@classmethod
def generate_stacks(cls):
threads = dict((th.ident, th) for th in threading.enumerate())
tb = []
for thread_id, stack in sys._current_frames().items():
tb.append("\n\n# Thread%s: %s (%s, %d)" % (
' (daemon)' if threads[thread_id].daemon else '',
threads[thread_id].__class__.__name__, threads[thread_id].name, thread_id))
for filename, lineno, name, line in traceback.extract_stack(stack):
tb.append(' File: "%s", line %d, in %s' % (filename, lineno, name))
if line:
tb.append(" %s" % (line.strip()))
return "\n".join(tb)
@route("/threads")
def handle_threads(self):
HttpServer.set_content_type('text/plain; charset=iso-8859-1')
return self.generate_stacks()
@route("/profile")
def handle_profile(self):
HttpServer.set_content_type('text/plain; charset=iso-8859-1')
if HAS_APP and app.profiler() is not None:
output_stream = StringIO.StringIO()
stats = pstats.Stats(app.profiler(), stream=output_stream)
stats.sort_stats('time', 'name')
stats.print_stats()
return output_stream.getvalue()
else:
return 'Profiling is disabled'
@route("/health")
def handle_health(self):
return 'UNHEALTHY' if self.UNHEALTHY.is_set() else 'OK'
| apache-2.0 |
cs207-project/TimeSeries | procs/_corr.py | 1 | 4794 | import numpy.fft as nfft
import numpy as np
import timeseries as ts
from scipy.stats import norm
# import pyfftw
import sys
#sys.path.append("/Users/yuhantang/CS207/TimeSeries/procs")
from .interface import *
def createfromlist(l):
d = new_darray(len(l))
for i in range(0,len(l)):
darray_set(d,i,l[i])
return d
def tsmaker(m, s, j):
meta={}
meta['order'] = int(np.random.choice([-5, -4, -3, -2, -1, 0, 1, 2, 3, 4, 5]))
meta['blarg'] = int(np.random.choice([1, 2]))
t = np.arange(0.0, 1.0, 0.01)
v = norm.pdf(t, m, s) + j*np.random.randn(100)
return meta, ts.TimeSeries(t, v)
def random_ts(a):
t = np.arange(0.0, 1.0, 0.01)
v = a*np.random.random(100)
return ts.TimeSeries(t, v)
def stand(x, m, s):
return (x-m)/s
def ccor(ts1, ts2):
"given two standardized time series, compute their cross-correlation using FFT"
# Round the series length up to the next power of 2, e.g. 110 -> 128
next_2 = int(2**np.ceil(np.log2(len(ts1.values()))))
#
ts1_value = ts1.values()
ts2_value = ts2.values()
ts1_container,ts2_container = [],[]
ts1_zero_container = [0]*len(ts1.values())
ts2_zero_container = [0]*len(ts2.values())
ts1_c_array,ts2_c_array = [None]*(len(ts1.values())*2),[None]*(len(ts2.values())*2)
ts1_c_array[::2] = ts1_value
ts1_c_array[1::2] = ts1_zero_container
ts2_c_array[::2] = ts2_value
ts2_c_array[1::2] = ts2_zero_container
for i in range(len(ts1_c_array)+1,next_2*2):
ts1_c_array.append(np.double(0))
for i in range(len(ts2_c_array)+1,next_2*2):
ts2_c_array.append(np.double(0))
ts1_c_array.insert(0,0)
ts2_c_array.insert(0,0)
ts1_c_array = createfromlist(np.double(ts1_c_array))
ts2_c_array = createfromlist(np.double(ts2_c_array))
four1(ts1_c_array,next_2,1)
four1(ts2_c_array,next_2,1)
for i in range(len(ts2.values())*2+1):
ts1_container.append(darray_get(ts1_c_array,i))
for j in range(len(ts1.values())*2+1):
ts2_container.append(darray_get(ts2_c_array,j))
ts1_fft = np.asarray(ts1_container[1::2]) + 1j * np.asarray(ts1_container[2::2])
ts2_fft = np.asarray(ts2_container[1::2]) + 1j * np.asarray(ts2_container[2::2])
ts1_fft = ts1_fft[:len(ts1)+1]
ts2_fft = ts2_fft[:len(ts2)+1]
# ifft part
ts1_ts2_conj = ts1_fft * np.conj(ts2_fft)
ts1_ts2_ifft_container = [0]*len(ts1_ts2_conj)*2
ts1_ts2_ifft_container[::2] = ts1_ts2_conj.real
ts1_ts2_ifft_container[1::2] = ts1_ts2_conj.imag
for i in range(len(ts1_ts2_conj)+1, next_2 *2):
ts1_ts2_ifft_container.append(0)
ts1_ts2_ifft_container.insert(0,0)
ts1_ts2_ifft_container = createfromlist(ts1_ts2_ifft_container)
four1(ts1_ts2_ifft_container, next_2, -1)
ts1_ts2_ifft_container_python = []
for i in range(len(ts1_ts2_conj)*2+1):
ts1_ts2_ifft_container_python.append(darray_get(ts1_ts2_ifft_container,i))
ccor_value = np.asarray(ts1_ts2_ifft_container_python[1::2])
return 1/len(ts1) * ccor_value
def max_corr_at_phase(ts1, ts2):
ccorts = ccor(ts1, ts2)
idx = np.argmax(ccorts)
maxcorr = ccorts[idx]
return idx, maxcorr
#The equation for the kernelized cross correlation is given at
#http://www.cs.tufts.edu/~roni/PUB/ecml09-tskernels.pdf
#normalize the kernel there by np.sqrt(K(x,x)K(y,y)) so that the correlation
#of a time series with itself is 1.
def kernel_corr(ts1, ts2, mult=1):
"compute a kernelized correlation so that we can get a real distance"
#your code here.
cross_correlation = ccor(ts1, ts2) * mult
corr_ts1, corr_ts2 = ccor(ts1, ts1) * mult, ccor(ts2, ts2) * mult
return np.sum(np.exp(cross_correlation))/np.sqrt(np.sum(np.exp(corr_ts1))*np.sum(np.exp(corr_ts2)))
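# Because the kernel is normalized so that kernel_corr(ts, ts) == 1, a real
# distance can be derived from it via the standard kernel-distance identity
# (sketch): d(ts1, ts2) = sqrt(2 - 2 * kernel_corr(ts1, ts2))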
#this is for a quick and dirty test of these functions
#you might need to add procs to pythonpath for this to work
if __name__ == "__main__":
print("HI")
_, t1 = tsmaker(0.5, 0.1, 0.01)
_, t2 = tsmaker(0.5, 0.1, 0.01)
print(t1.mean(), t1.std(), t2.mean(), t2.std())
import matplotlib.pyplot as plt
plt.plot(t1)
plt.plot(t2)
plt.show()
standts1 = stand(t1, t1.mean(), t1.std())
standts2 = stand(t2, t2.mean(), t2.std())
#print(type(standts1),'this is the type=================*********')
#assert 1 == 2
idx, mcorr = max_corr_at_phase(standts1, standts2)
print(idx, mcorr)
sumcorr = kernel_corr(standts1, standts2, mult=10)
print(sumcorr)
t3 = random_ts(2)
t4 = random_ts(3)
plt.plot(t3)
plt.plot(t4)
plt.show()
standts3 = stand(t3, t3.mean(), t3.std())
standts4 = stand(t4, t4.mean(), t4.std())
idx, mcorr = max_corr_at_phase(standts3, standts4)
print(idx, mcorr)
sumcorr = kernel_corr(standts3, standts4, mult=10)
print(sumcorr)
| mit |
gkoelln/youtube-dl | youtube_dl/extractor/arkena.py | 41 | 5331 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_urlparse
from ..utils import (
determine_ext,
ExtractorError,
float_or_none,
int_or_none,
mimetype2ext,
parse_iso8601,
strip_jsonp,
)
class ArkenaIE(InfoExtractor):
_VALID_URL = r'''(?x)
https?://
(?:
video\.arkena\.com/play2/embed/player\?|
play\.arkena\.com/(?:config|embed)/avp/v\d/player/media/(?P<id>[^/]+)/[^/]+/(?P<account_id>\d+)
)
'''
_TESTS = [{
'url': 'https://play.arkena.com/embed/avp/v2/player/media/b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe/1/129411',
'md5': 'b96f2f71b359a8ecd05ce4e1daa72365',
'info_dict': {
'id': 'b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe',
'ext': 'mp4',
'title': 'Big Buck Bunny',
'description': 'Royalty free test video',
'timestamp': 1432816365,
'upload_date': '20150528',
'is_live': False,
},
}, {
'url': 'https://play.arkena.com/config/avp/v2/player/media/b41dda37-d8e7-4d3f-b1b5-9a9db578bdfe/1/129411/?callbackMethod=jQuery1111023664739129262213_1469227693893',
'only_matching': True,
}, {
'url': 'http://play.arkena.com/config/avp/v1/player/media/327336/darkmatter/131064/?callbackMethod=jQuery1111002221189684892677_1469227595972',
'only_matching': True,
}, {
'url': 'http://play.arkena.com/embed/avp/v1/player/media/327336/darkmatter/131064/',
'only_matching': True,
}, {
'url': 'http://video.arkena.com/play2/embed/player?accountId=472718&mediaId=35763b3b-00090078-bf604299&pageStyling=styled',
'only_matching': True,
}]
@staticmethod
def _extract_url(webpage):
# See https://support.arkena.com/display/PLAY/Ways+to+embed+your+video
mobj = re.search(
r'<iframe[^>]+src=(["\'])(?P<url>(?:https?:)?//play\.arkena\.com/embed/avp/.+?)\1',
webpage)
if mobj:
return mobj.group('url')
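# e.g. (illustrative markup) this matches embeds such as:
# <iframe src="https://play.arkena.com/embed/avp/v2/player/media/<media-id>/1/<account-id>"></iframe>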
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
account_id = mobj.group('account_id')
# Handle http://video.arkena.com/play2/embed/player URL
if not video_id:
qs = compat_urlparse.parse_qs(compat_urlparse.urlparse(url).query)
video_id = qs.get('mediaId', [None])[0]
account_id = qs.get('accountId', [None])[0]
if not video_id or not account_id:
raise ExtractorError('Invalid URL', expected=True)
playlist = self._download_json(
'https://play.arkena.com/config/avp/v2/player/media/%s/0/%s/?callbackMethod=_'
% (video_id, account_id),
video_id, transform_source=strip_jsonp)['Playlist'][0]
media_info = playlist['MediaInfo']
title = media_info['Title']
media_files = playlist['MediaFiles']
is_live = False
formats = []
for kind_case, kind_formats in media_files.items():
kind = kind_case.lower()
for f in kind_formats:
f_url = f.get('Url')
if not f_url:
continue
is_live = f.get('Live') == 'true'
exts = (mimetype2ext(f.get('Type')), determine_ext(f_url, None))
if kind == 'm3u8' or 'm3u8' in exts:
formats.extend(self._extract_m3u8_formats(
f_url, video_id, 'mp4', 'm3u8_native',
m3u8_id=kind, fatal=False, live=is_live))
elif kind == 'flash' or 'f4m' in exts:
formats.extend(self._extract_f4m_formats(
f_url, video_id, f4m_id=kind, fatal=False))
elif kind == 'dash' or 'mpd' in exts:
formats.extend(self._extract_mpd_formats(
f_url, video_id, mpd_id=kind, fatal=False))
elif kind == 'silverlight':
# TODO: process when ism is supported (see
# https://github.com/rg3/youtube-dl/issues/8118)
continue
else:
tbr = float_or_none(f.get('Bitrate'), 1000)
formats.append({
'url': f_url,
'format_id': '%s-%d' % (kind, tbr) if tbr else kind,
'tbr': tbr,
})
self._sort_formats(formats)
description = media_info.get('Description')
video_id = media_info.get('VideoId') or video_id
timestamp = parse_iso8601(media_info.get('PublishDate'))
thumbnails = [{
'url': thumbnail['Url'],
'width': int_or_none(thumbnail.get('Size')),
} for thumbnail in (media_info.get('Poster') or []) if thumbnail.get('Url')]
return {
'id': video_id,
'title': title,
'description': description,
'timestamp': timestamp,
'is_live': is_live,
'thumbnails': thumbnails,
'formats': formats,
}
| unlicense |
benjaminoh1/tensorflowcookbook | Chapter 07/bag_of_words.py | 1 | 6082 | # Working with Bag of Words
#---------------------------------------
#
# In this example, we will download and preprocess the ham/spam
# text data. We will then use a one-hot-encoding to make a
# bag of words set of features to use in logistic regression.
#
# We will use these one-hot-vectors for logistic regression to
# predict if a text is spam or ham.
import tensorflow as tf
import matplotlib.pyplot as plt
import os
import numpy as np
import csv
import string
import requests
import io
from zipfile import ZipFile
from tensorflow.contrib import learn
from tensorflow.python.framework import ops
ops.reset_default_graph()
# Start a graph session
sess = tf.Session()
# Check if data was downloaded, otherwise download it and save for future use
save_file_name = os.path.join('temp','temp_spam_data.csv')
if os.path.isfile(save_file_name):
text_data = []
with open(save_file_name, 'r') as temp_output_file:
reader = csv.reader(temp_output_file)
for row in reader:
text_data.append(row)
else:
zip_url = 'http://archive.ics.uci.edu/ml/machine-learning-databases/00228/smsspamcollection.zip'
r = requests.get(zip_url)
z = ZipFile(io.BytesIO(r.content))
file = z.read('SMSSpamCollection')
# Format Data
text_data = file.decode()
text_data = text_data.encode('ascii',errors='ignore')
text_data = text_data.decode().split('\n')
text_data = [x.split('\t') for x in text_data if len(x)>=1]
# And write to csv
with open(save_file_name, 'w') as temp_output_file:
writer = csv.writer(temp_output_file)
writer.writerows(text_data)
texts = [x[1] for x in text_data]
target = [x[0] for x in text_data]
# Relabel 'spam' as 1, 'ham' as 0
target = [1 if x=='spam' else 0 for x in target]
# Normalize text
# Lower case
texts = [x.lower() for x in texts]
# Remove punctuation
texts = [''.join(c for c in x if c not in string.punctuation) for x in texts]
# Remove numbers
texts = [''.join(c for c in x if c not in '0123456789') for x in texts]
# Trim extra whitespace
texts = [' '.join(x.split()) for x in texts]
# Plot histogram of text lengths
text_lengths = [len(x.split()) for x in texts]
text_lengths = [x for x in text_lengths if x < 50]
plt.hist(text_lengths, bins=25)
plt.title('Histogram of # of Words in Texts')
# Choose max text word length at 25
sentence_size = 25
min_word_freq = 3
# Setup vocabulary processor
vocab_processor = learn.preprocessing.VocabularyProcessor(sentence_size, min_frequency=min_word_freq)
# Fit the processor once so the vocabulary is built and its size is known.
vocab_processor.fit_transform(texts)
embedding_size = len(vocab_processor.vocabulary_)
# Split up data set into train/test
train_indices = np.random.choice(len(texts), round(len(texts)*0.8), replace=False)
test_indices = np.array(list(set(range(len(texts))) - set(train_indices)))
texts_train = [x for ix, x in enumerate(texts) if ix in train_indices]
texts_test = [x for ix, x in enumerate(texts) if ix in test_indices]
target_train = [x for ix, x in enumerate(target) if ix in train_indices]
target_test = [x for ix, x in enumerate(target) if ix in test_indices]
# Setup Index Matrix for one-hot-encoding
identity_mat = tf.diag(tf.ones(shape=[embedding_size]))
# Create variables for logistic regression
A = tf.Variable(tf.random_normal(shape=[embedding_size,1]))
b = tf.Variable(tf.random_normal(shape=[1,1]))
# Initialize placeholders
x_data = tf.placeholder(shape=[sentence_size], dtype=tf.int32)
y_target = tf.placeholder(shape=[1, 1], dtype=tf.float32)
# Text-Vocab Embedding
x_embed = tf.nn.embedding_lookup(identity_mat, x_data)
x_col_sums = tf.reduce_sum(x_embed, 0)
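# Illustrative example (hypothetical indices): for word indices [2, 0, 2],
# x_embed stacks rows 2, 0 and 2 of the identity matrix, so x_col_sums is a
# vocabulary-length count vector with 1 at index 0 and 2 at index 2.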
# Declare model operations
x_col_sums_2D = tf.expand_dims(x_col_sums, 0)
model_output = tf.add(tf.matmul(x_col_sums_2D, A), b)
# Declare loss function (Cross Entropy loss)
loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(model_output, y_target))
# Prediction operation
prediction = tf.sigmoid(model_output)
# Declare optimizer
my_opt = tf.train.GradientDescentOptimizer(0.001)
train_step = my_opt.minimize(loss)
# Intitialize Variables
init = tf.initialize_all_variables()
sess.run(init)
# Start Logistic Regression
print('Starting Training Over {} Sentences.'.format(len(texts_train)))
loss_vec = []
train_acc_all = []
train_acc_avg = []
for ix, t in enumerate(vocab_processor.fit_transform(texts_train)):
y_data = [[target_train[ix]]]
sess.run(train_step, feed_dict={x_data: t, y_target: y_data})
temp_loss = sess.run(loss, feed_dict={x_data: t, y_target: y_data})
loss_vec.append(temp_loss)
if (ix+1)%10==0:
print('Training Observation #' + str(ix+1) + ': Loss = ' + str(temp_loss))
# Keep trailing average of past 50 observations accuracy
# Get prediction of single observation
[[temp_pred]] = sess.run(prediction, feed_dict={x_data:t, y_target:y_data})
# Get True/False if prediction is accurate
train_acc_temp = target_train[ix]==np.round(temp_pred)
train_acc_all.append(train_acc_temp)
if len(train_acc_all) >= 50:
train_acc_avg.append(np.mean(train_acc_all[-50:]))
# Get test set accuracy
print('Getting Test Set Accuracy For {} Sentences.'.format(len(texts_test)))
test_acc_all = []
for ix, t in enumerate(vocab_processor.fit_transform(texts_test)):
y_data = [[target_test[ix]]]
if (ix+1)%50==0:
print('Test Observation #' + str(ix+1))
# Get prediction of single observation
[[temp_pred]] = sess.run(prediction, feed_dict={x_data:t, y_target:y_data})
# Get True/False if prediction is accurate
test_acc_temp = target_test[ix]==np.round(temp_pred)
test_acc_all.append(test_acc_temp)
print('\nOverall Test Accuracy: {}'.format(np.mean(test_acc_all)))
# Plot training accuracy over time
plt.plot(range(len(train_acc_avg)), train_acc_avg, 'k-', label='Train Accuracy')
plt.title('Avg Training Acc Over Past 50 Generations')
plt.xlabel('Generation')
plt.ylabel('Training Accuracy')
plt.show() | mit |
jcpowermac/ansible | lib/ansible/plugins/lookup/dict.py | 31 | 1955 | # (c) 2014, Kent R. Spillner <kspillner@acm.org>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: dict
version_added: "1.5"
short_description: returns key/value pair items from dictionaries
description:
- Takes dictionaries as input and returns a list with each item in the list being a dictionary with 'key' and 'value' as
keys to the previous dictionary's structure.
options:
_terms:
description:
- A list of dictionaries
required: True
"""
EXAMPLES = """
tasks:
- name: show dictionary
debug: msg="{{item.key}}: {{item.value}}"
with_dict: {a: 1, b: 2, c: 3}
# with predefined vars
vars:
users:
alice:
name: Alice Appleworth
telephone: 123-456-7890
bob:
name: Bob Bananarama
telephone: 987-654-3210
tasks:
- name: Print phone records
debug:
msg: "User {{ item.key }} is {{ item.value.name }} ({{ item.value.telephone }})"
loop: "{{ lookup('dict', users) }}"
"""
RETURN = """
_list:
description:
- list of composed dictionaries with key and value
type: list
"""
import collections
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
# FIXME: can remove once with_ special case is removed
if not isinstance(terms, list):
terms = [terms]
results = []
for term in terms:
# Expect any type of Mapping, notably hostvars
if not isinstance(term, collections.Mapping):
raise AnsibleError("with_dict expects a dict")
results.extend(self._flatten_hash_to_list(term))
return results
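# Illustrative result (hypothetical input): lookup('dict', {'a': 1, 'b': 2})
# yields [{'key': 'a', 'value': 1}, {'key': 'b', 'value': 2}].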
| gpl-3.0 |
itskewpie/tempest | tempest/services/identity/v3/json/policy_client.py | 6 | 3011 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
from urlparse import urlparse
from tempest.common.rest_client import RestClient
class PolicyClientJSON(RestClient):
def __init__(self, config, username, password, auth_url, tenant_name=None):
super(PolicyClientJSON, self).__init__(config, username, password,
auth_url, tenant_name)
self.service = self.config.identity.catalog_type
self.endpoint_url = 'adminURL'
def request(self, method, url, headers=None, body=None, wait=None):
"""Overriding the existing HTTP request in super class rest_client."""
self._set_auth()
self.base_url = self.base_url.replace(urlparse(self.base_url).path,
"/v3")
return super(PolicyClientJSON, self).request(method, url,
headers=headers,
body=body)
def create_policy(self, blob, type):
"""Creates a Policy."""
post_body = {
"blob": blob,
"type": type
}
post_body = json.dumps({'policy': post_body})
resp, body = self.post('policies', post_body, self.headers)
body = json.loads(body)
return resp, body['policy']
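# Illustrative request body (hypothetical values):
# {"policy": {"blob": "<serialized policy rules>", "type": "application/json"}}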
def list_policies(self):
"""Lists the policies."""
resp, body = self.get('policies')
body = json.loads(body)
return resp, body['policies']
def get_policy(self, policy_id):
"""Lists out the given policy."""
url = 'policies/%s' % policy_id
resp, body = self.get(url)
body = json.loads(body)
return resp, body['policy']
def update_policy(self, policy_id, **kwargs):
"""Updates a policy."""
resp, body = self.get_policy(policy_id)
type = kwargs.get('type')
post_body = {
'type': type
}
post_body = json.dumps({'policy': post_body})
url = 'policies/%s' % policy_id
resp, body = self.patch(url, post_body,
self.headers)
body = json.loads(body)
return resp, body['policy']
def delete_policy(self, policy_id):
"""Deletes the policy."""
url = "policies/%s" % policy_id
return self.delete(url)
| apache-2.0 |
antiface/ThinkBayes2 | code/cookie3.py | 1 | 1095 | """This file contains code for use with "Think Bayes",
by Allen B. Downey, available from greenteapress.com
Copyright 2014 Allen B. Downey
License: GNU GPLv3 http://www.gnu.org/licenses/gpl.html
"""
from __future__ import print_function, division
import thinkbayes2
class Cookie(thinkbayes2.Suite):
"""A map from string bowl ID to probablity."""
def Likelihood(self, data, hypo):
"""The likelihood of the data under the hypothesis.
data: string cookie type
hypo: string bowl ID
"""
like = hypo[data] / hypo.Total()
if like:
hypo[data] -= 1
return like
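# Note: decrementing hypo[data] above models drawing cookies without
# replacement, so each successive update conditions on the cookies
# already drawn from that bowl.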
def main():
bowl1 = thinkbayes2.Hist(dict(vanilla=30, chocolate=10))
bowl2 = thinkbayes2.Hist(dict(vanilla=20, chocolate=20))
pmf = Cookie([bowl1, bowl2])
print('After 1 vanilla')
pmf.Update('vanilla')
for hypo, prob in pmf.Items():
print(hypo, prob)
print('\nAfter 1 vanilla, 1 chocolate')
pmf.Update('chocolate')
for hypo, prob in pmf.Items():
print(hypo, prob)
if __name__ == '__main__':
main()
| gpl-2.0 |
michaelgugino/turbo-lister | sqlalchemy/engine/interfaces.py | 2 | 26596 | # engine/interfaces.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Define core interfaces used by the engine system."""
from .. import util, event
# backwards compat
from ..sql.compiler import Compiled, TypeCompiler
class Dialect(object):
"""Define the behavior of a specific database and DB-API combination.
Any aspect of metadata definition, SQL query generation,
execution, result-set handling, or anything else which varies
between databases is defined under the general category of the
Dialect. The Dialect acts as a factory for other
database-specific object implementations including
ExecutionContext, Compiled, DefaultGenerator, and TypeEngine.
All Dialects implement the following attributes:
name
identifying name for the dialect from a DBAPI-neutral point of view
(i.e. 'sqlite')
driver
identifying name for the dialect's DBAPI
positional
True if the paramstyle for this Dialect is positional.
paramstyle
the paramstyle to be used (some DB-APIs support multiple
paramstyles).
convert_unicode
True if Unicode conversion should be applied to all ``str``
types.
encoding
type of encoding to use for unicode, usually defaults to
'utf-8'.
statement_compiler
a :class:`.Compiled` class used to compile SQL statements
ddl_compiler
a :class:`.Compiled` class used to compile DDL statements
server_version_info
a tuple containing a version number for the DB backend in use.
This value is only available for supporting dialects, and is
typically populated during the initial connection to the database.
default_schema_name
the name of the default schema. This value is only available for
supporting dialects, and is typically populated during the
initial connection to the database.
execution_ctx_cls
a :class:`.ExecutionContext` class used to handle statement execution
execute_sequence_format
either the 'tuple' or 'list' type, depending on what cursor.execute()
accepts for the second argument (they vary).
preparer
a :class:`~sqlalchemy.sql.compiler.IdentifierPreparer` class used to
quote identifiers.
supports_alter
``True`` if the database supports ``ALTER TABLE``.
max_identifier_length
The maximum length of identifier names.
supports_unicode_statements
Indicate whether the DB-API can receive SQL statements as Python
unicode strings
supports_unicode_binds
Indicate whether the DB-API can receive string bind parameters
as Python unicode strings
supports_sane_rowcount
Indicate whether the dialect properly implements rowcount for
``UPDATE`` and ``DELETE`` statements.
supports_sane_multi_rowcount
Indicate whether the dialect properly implements rowcount for
``UPDATE`` and ``DELETE`` statements when executed via
executemany.
preexecute_autoincrement_sequences
True if 'implicit' primary key functions must be executed separately
in order to get their value. This is currently oriented towards
Postgresql.
implicit_returning
use RETURNING or equivalent during INSERT execution in order to load
newly generated primary keys and other column defaults in one execution,
which are then available via inserted_primary_key.
If an insert statement has returning() specified explicitly,
the "implicit" functionality is not used and inserted_primary_key
will not be available.
dbapi_type_map
A mapping of DB-API type objects present in this Dialect's
DB-API implementation mapped to TypeEngine implementations used
by the dialect.
This is used to apply types to result sets based on the DB-API
types present in cursor.description; it only takes effect for
result sets against textual statements where no explicit
typemap was present.
colspecs
A dictionary of TypeEngine classes from sqlalchemy.types mapped
to subclasses that are specific to the dialect class. This
dictionary is class-level only and is not accessed from the
dialect instance itself.
supports_default_values
Indicates if the construct ``INSERT INTO tablename DEFAULT
VALUES`` is supported
supports_sequences
Indicates if the dialect supports CREATE SEQUENCE or similar.
sequences_optional
If True, indicates if the "optional" flag on the Sequence() construct
should signal to not generate a CREATE SEQUENCE. Applies only to
dialects that support sequences. Currently used only to allow Postgresql
SERIAL to be used on a column that specifies Sequence() for usage on
other backends.
supports_native_enum
Indicates if the dialect supports a native ENUM construct.
This will prevent types.Enum from generating a CHECK
constraint when that type is used.
supports_native_boolean
Indicates if the dialect supports a native boolean construct.
This will prevent types.Boolean from generating a CHECK
constraint when that type is used.
"""
def create_connect_args(self, url):
"""Build DB-API compatible connection arguments.
Given a :class:`~sqlalchemy.engine.url.URL` object, returns a tuple
consisting of a `*args`/`**kwargs` suitable to send directly
to the dbapi's connect function.
"""
raise NotImplementedError()
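# Illustrative sketch (not SQLAlchemy source) of a typical implementation:
# def create_connect_args(self, url):
# opts = url.translate_connect_args(username='user')
# opts.update(url.query)
# return [], opts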
@classmethod
def type_descriptor(cls, typeobj):
"""Transform a generic type to a dialect-specific type.
Dialect classes will usually use the
:func:`.types.adapt_type` function in the types module to
accomplish this.
The returned result is cached *per dialect class* so can
contain no dialect-instance state.
"""
raise NotImplementedError()
def initialize(self, connection):
"""Called during strategized creation of the dialect with a
connection.
Allows dialects to configure options based on server version info or
other properties.
The connection passed here is a SQLAlchemy Connection object,
with full capabilities.
The initialize() method of the base dialect should be called via
super().
"""
pass
def reflecttable(self, connection, table, include_columns, exclude_columns):
"""Load table description from the database.
Given a :class:`.Connection` and a
:class:`~sqlalchemy.schema.Table` object, reflect its columns and
properties from the database.
The implementation of this method is provided by
:meth:`.DefaultDialect.reflecttable`, which makes use of
:class:`.Inspector` to retrieve column information.
Dialects should **not** seek to implement this method, and should
instead implement individual schema inspection operations such as
:meth:`.Dialect.get_columns`, :meth:`.Dialect.get_pk_constraint`,
etc.
"""
raise NotImplementedError()
def get_columns(self, connection, table_name, schema=None, **kw):
"""Return information about columns in `table_name`.
Given a :class:`.Connection`, a string
`table_name`, and an optional string `schema`, return column
information as a list of dictionaries with these keys:
name
the column's name
type
[sqlalchemy.types#TypeEngine]
nullable
boolean
default
the column's default value
autoincrement
boolean
sequence
a dictionary of the form
{'name' : str, 'start' :int, 'increment': int}
Additional column attributes may be present.
"""
raise NotImplementedError()
def get_primary_keys(self, connection, table_name, schema=None, **kw):
"""Return information about primary keys in `table_name`.
Deprecated. This method is only called by the default
implementation of :meth:`.Dialect.get_pk_constraint`. Dialects should
instead implement the :meth:`.Dialect.get_pk_constraint` method directly.
"""
raise NotImplementedError()
def get_pk_constraint(self, connection, table_name, schema=None, **kw):
"""Return information about the primary key constraint on
`table_name`.
Given a :class:`.Connection`, a string
`table_name`, and an optional string `schema`, return primary
key information as a dictionary with these keys:
constrained_columns
a list of column names that make up the primary key
name
optional name of the primary key constraint.
"""
raise NotImplementedError()
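# Illustrative return value (hypothetical table):
# {'constrained_columns': ['id'], 'name': 'pk_mytable'}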
def get_foreign_keys(self, connection, table_name, schema=None, **kw):
"""Return information about foreign_keys in `table_name`.
Given a :class:`.Connection`, a string
`table_name`, and an optional string `schema`, return foreign
key information as a list of dicts with these keys:
name
the constraint's name
constrained_columns
a list of column names that make up the foreign key
referred_schema
the name of the referred schema
referred_table
the name of the referred table
referred_columns
a list of column names in the referred table that correspond to
constrained_columns
"""
raise NotImplementedError()
def get_table_names(self, connection, schema=None, **kw):
"""Return a list of table names for `schema`."""
raise NotImplementedError
def get_view_names(self, connection, schema=None, **kw):
"""Return a list of all view names available in the database.
schema:
Optional, retrieve names from a non-default schema.
"""
raise NotImplementedError()
def get_view_definition(self, connection, view_name, schema=None, **kw):
"""Return view definition.
Given a :class:`.Connection`, a string
`view_name`, and an optional string `schema`, return the view
definition.
"""
raise NotImplementedError()
def get_indexes(self, connection, table_name, schema=None, **kw):
"""Return information about indexes in `table_name`.
Given a :class:`.Connection`, a string
`table_name` and an optional string `schema`, return index
information as a list of dictionaries with these keys:
name
the index's name
column_names
list of column names in order
unique
boolean
"""
raise NotImplementedError()
def get_unique_constraints(self, table_name, schema=None, **kw):
"""Return information about unique constraints in `table_name`.
Given a string `table_name` and an optional string `schema`, return
unique constraint information as a list of dicts with these keys:
name
the unique constraint's name
column_names
list of column names in order
\**kw
other options passed to the dialect's get_unique_constraints() method.
.. versionadded:: 0.9.0
"""
raise NotImplementedError()
def normalize_name(self, name):
"""convert the given name to lowercase if it is detected as
case insensitive.
this method is only used if the dialect defines
requires_name_normalize=True.
"""
raise NotImplementedError()
def denormalize_name(self, name):
"""convert the given name to a case insensitive identifier
for the backend if it is an all-lowercase name.
this method is only used if the dialect defines
requires_name_normalize=True.
"""
raise NotImplementedError()
def has_table(self, connection, table_name, schema=None):
"""Check the existence of a particular table in the database.
Given a :class:`.Connection` object and a string
`table_name`, return True if the given table (possibly within
the specified `schema`) exists in the database, False
otherwise.
"""
raise NotImplementedError()
def has_sequence(self, connection, sequence_name, schema=None):
"""Check the existence of a particular sequence in the database.
Given a :class:`.Connection` object and a string
`sequence_name`, return True if the given sequence exists in
the database, False otherwise.
"""
raise NotImplementedError()
def _get_server_version_info(self, connection):
"""Retrieve the server version info from the given connection.
This is used by the default implementation to populate the
"server_version_info" attribute and is called exactly
once upon first connect.
"""
raise NotImplementedError()
def _get_default_schema_name(self, connection):
"""Return the string name of the currently selected schema from
the given connection.
This is used by the default implementation to populate the
"default_schema_name" attribute and is called exactly
once upon first connect.
"""
raise NotImplementedError()
def do_begin(self, dbapi_connection):
"""Provide an implementation of ``connection.begin()``, given a
DB-API connection.
The DBAPI has no dedicated "begin" method and it is expected
that transactions are implicit. This hook is provided for those
DBAPIs that might need additional help in this area.
Note that :meth:`.Dialect.do_begin` is not called unless a
:class:`.Transaction` object is in use. The
:meth:`.Dialect.do_autocommit`
hook is provided for DBAPIs that need some extra commands emitted
after a commit in order to enter the next transaction, when the
SQLAlchemy :class:`.Connection` is used in its default "autocommit"
mode.
:param dbapi_connection: a DBAPI connection, typically
proxied within a :class:`.ConnectionFairy`.
"""
raise NotImplementedError()
def do_rollback(self, dbapi_connection):
"""Provide an implementation of ``connection.rollback()``, given
a DB-API connection.
:param dbapi_connection: a DBAPI connection, typically
proxied within a :class:`.ConnectionFairy`.
"""
raise NotImplementedError()
def do_commit(self, dbapi_connection):
"""Provide an implementation of ``connection.commit()``, given a
DB-API connection.
:param dbapi_connection: a DBAPI connection, typically
proxied within a :class:`.ConnectionFairy`.
"""
raise NotImplementedError()
def do_close(self, dbapi_connection):
"""Provide an implementation of ``connection.close()``, given a DBAPI
connection.
This hook is called by the :class:`.Pool` when a connection has been
detached from the pool, or is being returned beyond the normal
capacity of the pool.
.. versionadded:: 0.8
"""
raise NotImplementedError()
def create_xid(self):
"""Create a two-phase transaction ID.
This id will be passed to do_begin_twophase(),
do_rollback_twophase(), do_commit_twophase(). Its format is
unspecified.
"""
raise NotImplementedError()
def do_savepoint(self, connection, name):
"""Create a savepoint with the given name.
:param connection: a :class:`.Connection`.
:param name: savepoint name.
"""
raise NotImplementedError()
def do_rollback_to_savepoint(self, connection, name):
"""Rollback a connection to the named savepoint.
:param connection: a :class:`.Connection`.
:param name: savepoint name.
"""
raise NotImplementedError()
def do_release_savepoint(self, connection, name):
"""Release the named savepoint on a connection.
:param connection: a :class:`.Connection`.
:param name: savepoint name.
"""
raise NotImplementedError()
def do_begin_twophase(self, connection, xid):
"""Begin a two phase transaction on the given connection.
:param connection: a :class:`.Connection`.
:param xid: xid
"""
raise NotImplementedError()
def do_prepare_twophase(self, connection, xid):
"""Prepare a two phase transaction on the given connection.
:param connection: a :class:`.Connection`.
:param xid: xid
"""
raise NotImplementedError()
def do_rollback_twophase(self, connection, xid, is_prepared=True,
recover=False):
"""Rollback a two phase transaction on the given connection.
:param connection: a :class:`.Connection`.
:param xid: xid
:param is_prepared: whether or not
:meth:`.TwoPhaseTransaction.prepare` was called.
:param recover: if the recover flag was passed.
"""
raise NotImplementedError()
def do_commit_twophase(self, connection, xid, is_prepared=True,
recover=False):
"""Commit a two phase transaction on the given connection.
:param connection: a :class:`.Connection`.
:param xid: xid
:param is_prepared: whether or not
:meth:`.TwoPhaseTransaction.prepare` was called.
:param recover: if the recover flag was passed.
"""
raise NotImplementedError()
def do_recover_twophase(self, connection):
"""Recover list of uncommited prepared two phase transaction
identifiers on the given connection.
:param connection: a :class:`.Connection`.
"""
raise NotImplementedError()
def do_executemany(self, cursor, statement, parameters, context=None):
"""Provide an implementation of ``cursor.executemany(statement,
parameters)``."""
raise NotImplementedError()
def do_execute(self, cursor, statement, parameters, context=None):
"""Provide an implementation of ``cursor.execute(statement,
parameters)``."""
raise NotImplementedError()
def do_execute_no_params(self, cursor, statement, parameters,
context=None):
"""Provide an implementation of ``cursor.execute(statement)``.
The parameter collection should not be sent.
"""
raise NotImplementedError()
def is_disconnect(self, e, connection, cursor):
"""Return True if the given DB-API error indicates an invalid
connection"""
raise NotImplementedError()
def connect(self):
"""return a callable which sets up a newly created DBAPI connection.
The callable accepts a single argument "conn" which is the
DBAPI connection itself. It has no return value.
This is used to set dialect-wide per-connection options such as
isolation modes, unicode modes, etc.
If a callable is returned, it will be assembled into a pool listener
that receives the direct DBAPI connection, with all wrappers removed.
If None is returned, no listener will be generated.
"""
return None
def reset_isolation_level(self, dbapi_conn):
"""Given a DBAPI connection, revert its isolation to the default."""
raise NotImplementedError()
def set_isolation_level(self, dbapi_conn, level):
"""Given a DBAPI connection, set its isolation level."""
raise NotImplementedError()
def get_isolation_level(self, dbapi_conn):
"""Given a DBAPI connection, return its isolation level."""
raise NotImplementedError()
class ExecutionContext(object):
"""A messenger object for a Dialect that corresponds to a single
execution.
ExecutionContext should have these data members:
connection
Connection object which can be freely used by default value
generators to execute SQL. This Connection should reference the
same underlying connection/transactional resources of
root_connection.
root_connection
Connection object which is the source of this ExecutionContext. This
Connection may have close_with_result=True set, in which case it can
only be used once.
dialect
dialect which created this ExecutionContext.
cursor
DB-API cursor procured from the connection,
compiled
if passed to constructor, sqlalchemy.engine.base.Compiled object
being executed,
statement
string version of the statement to be executed. Is either
passed to the constructor, or must be created from the
sql.Compiled object by the time pre_exec() has completed.
parameters
bind parameters passed to the execute() method. For compiled
statements, this is a dictionary or list of dictionaries. For
textual statements, it should be in a format suitable for the
dialect's paramstyle (i.e. dict or list of dicts for non
positional, list or list of lists/tuples for positional).
isinsert
True if the statement is an INSERT.
isupdate
True if the statement is an UPDATE.
should_autocommit
True if the statement is a "committable" statement.
prefetch_cols
a list of Column objects for which a client-side default
was fired off. Applies to inserts and updates.
postfetch_cols
a list of Column objects for which a server-side default or
inline SQL expression value was fired off. Applies to inserts
and updates.
"""
def create_cursor(self):
"""Return a new cursor generated from this ExecutionContext's
connection.
Some dialects may wish to change the behavior of
connection.cursor(), such as postgresql which may return a PG
"server side" cursor.
"""
raise NotImplementedError()
def pre_exec(self):
"""Called before an execution of a compiled statement.
If a compiled statement was passed to this ExecutionContext,
the `statement` and `parameters` datamembers must be
initialized after this statement is complete.
"""
raise NotImplementedError()
def post_exec(self):
"""Called after the execution of a compiled statement.
If a compiled statement was passed to this ExecutionContext,
the `last_insert_ids`, `last_inserted_params`, etc.
datamembers should be available after this method completes.
"""
raise NotImplementedError()
def result(self):
"""Return a result object corresponding to this ExecutionContext.
Returns a ResultProxy.
"""
raise NotImplementedError()
def handle_dbapi_exception(self, e):
"""Receive a DBAPI exception which occurred upon execute, result
fetch, etc."""
raise NotImplementedError()
def should_autocommit_text(self, statement):
"""Parse the given textual statement and return True if it refers to
a "committable" statement"""
raise NotImplementedError()
def lastrow_has_defaults(self):
"""Return True if the last INSERT or UPDATE row contained
inlined or database-side defaults.
"""
raise NotImplementedError()
def get_rowcount(self):
"""Return the DBAPI ``cursor.rowcount`` value, or in some
cases an interpreted value.
See :attr:`.ResultProxy.rowcount` for details on this.
"""
raise NotImplementedError()
class Connectable(object):
"""Interface for an object which supports execution of SQL constructs.
The two implementations of :class:`.Connectable` are
:class:`.Connection` and :class:`.Engine`.
Connectable must also implement the 'dialect' member which references a
:class:`.Dialect` instance.
"""
def connect(self, **kwargs):
"""Return a :class:`.Connection` object.
Depending on context, this may be ``self`` if this object
is already an instance of :class:`.Connection`, or a newly
procured :class:`.Connection` if this object is an instance
of :class:`.Engine`.
"""
def contextual_connect(self):
"""Return a :class:`.Connection` object which may be part of an ongoing
context.
Depending on context, this may be ``self`` if this object
is already an instance of :class:`.Connection`, or a newly
procured :class:`.Connection` if this object is an instance
of :class:`.Engine`.
"""
raise NotImplementedError()
@util.deprecated("0.7",
"Use the create() method on the given schema "
"object directly, i.e. :meth:`.Table.create`, "
":meth:`.Index.create`, :meth:`.MetaData.create_all`")
def create(self, entity, **kwargs):
"""Emit CREATE statements for the given schema entity."""
raise NotImplementedError()
@util.deprecated("0.7",
"Use the drop() method on the given schema "
"object directly, i.e. :meth:`.Table.drop`, "
":meth:`.Index.drop`, :meth:`.MetaData.drop_all`")
def drop(self, entity, **kwargs):
"""Emit DROP statements for the given schema entity."""
raise NotImplementedError()
def execute(self, object, *multiparams, **params):
"""Executes the given construct and returns a :class:`.ResultProxy`."""
raise NotImplementedError()
def scalar(self, object, *multiparams, **params):
"""Executes and returns the first column of the first row.
The underlying cursor is closed after execution.
"""
raise NotImplementedError()
def _run_visitor(self, visitorcallable, element,
**kwargs):
raise NotImplementedError()
def _execute_clauseelement(self, elem, multiparams=None, params=None):
raise NotImplementedError()
| gpl-3.0 |
dvro/scikit-protopy | protopy/base.py | 1 | 4528 | """Base and mixin classes for instance reduction techniques"""
# Author: Dayvid Victor <dvro@cin.ufpe.br>
# License: BSD Style
import warnings
from abc import ABCMeta, abstractmethod
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.neighbors.classification import KNeighborsClassifier
from sklearn.utils import check_array
from sklearn.externals import six
class InstanceReductionWarning(UserWarning):
pass
# Make sure that InstanceReductionWarning is always displayed
warnings.simplefilter("always", InstanceReductionWarning)
class InstanceReductionBase(six.with_metaclass(ABCMeta, BaseEstimator)):
"""Base class for instance reduction estimators."""
@abstractmethod
def __init__(self):
pass
class InstanceReductionMixin(InstanceReductionBase, ClassifierMixin):
"""Mixin class for all instance reduction techniques"""
def set_classifier(self, classifier):
"""Set the classifier to be used in the instance reduction process
and in the subsequent classification.
Parameters
----------
classifier : classifier, following the KNeighborsClassifier style
(default = KNN)
"""
self.classifier = classifier
def reduce_data(self, X, y):
"""Perform the instance reduction procedure on the given training data.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Training set.
y : array-like, shape = [n_samples]
Labels for X.
Returns
-------
X_ : array-like, shape = [indeterminate, n_features]
Resulting training set.
y_ : array-like, shape = [indeterminate]
Labels for X_
"""
pass
def fit(self, X, y, reduce_data=True):
"""
Fit the InstanceReduction model according to the given training data.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vector, where n_samples is the number of samples and
n_features is the number of features.
Note that centroid shrinking cannot be used with sparse matrices.
y : array, shape = [n_samples]
Target values (integers)
reduce_data : bool, flag indicating whether the reduction should be performed
"""
self.X = X
self.y = y
if reduce_data:
self.reduce_data(X, y)
return self
def predict(self, X, n_neighbors=1):
"""Perform classification on an array of test vectors X.
The predicted class C for each sample in X is returned.
Parameters
----------
X : array-like, shape = [n_samples, n_features]
Returns
-------
C : array, shape = [n_samples]
Notes
-----
The default prediction uses KNeighborsClassifier; if the
instance reduction algorithm is meant to be used with another
classifier, this method should be explicitly overridden and
documented.
"""
X = check_array(X)
if not hasattr(self, "X_") or self.X_ is None:
raise AttributeError("Model has not been trained yet.")
if not hasattr(self, "y_") or self.y_ is None:
raise AttributeError("Model has not been trained yet.")
if self.classifier is None:
self.classifier = KNeighborsClassifier(n_neighbors=n_neighbors)
self.classifier.fit(self.X_, self.y_)
return self.classifier.predict(X)
def predict_proba(self, X):
"""Return probability estimates for the test data X.
after a given prototype selection algorithm.
Parameters
----------
X : array, shape = (n_samples, n_features)
A 2-D array representing the test points.
Returns
-------
p : array of shape = [n_samples, n_classes], or a list of n_outputs
of such arrays if n_outputs > 1.
The class probabilities of the input samples. Classes are ordered
by lexicographic order.
"""
self.classifier.fit(self.X_, self.y_)
return self.classifier.predict_proba(X)
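# Hedged usage sketch (added for illustration, not part of the original
# module): a trivial subclass that keeps every instance, showing the
# contract the mixin expects -- reduce_data() must populate self.X_ and
# self.y_ before predict() is called. The data values are made up.
if __name__ == '__main__':
    import numpy as np

    class KeepAll(InstanceReductionMixin):
        def __init__(self):
            self.classifier = None

        def reduce_data(self, X, y):
            self.X_, self.y_ = X, y
            return self.X_, self.y_

    X = np.array([[0.0], [1.0], [10.0], [11.0]])
    y = np.array([0, 0, 1, 1])
    print(KeepAll().fit(X, y).predict(np.array([[0.5], [10.5]])))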
| bsd-2-clause |
a-nai/django-wiki | wiki/plugins/attachments/markdown_extensions.py | 10 | 3417 | from __future__ import unicode_literals
from __future__ import absolute_import
import markdown
import re
from django.core.urlresolvers import reverse
from django.template.context import Context
from django.template.loader import render_to_string
from django.contrib.auth.models import AnonymousUser
from wiki.core.permissions import can_read
ATTACHMENT_RE = re.compile(
r'(?P<before>.*)(\[attachment\:(?P<id>\d+)\])(?P<after>.*)',
re.IGNORECASE)
from wiki.plugins.attachments import models
class AttachmentExtension(markdown.Extension):
""" Abbreviation Extension for Python-Markdown. """
def extendMarkdown(self, md, md_globals):
""" Insert AbbrPreprocessor before ReferencePreprocessor. """
md.preprocessors.add(
'dw-attachments',
AttachmentPreprocessor(md),
'>html_block')
class AttachmentPreprocessor(markdown.preprocessors.Preprocessor):
"""django-wiki attachment preprocessor - parse text for [attachment:id] references. """
def run(self, lines):
new_text = []
for line in lines:
m = ATTACHMENT_RE.match(line)
if m:
attachment_id = m.group('id').strip()
before = self.run([m.group('before')])[0]
after = self.run([m.group('after')])[0]
try:
attachment = models.Attachment.objects.get(
articles__current_revision__deleted=False,
id=attachment_id, current_revision__deleted=False,
articles=self.markdown.article
)
url = reverse(
'wiki:attachments_download',
kwargs={
'article_id': self.markdown.article.id,
'attachment_id': attachment.id,
})
# The readability of the attachment is decided relative
# to the owner of the original article.
# I.e. do not insert attachments in other articles that
# the original uploader cannot read, that would be out
# of scope!
article_owner = attachment.article.owner
if not article_owner:
article_owner = AnonymousUser()
attachment_can_read = can_read(
self.markdown.article, article_owner)
html = render_to_string(
"wiki/plugins/attachments/render.html",
Context({
'url': url,
'filename': attachment.original_filename,
'attachment_can_read': attachment_can_read,
}))
line = self.markdown.htmlStash.store(html, safe=True)
except models.Attachment.DoesNotExist:
html = """<span class="attachment attachment-deleted">Attachment with ID #%s is deleted.</span>""" % attachment_id
line = line.replace(
m.group(2),
self.markdown.htmlStash.store(
html,
safe=True))
line = before + line + after
new_text.append(line)
return new_text
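# Hedged usage sketch (added for illustration, not part of the original
# module): how the extension is wired onto a Markdown instance. django-wiki
# normally attaches ``article`` to the markdown object itself, which the
# preprocessor above relies on; ``some_article`` is hypothetical here.
# md = markdown.Markdown(extensions=[AttachmentExtension()])
# md.article = some_article
# html = md.convert('See [attachment:12]')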
| gpl-3.0 |
AnthonyCAS/code-for-blog | 2009/pygame_creeps_game/pathfinder.py | 12 | 5242 | from priorityqueueset import PriorityQueueSet
class PathFinder(object):
""" Computes a path in a graph using the A* algorithm.
Initialize the object and then repeatedly compute_path to
get the path between a start point and an end point.
The points on a graph are required to be hashable and
comparable with __eq__. Other than that, they may be
represented as you wish, as long as the functions
supplied to the constructor know how to handle them.
"""
def __init__(self, successors, move_cost, heuristic_to_goal):
""" Create a new PathFinder. Provided with several
functions that represent your graph and the costs of
moving through it.
successors:
A function that receives a point as a single
argument and returns a list of "successor" points,
the points on the graph that can be reached from
the given point.
move_cost:
A function that receives two points as arguments
and returns the numeric cost of moving from the
first to the second.
heuristic_to_goal:
A function that receives a point and a goal point,
and returns the numeric heuristic estimation of
the cost of reaching the goal from the point.
"""
self.successors = successors
self.move_cost = move_cost
self.heuristic_to_goal = heuristic_to_goal
def compute_path(self, start, goal):
""" Compute the path between the 'start' point and the
'goal' point.
The path is returned as an iterator to the points,
including the start and goal points themselves.
If no path was found, an empty list is returned.
"""
#
# Implementation of the A* algorithm.
#
closed_set = {}
start_node = self._Node(start)
start_node.g_cost = 0
start_node.f_cost = self._compute_f_cost(start_node, goal)
open_set = PriorityQueueSet()
open_set.add(start_node)
while len(open_set) > 0:
# Remove and get the node with the lowest f_cost from
# the open set
#
curr_node = open_set.pop_smallest()
if curr_node.coord == goal:
return self._reconstruct_path(curr_node)
closed_set[curr_node] = curr_node
for succ_coord in self.successors(curr_node.coord):
succ_node = self._Node(succ_coord)
succ_node.g_cost = self._compute_g_cost(curr_node, succ_node)
succ_node.f_cost = self._compute_f_cost(succ_node, goal)
if succ_node in closed_set:
continue
if open_set.add(succ_node):
succ_node.pred = curr_node
return []
########################## PRIVATE ##########################
def _compute_g_cost(self, from_node, to_node):
return (from_node.g_cost +
self.move_cost(from_node.coord, to_node.coord))
def _compute_f_cost(self, node, goal):
return node.g_cost + self._cost_to_goal(node, goal)
def _cost_to_goal(self, node, goal):
return self.heuristic_to_goal(node.coord, goal)
def _reconstruct_path(self, node):
""" Reconstructs the path to the node from the start node
(for which .pred is None)
"""
pth = [node.coord]
n = node
while n.pred:
n = n.pred
pth.append(n.coord)
return reversed(pth)
class _Node(object):
""" Used to represent a node on the searched graph during
the A* search.
Each Node has its coordinate (the point it represents),
a g_cost (the cumulative cost of reaching the point
from the start point), a f_cost (the estimated cost
from the start to the goal through this point) and
a predecessor Node (for path construction).
The Node is meant to be used inside PriorityQueueSet,
so it implements equality and hashing (based on the
coordinate, which is assumed to be unique) and
comparison (based on f_cost) for sorting by cost.
"""
def __init__(self, coord, g_cost=None, f_cost=None, pred=None):
self.coord = coord
self.g_cost = g_cost
self.f_cost = f_cost
self.pred = pred
def __eq__(self, other):
return self.coord == other.coord
def __cmp__(self, other):
return cmp(self.f_cost, other.f_cost)
def __hash__(self):
return hash(self.coord)
def __str__(self):
return 'N(%s) -> g: %s, f: %s' % (self.coord, self.g_cost, self.f_cost)
def __repr__(self):
return self.__str__()
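# Hedged usage sketch (added for illustration, not part of the original
# module): PathFinder wired to a small 4-connected grid with unit move
# cost and a Manhattan-distance heuristic. The grid contents are made up.
if __name__ == '__main__':
    passable = set((x, y) for x in range(5) for y in range(5)
                   if (x, y) != (2, 2))

    def successors(p):
        x, y = p
        return [(x + dx, y + dy)
                for dx, dy in ((1, 0), (-1, 0), (0, 1), (0, -1))
                if (x + dx, y + dy) in passable]

    def move_cost(a, b):
        return 1  # uniform step cost

    def manhattan(p, goal):
        return abs(p[0] - goal[0]) + abs(p[1] - goal[1])

    pf = PathFinder(successors, move_cost, manhattan)
    print list(pf.compute_path((0, 0), (4, 4)))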
| unlicense |
ddurdle/OneDrive-for-KODI | resources/lib/cache.py | 3 | 6344 | '''
Copyright (C) 2014-2015 ddurdle
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
# cloudservice - standard modules
import os
# cloudservice - standard XBMC modules
import xbmcgui, xbmcvfs
#
#
#
class cache:
# CloudService v0.2.3
##
##
def __init__(self, package=None):
self.package = package
self.cachePath = ''
self.files = []
def setPackage(self, package):
self.package = package
def setSRT(self, service):
if self.cachePath == '':
cachePath = service.settings.cachePath
else:
cachePath = self.cachePath
if cachePath == '':
cachePath = xbmcgui.Dialog().browse(0,service.addon.getLocalizedString(30136), 'files','',False,False,'')
service.addon.setSetting('cache_folder', cachePath)
self.cachePath = cachePath
if cachePath != '':
cachePath = str(cachePath) + '/' + str(self.package.file.id)+'/'#+ '.'+str(lang)+'.srt'
if not xbmcvfs.exists(cachePath):
xbmcvfs.mkdir(cachePath)
srt = service.getSRT(self.package.file.title)
if srt:
for file in srt:
if not xbmcvfs.exists(cachePath + str(file[0])):
service.downloadPicture(file[1], cachePath + str(file[0]))
def setCC(self, service):
if self.cachePath == '':
cachePath = service.settings.cachePath
else:
cachePath = self.cachePath
if cachePath == '':
cachePath = xbmcgui.Dialog().browse(0,service.addon.getLocalizedString(30136), 'files','',False,False,'')
service.addon.setSetting('cache_folder', cachePath)
self.cachePath = cachePath
if cachePath != '':
cachePath = str(cachePath) + '/' + str(self.package.file.id)+'/'#+ '.'+str(lang)+'.srt'
if not xbmcvfs.exists(cachePath):
xbmcvfs.mkdir(cachePath)
cachePath = str(cachePath) + str(self.package.file.id)
cc = service.getTTS(self.package.file.srtURL)
if cc:
for file in cc:
if not xbmcvfs.exists(cachePath + str(file[0])):
service.downloadTTS(file[1], cachePath + str(file[0]))
def getSRT(self, service):
cc = []
dirs, files = xbmcvfs.listdir(service.settings.cachePath + '/'+ str(self.package.file.id) + '/')
for file in files:
if os.path.splitext(file)[1] == '.srt':
cc.append(service.settings.cachePath + '/'+ str(self.package.file.id) + '/' + file)
return cc
def setThumbnail(self, service, url=''):
if self.cachePath == '':
cachePath = service.settings.cachePath
else:
cachePath = self.cachePath
if cachePath == '':
cachePath = xbmcgui.Dialog().browse(0,service.addon.getLocalizedString(30136), 'files','',False,False,'')
service.addon.setSetting('cache_folder', cachePath)
self.cachePath = cachePath
if url == '':
url = self.package.file.thumbnail
#simply no thumbnail
if url == '':
return ""
cachePath = str(cachePath) + str(self.package.file.id) + '/'
if not xbmcvfs.exists(cachePath):
xbmcvfs.mkdir(cachePath)
if not xbmcvfs.exists(cachePath + str(self.package.file.id) + '.jpg'):
service.downloadPicture(url, cachePath + str(self.package.file.id) + '.jpg')
print url
return cachePath + str(self.package.file.id) + '.jpg'
def getThumbnail(self,service, url='', fileID=''):
if fileID == '':
if xbmcvfs.exists(str(self.cachePath) + str(self.package.file.id) + '/' + str(self.package.file.id) + '.jpg'):
return str(self.cachePath) + str(self.package.file.id) + '/' + str(self.package.file.id) + '.jpg'
else:
return self.package.file.thumbnail
else:
if xbmcvfs.exists(str(self.cachePath) + str(fileID) + '/' + str(fileID) + '.jpg'):
return str(self.cachePath) + str(fileID) + '/' + str(fileID) + '.jpg'
else:
return url + '|' + service.getHeadersEncoded()
def getFiles(self):
cachePath = self.cachePath + '/' + self.package.file.id
localResolutions = []
localFiles = []
if xbmcvfs.exists(cachePath):
dirs,files = xbmcvfs.listdir(cachePath)
for file in files:
if os.path.splitext(file)[1] == '.stream':
try:
resolutionFile = xbmcvfs.File(cachePath + '/' + str(file) + '.resolution')
resolution = resolutionFile.read()
resolutionFile.close()
except:
resolution = file
localResolutions.append('offline - ' + str(resolution))
localFiles.append(cachePath + '/' + file)
return (localResolutions,localFiles)
def getOfflineFileList(self, fileID):
localFiles = []
if xbmcvfs.exists(self.cachePath):
dirs,files = xbmcvfs.listdir(self.cachePath)
for file in files:
if os.path.splitext(file)[1] == '.stream':
try:
nameFile = xbmcvfs.File(self.cachePath + '/' + str(fileID) + '/' + str(fileID) + '.name')
filename = nameFile.read()
nameFile.close()
except:
filename = file
localFiles.append(file)
return localFiles
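# Hedged usage sketch (added for illustration, not part of the original
# module): the typical call sequence inside the add-on. 'service' stands
# for the cloud-service wrapper the methods above expect (it must provide
# settings, downloadPicture, getSRT, ...), and 'package' wraps the file.
# c = cache(package)
# thumb = c.setThumbnail(service)      # downloads and returns a local path
# resolutions, files = c.getFiles()    # offline streams cached for the file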
| gpl-2.0 |
xiaotdl/ansible | lib/ansible/parsing/utils/addresses.py | 56 | 7976 | # Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
# Components that match a numeric or alphanumeric begin:end or begin:end:step
# range expression inside square brackets.
numeric_range = r'''
\[
(?:[0-9]+:[0-9]+) # numeric begin:end
(?::[0-9]+)? # numeric :step (optional)
\]
'''
hexadecimal_range = r'''
\[
(?:[0-9a-f]+:[0-9a-f]+) # hexadecimal begin:end
(?::[0-9]+)? # numeric :step (optional)
\]
'''
alphanumeric_range = r'''
\[
(?:
[a-z]:[a-z]| # one-char alphabetic range
[0-9]+:[0-9]+ # ...or a numeric one
)
(?::[0-9]+)? # numeric :step (optional)
\]
'''
# Components that match a 16-bit portion of an IPv6 address in hexadecimal
# notation (0..ffff) or an 8-bit portion of an IPv4 address in decimal notation
# (0..255) or an [x:y(:z)] numeric range.
ipv6_component = r'''
(?:
[0-9a-f]{{1,4}}| # 0..ffff
{range} # or a numeric range
)
'''.format(range=hexadecimal_range)
ipv4_component = r'''
(?:
[01]?[0-9]{{1,2}}| # 0..199
2[0-4][0-9]| # 200..249
25[0-5]| # 250..255
{range} # or a numeric range
)
'''.format(range=numeric_range)
# A hostname label, e.g. 'foo' in 'foo.example.com'. Consists of alphanumeric
# characters plus dashes (and underscores) or valid ranges. The label may not
# start or end with a hyphen or an underscore. This is interpolated into the
# hostname pattern below. We don't try to enforce the 63-char length limit.
label = r'''
(?:[\w]|{range}) # Starts with an alphanumeric or a range
(?:[\w_-]|{range})* # Then zero or more of the same or [_-]
(?<![_-]) # ...as long as it didn't end with [_-]
'''.format(range=alphanumeric_range)
patterns = {
# This matches a square-bracketed expression with a port specification. What
# is inside the square brackets is validated later.
'bracketed_hostport': re.compile(
r'''^
\[(.+)\] # [host identifier]
:([0-9]+) # :port number
$
''', re.X
),
# This matches a bare IPv4 address or hostname (or host pattern including
# [x:y(:z)] ranges) with a port specification.
'hostport': re.compile(
r'''^
((?: # We want to match:
[^:\[\]] # (a non-range character
| # ...or...
\[[^\]]*\] # a complete bracketed expression)
)*) # repeated as many times as possible
:([0-9]+) # followed by a port number
$
''', re.X
),
# This matches an IPv4 address, but also permits range expressions.
'ipv4': re.compile(
r'''^
(?:{i4}\.){{3}}{i4} # Three parts followed by dots plus one
$
'''.format(i4=ipv4_component), re.X|re.I
),
# This matches an IPv6 address, but also permits range expressions.
#
# This expression looks complex, but it really only spells out the various
# combinations in which the basic unit of an IPv6 address (0..ffff) can be
# written, from :: to 1:2:3:4:5:6:7:8, plus the IPv4-in-IPv6 variants such
# as ::ffff:192.0.2.3.
#
# Note that we can't just use ipaddress.ip_address() because we also have to
# accept ranges in place of each component.
'ipv6': re.compile(
r'''^
(?:{0}:){{7}}{0}| # uncompressed: 1:2:3:4:5:6:7:8
(?:{0}:){{1,6}}:| # compressed variants, which are all
(?:{0}:)(?::{0}){{1,6}}| # a::b for various lengths of a,b
(?:{0}:){{2}}(?::{0}){{1,5}}|
(?:{0}:){{3}}(?::{0}){{1,4}}|
(?:{0}:){{4}}(?::{0}){{1,3}}|
(?:{0}:){{5}}(?::{0}){{1,2}}|
(?:{0}:){{6}}(?::{0})| # ...all with 2 <= a+b <= 7
:(?::{0}){{1,6}}| # ::ffff(:ffff...)
{0}?::| # ffff::, ::
# ipv4-in-ipv6 variants
(?:0:){{6}}(?:{0}\.){{3}}{0}|
::(?:ffff:)?(?:{0}\.){{3}}{0}|
(?:0:){{5}}ffff:(?:{0}\.){{3}}{0}
$
'''.format(ipv6_component), re.X|re.I
),
# This matches a hostname or host pattern including [x:y(:z)] ranges.
#
# We roughly follow DNS rules here, but also allow ranges (and underscores).
# In the past, no systematic rules were enforced about inventory hostnames,
# but the parsing context (e.g. shlex.split(), fnmatch.fnmatch()) excluded
# various metacharacters anyway.
#
# We don't enforce DNS length restrictions here (63 characters per label,
# 253 characters total) or make any attempt to process IDNs.
'hostname': re.compile(
r'''^
{label} # We must have at least one label
(?:\.{label})* # Followed by zero or more .labels
$
'''.format(label=label), re.X|re.I|re.UNICODE
),
}
def parse_address(address, allow_ranges=False):
"""
Takes a string and returns a (host, port) tuple. If the host is None, then
the string could not be parsed as a host identifier with an optional port
specification. If the port is None, then no port was specified.
The host identifier may be a hostname (qualified or not), an IPv4 address,
or an IPv6 address. If allow_ranges is True, then any of those may contain
[x:y] range specifications, e.g. foo[1:3] or foo[0:5]-bar[x-z].
The port number is an optional :NN suffix on an IPv4 address or host name,
or a mandatory :NN suffix on any square-bracketed expression: IPv6 address,
IPv4 address, or host name. (This means the only way to specify a port for
an IPv6 address is to enclose it in square brackets.)
"""
# First, we extract the port number if one is specified.
port = None
for type in ['bracketed_hostport', 'hostport']:
m = patterns[type].match(address)
if m:
(address, port) = m.groups()
port = int(port)
continue
# What we're left with now must be an IPv4 or IPv6 address, possibly with
# numeric ranges, or a hostname with alphanumeric ranges.
host = None
for type in ['ipv4', 'ipv6', 'hostname']:
m = patterns[type].match(address)
if m:
host = address
continue
# If it isn't any of the above, we don't understand it.
if not host:
return (None, None)
# If we get to this point, we know that any included ranges are valid. If
# the caller is prepared to handle them, all is well. Otherwise we treat
# it as a parse failure.
if not allow_ranges and '[' in host:
return (None, None)
return (host, port)
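# Hedged usage sketch (added for illustration, not part of the original
# module): expected (host, port) tuples for a few representative inputs.
if __name__ == '__main__':
    print(parse_address('192.0.2.3'))             # ('192.0.2.3', None)
    print(parse_address('foo.example.com:2222'))  # ('foo.example.com', 2222)
    print(parse_address('[2001:db8::1]:22'))      # ('2001:db8::1', 22)
    print(parse_address('db[1:3].example.com', allow_ranges=True))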
| gpl-3.0 |
vguzmanp/cloud-in-one | main_crypto.py | 1 | 1392 | #!/usr/bin/env python3
import getpass
import argparse
import shutil
from core.databaseManager import DatabaseManager
from core.securityModule import SecurityModule
def processFile(file_in_name, file_out_name, encrypt_flag):
user = input("CLOUD-IN-ONE Username: ")
password = getpass.getpass()
databaseManager = DatabaseManager(':memory:')
sec = SecurityModule(databaseManager, user, password)
file_processed = None
with open(file_in_name, 'rb') as f_in:
if encrypt_flag:
file_processed = sec.encrypt(f_in)
else:
file_processed = sec.decrypt(f_in)
with open(file_out_name, 'wb') as f_out:
file_processed.seek(0)
shutil.copyfileobj(file_processed, f_out)
file_processed.close()
def main():
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-d", "--decrypt", action="store_true")
group.add_argument("-e", "--encrypt", action="store_true")
parser.add_argument("file", help="the file to encrypt / decrypt")
parser.add_argument("file_output", help="name of the destination file")
args = parser.parse_args()
encrypt_flag = args.encrypt
if not encrypt_flag:
encrypt_flag = not args.decrypt
processFile(args.file, args.file_output, encrypt_flag)
if __name__ == '__main__':
main()
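# Hedged usage note (added for illustration, not part of the original
# script): typical command-line invocations.
#   python3 main_crypto.py --encrypt secret.txt secret.enc
#   python3 main_crypto.py --decrypt secret.enc secret.txt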
| mit |
bufferapp/buffer-django-nonrel | django/contrib/gis/db/backends/mysql/operations.py | 312 | 2418 | from django.db.backends.mysql.base import DatabaseOperations
from django.contrib.gis.db.backends.adapter import WKTAdapter
from django.contrib.gis.db.backends.base import BaseSpatialOperations
class MySQLOperations(DatabaseOperations, BaseSpatialOperations):
compiler_module = 'django.contrib.gis.db.models.sql.compiler'
mysql = True
name = 'mysql'
select = 'AsText(%s)'
from_wkb = 'GeomFromWKB'
from_text = 'GeomFromText'
Adapter = WKTAdapter
Adaptor = Adapter # Backwards-compatibility alias.
geometry_functions = {
'bbcontains' : 'MBRContains', # For consistency w/PostGIS API
'bboverlaps' : 'MBROverlaps', # .. ..
'contained' : 'MBRWithin', # .. ..
'contains' : 'MBRContains',
'disjoint' : 'MBRDisjoint',
'equals' : 'MBREqual',
'exact' : 'MBREqual',
'intersects' : 'MBRIntersects',
'overlaps' : 'MBROverlaps',
'same_as' : 'MBREqual',
'touches' : 'MBRTouches',
'within' : 'MBRWithin',
}
gis_terms = dict([(term, None) for term in geometry_functions.keys() + ['isnull']])
def geo_db_type(self, f):
return f.geom_type
def get_geom_placeholder(self, value, srid):
"""
The placeholder here has to include MySQL's WKT constructor. Because
MySQL does not support spatial transformations, there is no need to
modify the placeholder based on the contents of the given value.
"""
if hasattr(value, 'expression'):
placeholder = '%s.%s' % tuple(map(self.quote_name, value.cols[value.expression]))
else:
placeholder = '%s(%%s)' % self.from_text
return placeholder
def spatial_lookup_sql(self, lvalue, lookup_type, value, field, qn):
alias, col, db_type = lvalue
geo_col = '%s.%s' % (qn(alias), qn(col))
lookup_info = self.geometry_functions.get(lookup_type, False)
if lookup_info:
return "%s(%s, %s)" % (lookup_info, geo_col,
self.get_geom_placeholder(value, field.srid))
# TODO: Is this really necessary? MySQL can't handle NULL geometries
# in its spatial indexes anyways.
if lookup_type == 'isnull':
return "%s IS %sNULL" % (geo_col, (not value and 'NOT ' or ''))
raise TypeError("Got invalid lookup_type: %s" % repr(lookup_type))
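# Hedged illustration (added, not part of the original module): for a plain
# geometry value with no 'expression' attribute, get_geom_placeholder()
# above yields the string 'GeomFromText(%s)', wrapping the bind parameter
# in MySQL's WKT constructor named by ``from_text``.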
| bsd-3-clause |
josesanch/django-oscar | sites/us/apps/shipping/south_migrations/0002_auto__del_orderanditemlevelchargemethod__add_orderanditemcharges__add_.py | 23 | 4591 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting model 'OrderAndItemLevelChargeMethod'
db.delete_table('shipping_orderanditemlevelchargemethod')
# Adding model 'OrderAndItemCharges'
db.create_table('shipping_orderanditemcharges', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('code', self.gf('django.db.models.fields.CharField')(unique=True, max_length=128)),
('name', self.gf('django.db.models.fields.CharField')(max_length=128)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
('price_per_order', self.gf('django.db.models.fields.DecimalField')(default='0.00', max_digits=12, decimal_places=2)),
('price_per_item', self.gf('django.db.models.fields.DecimalField')(default='0.00', max_digits=12, decimal_places=2)),
('free_shipping_threshold', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)),
))
db.send_create_signal('shipping', ['OrderAndItemCharges'])
# Adding model 'WeightBand'
db.create_table('shipping_weightband', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('method_code', self.gf('django.db.models.fields.CharField')(max_length=64, db_index=True)),
('upper_limit', self.gf('django.db.models.fields.FloatField')()),
('charge', self.gf('django.db.models.fields.DecimalField')(max_digits=12, decimal_places=2)),
))
db.send_create_signal('shipping', ['WeightBand'])
def backwards(self, orm):
# Adding model 'OrderAndItemLevelChargeMethod'
db.create_table('shipping_orderanditemlevelchargemethod', (
('code', self.gf('django.db.models.fields.CharField')(max_length=128, unique=True)),
('price_currency', self.gf('django.db.models.fields.CharField')(default='GBP', max_length=12)),
('description', self.gf('django.db.models.fields.TextField')(blank=True)),
('price_per_item', self.gf('django.db.models.fields.DecimalField')(default='0.00', max_digits=12, decimal_places=2)),
('price_per_order', self.gf('django.db.models.fields.DecimalField')(default='0.00', max_digits=12, decimal_places=2)),
('free_shipping_threshold', self.gf('django.db.models.fields.DecimalField')(null=True, max_digits=12, decimal_places=2, blank=True)),
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('name', self.gf('django.db.models.fields.CharField')(max_length=128)),
))
db.send_create_signal('shipping', ['OrderAndItemLevelChargeMethod'])
# Deleting model 'OrderAndItemCharges'
db.delete_table('shipping_orderanditemcharges')
# Deleting model 'WeightBand'
db.delete_table('shipping_weightband')
models = {
'shipping.orderanditemcharges': {
'Meta': {'object_name': 'OrderAndItemCharges'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'free_shipping_threshold': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'price_per_item': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'}),
'price_per_order': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
},
'shipping.weightband': {
'Meta': {'ordering': "['upper_limit']", 'object_name': 'WeightBand'},
'charge': ('django.db.models.fields.DecimalField', [], {'max_digits': '12', 'decimal_places': '2'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'method_code': ('django.db.models.fields.CharField', [], {'max_length': '64', 'db_index': 'True'}),
'upper_limit': ('django.db.models.fields.FloatField', [], {})
}
}
complete_apps = ['shipping']
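# Hedged usage note (added, not part of the original file): South schema
# migrations like this one were applied with ``./manage.py migrate shipping``
# on pre-1.7 Django projects with South installed.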
| bsd-3-clause |
aospx-kitkat/platform_external_chromium_org | third_party/pexpect/ANSI.py | 171 | 12646 | """This implements an ANSI (VT100) terminal emulator as a subclass of screen.
PEXPECT LICENSE
This license is approved by the OSI and FSF as GPL-compatible.
http://opensource.org/licenses/isc-license.txt
Copyright (c) 2012, Noah Spurrier <noah@noah.org>
PERMISSION TO USE, COPY, MODIFY, AND/OR DISTRIBUTE THIS SOFTWARE FOR ANY
PURPOSE WITH OR WITHOUT FEE IS HEREBY GRANTED, PROVIDED THAT THE ABOVE
COPYRIGHT NOTICE AND THIS PERMISSION NOTICE APPEAR IN ALL COPIES.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""
# references:
# http://en.wikipedia.org/wiki/ANSI_escape_code
# http://www.retards.org/terminals/vt102.html
# http://vt100.net/docs/vt102-ug/contents.html
# http://vt100.net/docs/vt220-rm/
# http://www.termsys.demon.co.uk/vtansi.htm
import screen
import FSM
import copy
import string
#
# The 'Do.*' functions are helper functions for the ANSI class.
#
def DoEmit (fsm):
screen = fsm.memory[0]
screen.write_ch(fsm.input_symbol)
def DoStartNumber (fsm):
fsm.memory.append (fsm.input_symbol)
def DoBuildNumber (fsm):
ns = fsm.memory.pop()
ns = ns + fsm.input_symbol
fsm.memory.append (ns)
def DoBackOne (fsm):
screen = fsm.memory[0]
screen.cursor_back ()
def DoBack (fsm):
count = int(fsm.memory.pop())
screen = fsm.memory[0]
screen.cursor_back (count)
def DoDownOne (fsm):
screen = fsm.memory[0]
screen.cursor_down ()
def DoDown (fsm):
count = int(fsm.memory.pop())
screen = fsm.memory[0]
screen.cursor_down (count)
def DoForwardOne (fsm):
screen = fsm.memory[0]
screen.cursor_forward ()
def DoForward (fsm):
count = int(fsm.memory.pop())
screen = fsm.memory[0]
screen.cursor_forward (count)
def DoUpReverse (fsm):
screen = fsm.memory[0]
screen.cursor_up_reverse()
def DoUpOne (fsm):
screen = fsm.memory[0]
screen.cursor_up ()
def DoUp (fsm):
count = int(fsm.memory.pop())
screen = fsm.memory[0]
screen.cursor_up (count)
def DoHome (fsm):
c = int(fsm.memory.pop())
r = int(fsm.memory.pop())
screen = fsm.memory[0]
screen.cursor_home (r,c)
def DoHomeOrigin (fsm):
c = 1
r = 1
screen = fsm.memory[0]
screen.cursor_home (r,c)
def DoEraseDown (fsm):
screen = fsm.memory[0]
screen.erase_down()
def DoErase (fsm):
arg = int(fsm.memory.pop())
screen = fsm.memory[0]
if arg == 0:
screen.erase_down()
elif arg == 1:
screen.erase_up()
elif arg == 2:
screen.erase_screen()
def DoEraseEndOfLine (fsm):
screen = fsm.memory[0]
screen.erase_end_of_line()
def DoEraseLine (fsm):
arg = int(fsm.memory.pop())
screen = fsm.memory[0]
if arg == 0:
screen.erase_end_of_line()
elif arg == 1:
screen.erase_start_of_line()
elif arg == 2:
screen.erase_line()
def DoEnableScroll (fsm):
screen = fsm.memory[0]
screen.scroll_screen()
def DoCursorSave (fsm):
screen = fsm.memory[0]
screen.cursor_save_attrs()
def DoCursorRestore (fsm):
screen = fsm.memory[0]
screen.cursor_restore_attrs()
def DoScrollRegion (fsm):
screen = fsm.memory[0]
r2 = int(fsm.memory.pop())
r1 = int(fsm.memory.pop())
screen.scroll_screen_rows (r1,r2)
def DoMode (fsm):
screen = fsm.memory[0]
mode = fsm.memory.pop() # Should be 4
# screen.setReplaceMode ()
def DoLog (fsm):
screen = fsm.memory[0]
fsm.memory = [screen]
fout = open ('log', 'a')
fout.write (fsm.input_symbol + ',' + fsm.current_state + '\n')
fout.close()
class term (screen.screen):
"""This class is an abstract, generic terminal.
This does nothing. This is a placeholder that
provides a common base class for other terminals
such as an ANSI terminal. """
def __init__ (self, r=24, c=80):
screen.screen.__init__(self, r,c)
class ANSI (term):
"""This class implements an ANSI (VT100) terminal.
It is a stream filter that recognizes ANSI terminal
escape sequences and maintains the state of a screen object. """
def __init__ (self, r=24,c=80):
term.__init__(self,r,c)
#self.screen = screen (24,80)
self.state = FSM.FSM ('INIT',[self])
self.state.set_default_transition (DoLog, 'INIT')
self.state.add_transition_any ('INIT', DoEmit, 'INIT')
self.state.add_transition ('\x1b', 'INIT', None, 'ESC')
self.state.add_transition_any ('ESC', DoLog, 'INIT')
self.state.add_transition ('(', 'ESC', None, 'G0SCS')
self.state.add_transition (')', 'ESC', None, 'G1SCS')
self.state.add_transition_list ('AB012', 'G0SCS', None, 'INIT')
self.state.add_transition_list ('AB012', 'G1SCS', None, 'INIT')
self.state.add_transition ('7', 'ESC', DoCursorSave, 'INIT')
self.state.add_transition ('8', 'ESC', DoCursorRestore, 'INIT')
self.state.add_transition ('M', 'ESC', DoUpReverse, 'INIT')
self.state.add_transition ('>', 'ESC', DoUpReverse, 'INIT')
self.state.add_transition ('<', 'ESC', DoUpReverse, 'INIT')
self.state.add_transition ('=', 'ESC', None, 'INIT') # Selects application keypad.
self.state.add_transition ('#', 'ESC', None, 'GRAPHICS_POUND')
self.state.add_transition_any ('GRAPHICS_POUND', None, 'INIT')
self.state.add_transition ('[', 'ESC', None, 'ELB')
# ELB means Escape Left Bracket. That is ^[[
self.state.add_transition ('H', 'ELB', DoHomeOrigin, 'INIT')
self.state.add_transition ('D', 'ELB', DoBackOne, 'INIT')
self.state.add_transition ('B', 'ELB', DoDownOne, 'INIT')
self.state.add_transition ('C', 'ELB', DoForwardOne, 'INIT')
self.state.add_transition ('A', 'ELB', DoUpOne, 'INIT')
self.state.add_transition ('J', 'ELB', DoEraseDown, 'INIT')
self.state.add_transition ('K', 'ELB', DoEraseEndOfLine, 'INIT')
self.state.add_transition ('r', 'ELB', DoEnableScroll, 'INIT')
self.state.add_transition ('m', 'ELB', None, 'INIT')
self.state.add_transition ('?', 'ELB', None, 'MODECRAP')
self.state.add_transition_list (string.digits, 'ELB', DoStartNumber, 'NUMBER_1')
self.state.add_transition_list (string.digits, 'NUMBER_1', DoBuildNumber, 'NUMBER_1')
self.state.add_transition ('D', 'NUMBER_1', DoBack, 'INIT')
self.state.add_transition ('B', 'NUMBER_1', DoDown, 'INIT')
self.state.add_transition ('C', 'NUMBER_1', DoForward, 'INIT')
self.state.add_transition ('A', 'NUMBER_1', DoUp, 'INIT')
self.state.add_transition ('J', 'NUMBER_1', DoErase, 'INIT')
self.state.add_transition ('K', 'NUMBER_1', DoEraseLine, 'INIT')
self.state.add_transition ('l', 'NUMBER_1', DoMode, 'INIT')
### It gets worse... the 'm' code can have infinite number of
### number;number;number before it. I've never seen more than two,
### but the specs say it's allowed. crap!
self.state.add_transition ('m', 'NUMBER_1', None, 'INIT')
### LED control. Same implementation problem as 'm' code.
self.state.add_transition ('q', 'NUMBER_1', None, 'INIT')
# \E[?47h switch to alternate screen
# \E[?47l restores to normal screen from alternate screen.
self.state.add_transition_list (string.digits, 'MODECRAP', DoStartNumber, 'MODECRAP_NUM')
self.state.add_transition_list (string.digits, 'MODECRAP_NUM', DoBuildNumber, 'MODECRAP_NUM')
self.state.add_transition ('l', 'MODECRAP_NUM', None, 'INIT')
self.state.add_transition ('h', 'MODECRAP_NUM', None, 'INIT')
#RM Reset Mode Esc [ Ps l none
self.state.add_transition (';', 'NUMBER_1', None, 'SEMICOLON')
self.state.add_transition_any ('SEMICOLON', DoLog, 'INIT')
self.state.add_transition_list (string.digits, 'SEMICOLON', DoStartNumber, 'NUMBER_2')
self.state.add_transition_list (string.digits, 'NUMBER_2', DoBuildNumber, 'NUMBER_2')
self.state.add_transition_any ('NUMBER_2', DoLog, 'INIT')
self.state.add_transition ('H', 'NUMBER_2', DoHome, 'INIT')
self.state.add_transition ('f', 'NUMBER_2', DoHome, 'INIT')
self.state.add_transition ('r', 'NUMBER_2', DoScrollRegion, 'INIT')
### It gets worse... the 'm' code can have infinite number of
### number;number;number before it. I've never seen more than two,
### but the specs say it's allowed. crap!
self.state.add_transition ('m', 'NUMBER_2', None, 'INIT')
### LED control. Same problem as 'm' code.
self.state.add_transition ('q', 'NUMBER_2', None, 'INIT')
self.state.add_transition (';', 'NUMBER_2', None, 'SEMICOLON_X')
# Create a state for 'q' and 'm' which allows an infinite number of ignored numbers
self.state.add_transition_any ('SEMICOLON_X', DoLog, 'INIT')
self.state.add_transition_list (string.digits, 'SEMICOLON_X', None, 'NUMBER_X')
self.state.add_transition_any ('NUMBER_X', DoLog, 'INIT')
self.state.add_transition ('m', 'NUMBER_X', None, 'INIT')
self.state.add_transition ('q', 'NUMBER_X', None, 'INIT')
self.state.add_transition (';', 'NUMBER_2', None, 'SEMICOLON_X')
def process (self, c):
self.state.process(c)
def process_list (self, l):
self.write(l)
def write (self, s):
for c in s:
self.process(c)
def flush (self):
pass
def write_ch (self, ch):
"""This puts a character at the current cursor position. The cursor
position is moved forward with wrap-around, but no scrolling is done if
the cursor hits the lower-right corner of the screen. """
#\r and \n both produce a call to cr() and lf(), respectively.
ch = ch[0]
if ch == '\r':
self.cr()
return
if ch == '\n':
self.crlf()
return
if ch == chr(screen.BS):
self.cursor_back()
return
if ch not in string.printable:
fout = open ('log', 'a')
fout.write ('Nonprint: ' + str(ord(ch)) + '\n')
fout.close()
return
self.put_abs(self.cur_r, self.cur_c, ch)
old_r = self.cur_r
old_c = self.cur_c
self.cursor_forward()
if old_c == self.cur_c:
self.cursor_down()
if old_r != self.cur_r:
self.cursor_home (self.cur_r, 1)
else:
self.scroll_up ()
self.cursor_home (self.cur_r, 1)
self.erase_line()
# def test (self):
#
# import sys
# write_text = 'I\'ve got a ferret sticking up my nose.\n' + \
# '(He\'s got a ferret sticking up his nose.)\n' + \
# 'How it got there I can\'t tell\n' + \
# 'But now it\'s there it hurts like hell\n' + \
# 'And what is more it radically affects my sense of smell.\n' + \
# '(His sense of smell.)\n' + \
# 'I can see a bare-bottomed mandril.\n' + \
# '(Slyly eyeing his other nostril.)\n' + \
# 'If it jumps inside there too I really don\'t know what to do\n' + \
# 'I\'ll be the proud posessor of a kind of nasal zoo.\n' + \
# '(A nasal zoo.)\n' + \
# 'I\'ve got a ferret sticking up my nose.\n' + \
# '(And what is worst of all it constantly explodes.)\n' + \
# '"Ferrets don\'t explode," you say\n' + \
# 'But it happened nine times yesterday\n' + \
# 'And I should know for each time I was standing in the way.\n' + \
# 'I\'ve got a ferret sticking up my nose.\n' + \
# '(He\'s got a ferret sticking up his nose.)\n' + \
# 'How it got there I can\'t tell\n' + \
# 'But now it\'s there it hurts like hell\n' + \
# 'And what is more it radically affects my sense of smell.\n' + \
# '(His sense of smell.)'
# self.fill('.')
# self.cursor_home()
# for c in write_text:
# self.write_ch (c)
# print str(self)
#
#if __name__ == '__main__':
# t = ANSI(6,65)
# t.test()
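# Hedged usage sketch (added for illustration, not part of the original
# module): drive the emulator with a stream containing ANSI escapes and
# dump the resulting screen contents.
if __name__ == '__main__':
    t = ANSI(4, 20)
    t.write('hello\r\nworld\x1b[1;1HX')  # write text, then overwrite (1,1)
    print str(t)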
| bsd-3-clause |
pepsipepsi/nodebox_opengl_python3 | nodebox/ext/psyco/src/test/pystone.py | 4 | 9251 | #! /usr/bin/env python
"""
"PYSTONE" Benchmark Program
Version: Python/1.1 (corresponds to C/1.1 plus 2 Pystone fixes)
Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013.
Translated from ADA to C by Rick Richardson.
Every method to preserve ADA-likeness has been used,
at the expense of C-ness.
Translated from C to Python by Guido van Rossum.
Version History:
Version 1.1 corrects two bugs in version 1.0:
First, it leaked memory: in Proc1(), NextRecord ends
up having a pointer to itself. I have corrected this
by zapping NextRecord.PtrComp at the end of Proc1().
Second, Proc3() used the operator != to compare a
record to None. This is rather inefficient and not
true to the intention of the original benchmark (where
a pointer comparison to None is intended; the !=
operator attempts to find a method __cmp__ to do value
comparison of the record). Version 1.1 runs 5-10
percent faster than version 1.0, so benchmark figures
of different versions can't be compared directly.
"""
LOOPS = 100000
LOOPS1 = 100000 # number of loops for the first run of Psyco
from time import clock
__version__ = "1.1"
[Ident1, Ident2, Ident3, Ident4, Ident5] = range(1, 6)
class Record:
def __init__(self, PtrComp = None, Discr = 0, EnumComp = 0,
IntComp = 0, StringComp = 0):
self.PtrComp = PtrComp
self.Discr = Discr
self.EnumComp = EnumComp
self.IntComp = IntComp
self.StringComp = StringComp
def copy(self):
return Record(self.PtrComp, self.Discr, self.EnumComp,
self.IntComp, self.StringComp)
TRUE = 1
FALSE = 0
def mydiv(a,b):
try:
return float(a) / b
except:
return float("nan")
def main():
print "Pystone(%s) time loops per second" % __version__
py_time, = pystones_reg(LOOPS1+LOOPS)
pyloop_time = py_time / (LOOPS1+LOOPS)
print "regular Python for %d passes %g %g" % \
(LOOPS1+LOOPS, py_time, mydiv(1, pyloop_time))
psy_time1, psy_time = pystones_psycho(LOOPS1, LOOPS)
print "Psyco for %d passes %g %g" % \
(LOOPS1, psy_time1, mydiv(LOOPS1, psy_time1))
print "Psyco for %d more passes %g %g" % \
(LOOPS, psy_time, mydiv(LOOPS, psy_time))
print "Total for %d passes %g %g" % \
(LOOPS1+LOOPS, psy_time1+psy_time,
mydiv(LOOPS1+LOOPS, psy_time1+psy_time))
# invert the equation system:
# psy_time1 = start_time + LOOPS1*loop_time
# psy_time1+psy_time = start_time + (LOOPS1+LOOPS)*loop_time
loop_time = psy_time/LOOPS
start_time = psy_time1 - LOOPS1*loop_time
print "Separated compilation/execution timings for %d passes" % \
(LOOPS1+LOOPS)
print "Compilation (i.e. start-up) %g %g" % \
(start_time, mydiv(1, start_time))
print "Machine code execution %g %g" % \
(loop_time*(LOOPS1+LOOPS), mydiv(1, loop_time))
print
print "Relative execution frequencies (iterations per second)"
print "iterations Psyco Python Psyco is ... times faster"
for d in range(8):
n = 10**d
psyco1 = mydiv(n, start_time+n*loop_time)
print " %8d %g %g %.2f" % \
(n, psyco1, mydiv(1, pyloop_time), psyco1*pyloop_time)
# invert the equation
# start_time + c*loop_time = c*pyloop_time
if pyloop_time <= loop_time:
print "Psyco is always slower than regular Python."
else:
c = mydiv(start_time, pyloop_time - loop_time)
print "Cut-off point: %.1f iterations" % c
if start_time < 0.07:
print "Note: start-up time is very low, the above figure is not reliable."
print "You should consider running the same benchmark a large number of times"
print "and taking the mean value for the cut-off point."
def pystones_psycho(*loopslist):
import psyco
#old_dict = Record.__dict__.copy()
#try:
# replace all methods of Record by proxies
#for key, value in old_dict.items():
# if type(value) is type(main):
# Record.__dict__[key] = _psyco.proxy(value, 99)
f = psyco.proxy(Proc0)
return map(f, loopslist)
#finally:
# Record.__dict__.update(old_dict)
def pystones_reg(*loopslist):
return map(Proc0, loopslist)
IntGlob = 0
BoolGlob = FALSE
Char1Glob = '\0'
Char2Glob = '\0'
Array1Glob = [0]*51
Array2Glob = map(lambda x: x[:], [Array1Glob]*51)
PtrGlb = None
PtrGlbNext = None
def Proc0(loops=LOOPS):
global IntGlob
global BoolGlob
global Char1Glob
global Char2Glob
global Array1Glob
global Array2Glob
global PtrGlb
global PtrGlbNext
PtrGlbNext = Record()
PtrGlb = Record()
PtrGlb.PtrComp = PtrGlbNext
PtrGlb.Discr = Ident1
PtrGlb.EnumComp = Ident3
PtrGlb.IntComp = 40
PtrGlb.StringComp = "DHRYSTONE PROGRAM, SOME STRING"
String1Loc = "DHRYSTONE PROGRAM, 1'ST STRING"
Array2Glob[8][7] = 10
starttime = clock()
for i in range(loops):
Proc5()
Proc4()
IntLoc1 = 2
IntLoc2 = 3
String2Loc = "DHRYSTONE PROGRAM, 2'ND STRING"
EnumLoc = Ident2
BoolGlob = not Func2(String1Loc, String2Loc)
while IntLoc1 < IntLoc2:
IntLoc3 = 5 * IntLoc1 - IntLoc2
IntLoc3 = Proc7(IntLoc1, IntLoc2)
IntLoc1 = IntLoc1 + 1
Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3)
PtrGlb = Proc1(PtrGlb)
CharIndex = 'A'
while CharIndex <= Char2Glob:
if EnumLoc == Func1(CharIndex, 'C'):
EnumLoc = Proc6(Ident1)
CharIndex = chr(ord(CharIndex)+1)
IntLoc3 = IntLoc2 * IntLoc1
IntLoc2 = IntLoc3 / IntLoc1
IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1
IntLoc1 = Proc2(IntLoc1)
benchtime = clock() - starttime
return benchtime
def Proc1(PtrParIn):
PtrParIn.PtrComp = NextRecord = PtrGlb.copy()
PtrParIn.IntComp = 5
NextRecord.IntComp = PtrParIn.IntComp
NextRecord.PtrComp = PtrParIn.PtrComp
NextRecord.PtrComp = Proc3(NextRecord.PtrComp)
if NextRecord.Discr == Ident1:
NextRecord.IntComp = 6
NextRecord.EnumComp = Proc6(PtrParIn.EnumComp)
NextRecord.PtrComp = PtrGlb.PtrComp
NextRecord.IntComp = Proc7(NextRecord.IntComp, 10)
else:
PtrParIn = NextRecord.copy()
NextRecord.PtrComp = None
return PtrParIn
def Proc2(IntParIO):
IntLoc = IntParIO + 10
while 1:
if Char1Glob == 'A':
IntLoc = IntLoc - 1
IntParIO = IntLoc - IntGlob
EnumLoc = Ident1
if EnumLoc == Ident1:
break
return IntParIO
def Proc3(PtrParOut):
global IntGlob
if PtrGlb is not None:
PtrParOut = PtrGlb.PtrComp
else:
IntGlob = 100
PtrGlb.IntComp = Proc7(10, IntGlob)
return PtrParOut
def Proc4():
global Char2Glob
BoolLoc = Char1Glob == 'A'
BoolLoc = BoolLoc or BoolGlob
Char2Glob = 'B'
def Proc5():
global Char1Glob
global BoolGlob
Char1Glob = 'A'
BoolGlob = FALSE
def Proc6(EnumParIn):
EnumParOut = EnumParIn
if not Func3(EnumParIn):
EnumParOut = Ident4
if EnumParIn == Ident1:
EnumParOut = Ident1
elif EnumParIn == Ident2:
if IntGlob > 100:
EnumParOut = Ident1
else:
EnumParOut = Ident4
elif EnumParIn == Ident3:
EnumParOut = Ident2
elif EnumParIn == Ident4:
pass
elif EnumParIn == Ident5:
EnumParOut = Ident3
return EnumParOut
def Proc7(IntParI1, IntParI2):
IntLoc = IntParI1 + 2
IntParOut = IntParI2 + IntLoc
return IntParOut
def Proc8(Array1Par, Array2Par, IntParI1, IntParI2):
global IntGlob
IntLoc = IntParI1 + 5
Array1Par[IntLoc] = IntParI2
Array1Par[IntLoc+1] = Array1Par[IntLoc]
Array1Par[IntLoc+30] = IntLoc
for IntIndex in range(IntLoc, IntLoc+2):
Array2Par[IntLoc][IntIndex] = IntLoc
Array2Par[IntLoc][IntLoc-1] = Array2Par[IntLoc][IntLoc-1] + 1
Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc]
IntGlob = 5
def Func1(CharPar1, CharPar2):
CharLoc1 = CharPar1
CharLoc2 = CharLoc1
if CharLoc2 != CharPar2:
return Ident1
else:
return Ident2
def Func2(StrParI1, StrParI2):
IntLoc = 1
while IntLoc <= 1:
if Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1:
CharLoc = 'A'
IntLoc = IntLoc + 1
if CharLoc >= 'W' and CharLoc <= 'Z':
IntLoc = 7
if CharLoc == 'X':
return TRUE
else:
if StrParI1 > StrParI2:
IntLoc = IntLoc + 7
return TRUE
else:
return FALSE
def Func3(EnumParIn):
EnumLoc = EnumParIn
if EnumLoc == Ident3: return TRUE
return FALSE
if __name__ == '__main__':
main()
| bsd-3-clause |
dcowden/cadquery-freecad-module | CadQuery/Libs/pygments/lexers/dalvik.py | 72 | 4420 | # -*- coding: utf-8 -*-
"""
pygments.lexers.dalvik
~~~~~~~~~~~~~~~~~~~~~~
Pygments lexers for Dalvik VM-related languages.
:copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
from pygments.lexer import RegexLexer, include, bygroups
from pygments.token import Keyword, Text, Comment, Name, String, Number, \
Punctuation
__all__ = ['SmaliLexer']
class SmaliLexer(RegexLexer):
"""
For `Smali <http://code.google.com/p/smali/>`_ (Android/Dalvik) assembly
code.
.. versionadded:: 1.6
"""
name = 'Smali'
aliases = ['smali']
filenames = ['*.smali']
mimetypes = ['text/smali']
tokens = {
'root': [
include('comment'),
include('label'),
include('field'),
include('method'),
include('class'),
include('directive'),
include('access-modifier'),
include('instruction'),
include('literal'),
include('punctuation'),
include('type'),
include('whitespace')
],
'directive': [
(r'^[ \t]*\.(class|super|implements|field|subannotation|annotation|'
r'enum|method|registers|locals|array-data|packed-switch|'
r'sparse-switch|catchall|catch|line|parameter|local|prologue|'
r'epilogue|source)', Keyword),
(r'^[ \t]*\.end (field|subannotation|annotation|method|array-data|'
'packed-switch|sparse-switch|parameter|local)', Keyword),
(r'^[ \t]*\.restart local', Keyword),
],
'access-modifier': [
(r'(public|private|protected|static|final|synchronized|bridge|'
r'varargs|native|abstract|strictfp|synthetic|constructor|'
r'declared-synchronized|interface|enum|annotation|volatile|'
r'transient)', Keyword),
],
'whitespace': [
(r'\n', Text),
(r'\s+', Text),
],
'instruction': [
(r'\b[vp]\d+\b', Name.Builtin), # registers
(r'\b[a-z][A-Za-z0-9/-]+\s+', Text), # instructions
],
'literal': [
(r'".*"', String),
(r'0x[0-9A-Fa-f]+t?', Number.Hex),
(r'[0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
(r'[0-9]+L?', Number.Integer),
],
'field': [
(r'(\$?\b)([\w$]*)(:)',
bygroups(Punctuation, Name.Variable, Punctuation)),
],
'method': [
(r'<(?:cl)?init>', Name.Function), # constructor
(r'(\$?\b)([\w$]*)(\()',
bygroups(Punctuation, Name.Function, Punctuation)),
],
'label': [
(r':\w+', Name.Label),
],
'class': [
# class names in the form Lcom/namespace/ClassName;
# I only want to color the ClassName part, so the namespace part is
# treated as 'Text'
(r'(L)((?:[\w$]+/)*)([\w$]+)(;)',
bygroups(Keyword.Type, Text, Name.Class, Text)),
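# For example (illustrative): 'Lcom/example/Foo;' tokenizes as
# 'L' -> Keyword.Type, 'com/example/' -> Text, 'Foo' -> Name.Class,
# ';' -> Text.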
],
'punctuation': [
(r'->', Punctuation),
(r'[{},():=.-]', Punctuation),
],
'type': [
(r'[ZBSCIJFDV\[]+', Keyword.Type),
],
'comment': [
(r'#.*?\n', Comment),
],
}
def analyse_text(text):
score = 0
if re.search(r'^\s*\.class\s', text, re.MULTILINE):
score += 0.5
if re.search(r'\b((check-cast|instance-of|throw-verification-error'
r')\b|(-to|add|[ais]get|[ais]put|and|cmpl|const|div|'
r'if|invoke|move|mul|neg|not|or|rem|return|rsub|shl|'
r'shr|sub|ushr)[-/])|{|}', text, re.MULTILINE):
score += 0.3
if re.search(r'(\.(catchall|epilogue|restart local|prologue)|'
r'\b(array-data|class-change-error|declared-synchronized|'
r'(field|inline|vtable)@0x[0-9a-fA-F]|generic-error|'
r'illegal-class-access|illegal-field-access|'
r'illegal-method-access|instantiation-error|no-error|'
r'no-such-class|no-such-field|no-such-method|'
r'packed-switch|sparse-switch))\b', text, re.MULTILINE):
score += 0.6
return score
| lgpl-3.0 |
hamiltont/CouchPotatoServer | libs/html5lib/trie/datrie.py | 785 | 1166 | from __future__ import absolute_import, division, unicode_literals
from datrie import Trie as DATrie
from six import text_type
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
def __init__(self, data):
chars = set()
for key in data.keys():
if not isinstance(key, text_type):
raise TypeError("All keys must be strings")
for char in key:
chars.add(char)
self._data = DATrie("".join(chars))
for key, value in data.items():
self._data[key] = value
def __contains__(self, key):
return key in self._data
def __len__(self):
return len(self._data)
def __iter__(self):
raise NotImplementedError()
def __getitem__(self, key):
return self._data[key]
def keys(self, prefix=None):
return self._data.keys(prefix)
def has_keys_with_prefix(self, prefix):
return self._data.has_keys_with_prefix(prefix)
def longest_prefix(self, prefix):
return self._data.longest_prefix(prefix)
def longest_prefix_item(self, prefix):
return self._data.longest_prefix_item(prefix)
| gpl-3.0 |
PaesslerAG/django-performance-testing | settings.py | 1 | 1267 | # Django settings for autodata project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
# Insert your TEMPLATE_CONTEXT_PROCESSORS here or use this
# list if you haven't customized them:
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'mq%31q+sjj^)m^tvy(klwqw6ksv7du2yzdf9-django_performance_testing'
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django_performance_testing',
'testapp',
)
STATIC_URL = '/static/'
ROOT_URLCONF = None
| bsd-3-clause |
puckipedia/youtube-dl | youtube_dl/extractor/musicvault.py | 73 | 2511 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
class MusicVaultIE(InfoExtractor):
_VALID_URL = r'https?://www\.musicvault\.com/(?P<uploader_id>[^/?#]*)/video/(?P<display_id>[^/?#]*)_(?P<id>[0-9]+)\.html'
_TEST = {
'url': 'http://www.musicvault.com/the-allman-brothers-band/video/straight-from-the-heart_1010863.html',
'md5': '3adcbdb3dcc02d647539e53f284ba171',
'info_dict': {
'id': '1010863',
'ext': 'mp4',
'uploader_id': 'the-allman-brothers-band',
'title': 'Straight from the Heart',
'duration': 244,
'uploader': 'The Allman Brothers Band',
'thumbnail': 're:^https?://.*/thumbnail/.*',
'upload_date': '20131219',
'location': 'Capitol Theatre (Passaic, NJ)',
'description': 'Listen to The Allman Brothers Band perform Straight from the Heart at Capitol Theatre (Passaic, NJ) on Dec 16, 1981',
'timestamp': int,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
display_id = mobj.group('display_id')
webpage = self._download_webpage(url, display_id)
thumbnail = self._search_regex(
r'<meta itemprop="thumbnail" content="([^"]+)"',
webpage, 'thumbnail', fatal=False)
data_div = self._search_regex(
r'(?s)<div class="data">(.*?)</div>', webpage, 'data fields')
uploader = self._html_search_regex(
r'<h1.*?>(.*?)</h1>', data_div, 'uploader', fatal=False)
title = self._html_search_regex(
r'<h2.*?>(.*?)</h2>', data_div, 'title')
location = self._html_search_regex(
r'<h4.*?>(.*?)</h4>', data_div, 'location', fatal=False)
kaltura_id = self._search_regex(
r'<div id="video-detail-player" data-kaltura-id="([^"]+)"',
webpage, 'kaltura ID')
wid = self._search_regex(r'/wid/_([0-9]+)/', webpage, 'wid')
return {
'id': mobj.group('id'),
'_type': 'url_transparent',
'url': 'kaltura:%s:%s' % (wid, kaltura_id),
'ie_key': 'Kaltura',
'display_id': display_id,
'uploader_id': mobj.group('uploader_id'),
'thumbnail': thumbnail,
'description': self._html_search_meta('description', webpage),
'location': location,
'title': title,
'uploader': uploader,
}
| unlicense |
mvaled/sentry | src/sentry/runner/commands/permissions.py | 3 | 2488 | from __future__ import absolute_import, print_function
import click
from sentry.runner.decorators import configuration
def user_param_to_user(value):
from sentry.utils.auth import find_users
users = find_users(value)
if not users:
raise click.ClickException(u"No user matching `{}`".format(value))
if len(users) > 1:
raise click.ClickException(u"Found more than one user matching `{}`".format(value))
user = users[0]
if not user.is_superuser:
raise click.ClickException(
u"User `{}` does not have superuser status".format(user.username)
)
return user
@click.group()
def permissions():
"Manage Permissions for Users."
@permissions.command()
@click.option("--user", "-u", default=None, required=True)
@click.option("--permission", "-p", default=None, required=True)
@configuration
def add(user, permission):
"Add a permission to a user."
from django.db import IntegrityError, transaction
from sentry.models import UserPermission
user = user_param_to_user(user)
try:
with transaction.atomic():
UserPermission.objects.create(user=user, permission=permission)
except IntegrityError:
click.echo(u"Permission already exists for `{}`".format(user.username))
else:
click.echo(u"Added permission `{}` to `{}`".format(permission, user.username))
@permissions.command()
@click.option("--user", "-u", default=None, required=True)
@click.option("--permission", "-p", default=None, required=True)
@configuration
def remove(user, permission):
"Remove a permission from a user."
from sentry.models import UserPermission
user = user_param_to_user(user)
try:
up = UserPermission.objects.get(user=user, permission=permission)
except UserPermission.DoesNotExist:
click.echo(u"Permission does not exist for `{}`".format(user.username))
else:
up.delete()
click.echo(u"Removed permission `{}` from `{}`".format(permission, user.username))
@permissions.command()
@click.option("--user", "-u", default=None, required=True)
@configuration
def list(user):
"List permissions for a user."
from sentry.models import UserPermission
user = user_param_to_user(user)
up_list = UserPermission.objects.filter(user=user).order_by("permission")
click.echo(u"Permissions for `{}`:".format(user.username))
for permission in up_list:
click.echo(u"- {}".format(permission.permission))
| bsd-3-clause |
peterfpeterson/mantid | scripts/Muon/GUI/MuonAnalysis/dock/dock_widget.py | 3 | 2880 | # Mantid Repository : https://github.com/mantidproject/mantid
#
# Copyright © 2018 ISIS Rutherford Appleton Laboratory UKRI,
# NScD Oak Ridge National Laboratory, European Spallation Source,
# Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS
# SPDX - License - Identifier: GPL - 3.0 +
from qtpy import QtWidgets
from Muon.GUI.Common.context_example.context_example_widget import ContextExampleWidget
from Muon.GUI.Common.dummy.dummy_widget import DummyWidget
from Muon.GUI.Common.dummy_label.dummy_label_widget import DummyLabelWidget
from Muon.GUI.Common.dock.dock_view import DockView
from Muon.GUI.Common.muon_context.muon_context import *
class DockWidget(QtWidgets.QWidget):
"""
This is a special case of the widget class structure.
Normally we would only store the presenter and would
get the view via the presenter. However, the docks
have no logic and therefore do not have a presenter.
So this class simply wraps the dock (view) and
populates it
"""
def __init__(self, context, parent=None):
super(DockWidget, self).__init__(parent)
self.dockWidget = QtWidgets.QWidget()
self.dock_view = DockView(self)
self.context_example = ContextExampleWidget(context, parent=self)
self.dock_view.addDock(self.context_example.widget, "Example context")
self.btn = DummyWidget("moo", self)
self.dock_view.addDock(self.btn.widget, "first")
self.btn.setButtonConnection(self.handleButton)
self.label = DummyLabelWidget(context, Tab2Text, self)
self.dock_view.addDock(self.label.widget, "second")
self.btn2 = DummyWidget("waaa", self)
self.dock_view.addDock(self.btn2.widget, "third")
self.btn2.setButtonConnection(self.handleButton)
self.dock_view.makeTabs()
self.dock_view.keepDocksOpen()
QHbox = QtWidgets.QHBoxLayout()
QHbox.addWidget(self.dock_view)
self.dockWidget.setLayout(QHbox)
# set signals and slots
def setUpdateContext(self, slot):
self.context_example.setUpdateContext(slot)
# the buttons change the label value
# so we want to update context
self.btn.setButtonConnection(slot)
self.btn2.setButtonConnection(slot)
def loadFromProject(self, project):
self.label.updateLabel(project)
def handleButton(self, message):
self.label.updateLabel(message)
# interaction with context
def updateContext(self):
self.label.updateContext()
self.context_example.updateContext()
def loadFromContext(self):
self.label.loadFromContext()
self.context_example.loadFromContext()
# needed for docking
@property
def widget(self):
return self.dockWidget
def closeEvent(self, event):
self.dock_view.closeEvent(event)
| gpl-3.0 |
minghuascode/pyj | examples/gwtcanvas/ParticleDemo.py | 8 | 5449 | """
* Copyright 2008 Google Inc.
* Copyright (C) 2009 Luke Kenneth Casson Leighton <lkcl@lkcl.net>
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
"""
from pyjamas.ui.Composite import Composite
from pyjamas.ui.Label import Label
from pyjamas.ui.VerticalPanel import VerticalPanel
from pyjamas.Timer import Timer
from pyjamas.Canvas.Color import Color
import time
from SimpleCanvasDemo import SimpleCanvasDemo
""" Linear congruent random number generator.
*
* Constants are from Knuth via Numerical Recipes in C.
*
"""
global ig
ig = 0
def rnd():
global ig
m = 217728
a = 84589
c = 45989
ig = (a*ig + c) % m
return float(ig)/m
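# Illustration (values approximate): with ig starting at 0, the first
# call returns 45989/217728 ~= 0.2112; resetting ig to 0 replays the
# same deterministic sequence.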
"""*
* Model of a single particle object in the simulation.
"""
class Particle:
def __init__(self, chart):
self.chart = chart
self.kill = False
self.gravity = 0.1
self.xDampening = 0.9
self.yDampening = 0.9
self.xPos = (rnd() * self.chart.width)
self.yPos = (rnd() * self.chart.height)
# Get some negative velocities
self.xVel = (rnd() * 5 - 2.5)
self.yVel = (rnd() * 5 - 2.5)
def update(self):
# Reverse direction on boundaries
if self.xPos > self.chart.width or self.xPos < 0:
self.xVel = -self.xVel
self.xPos += self.xVel
# suck out some energy
self.xDampening = max(self.xDampening - 0.1, 0)
self.xVel *= self.xDampening
if self.yPos > self.chart.height or self.yPos < 0:
self.yVel = -self.yVel
self.yPos += self.yVel
# suck out some energy
self.yDampening = max(self.yDampening - 0.1,0)
self.yVel *= self.yDampening
if (self.yPos > self.chart.height - 4) and (abs(self.yVel) < 0.1):
self.yPos = self.chart.height
self.xVel = 0
self.gravity = 0
self.kill = True
self.xPos += self.xVel
self.yPos += self.yVel
# apply gravity
self.yVel += self.gravity
"""
* Not so much controls as feedback for benchmarking.
"""
class ParticleDemoControls (Composite):
def __init__(self):
self.average = 1
self.iterations = 1
self.startTime = -1
self.refreshRateLabel = Label("")
self.averageLabel = Label("")
layout = VerticalPanel()
layout.add(self.refreshRateLabel)
layout.add(self.averageLabel)
Composite.__init__(self, layout)
def doBenchmark(self, now):
if self.startTime < 0:
self.startTime = now
else:
self.refreshRate = now - self.startTime
self.startTime = now
self.average = ((self.average * self.iterations) + self.refreshRate) / (self.iterations + 1)
self.iterations += 1
self.refreshRateLabel.setText("Refresh Interval: " + str(self.refreshRate))
self.averageLabel.setText("Average Interval: " + str(self.average))
def resetBenchmark(self):
self.average = 1
self.iterations = 1
self.startTime = -1
"""*
* Simple particle Simulation showing off some of the Path API.
"""
class ParticleDemo (SimpleCanvasDemo):
def __init__(self, theCanvas):
SimpleCanvasDemo.__init__(self, theCanvas)
self.numParticles = 20
self.particles = []
self.takeBenchmarks = False
self.width = 400
self.height = 300
self.canvas = theCanvas
self.demoName = "Particle Demo"
self.run = False
def createControls(self):
self.controls = ParticleDemoControls()
def drawDemo(self):
self.canvas.resize(self.width, self.height)
self.particles = []
for i in range(self.numParticles):
self.particles.append( Particle(self) )
self.canvas.saveContext()
self.canvas.setLineWidth(2)
self.canvas.setStrokeStyle(Color(255,0,0))
self.run = True
Timer(10, self)
def onTimer(self, timer):
if not self.run:
return
self.renderingLoop()
timer.schedule(10)
def renderingLoop(self):
self.canvas.clear()
for i in range(len(self.particles)):
if self.particles[i].kill:
continue
self.particles[i].update()
self.canvas.beginPath()
self.canvas.moveTo(self.particles[i].xPos, self.particles[i].yPos)
self.canvas.lineTo(self.particles[i].xPos - self.particles[i].xVel,
self.particles[i].yPos - self.particles[i].yVel)
self.canvas.closePath()
self.canvas.stroke()
# take a benchmark
if self.takeBenchmarks:
self.controls.doBenchmark(time.time())
def stopDemo(self):
self.run = False
self.controls.resetBenchmark()
self.canvas.restoreContext()
| apache-2.0 |
nichung/wwwflaskBlogrevA | env/lib/python2.7/site-packages/chardet/langhebrewmodel.py | 269 | 11345 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Simon Montagu
# Portions created by the Initial Developer are Copyright (C) 2005
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Shoshannah Forbes - original C code (?)
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
# 252: 0 - 9
# Windows-1255 language model
# Character Mapping Table:
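# For example (read off the table below): byte 0x41 ('A') maps to
# order 69, while bytes 0x30-0x39 ('0'-'9') all map to 252, matching
# the legend above.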
WIN1255_CHAR_TO_ORDER_MAP = (
255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00
255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10
253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20
252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30
253, 69, 91, 79, 80, 92, 89, 97, 90, 68,111,112, 82, 73, 95, 85, # 40
78,121, 86, 71, 67,102,107, 84,114,103,115,253,253,253,253,253, # 50
253, 50, 74, 60, 61, 42, 76, 70, 64, 53,105, 93, 56, 65, 54, 49, # 60
66,110, 51, 43, 44, 63, 81, 77, 98, 75,108,253,253,253,253,253, # 70
124,202,203,204,205, 40, 58,206,207,208,209,210,211,212,213,214,
215, 83, 52, 47, 46, 72, 32, 94,216,113,217,109,218,219,220,221,
34,116,222,118,100,223,224,117,119,104,125,225,226, 87, 99,227,
106,122,123,228, 55,229,230,101,231,232,120,233, 48, 39, 57,234,
30, 59, 41, 88, 33, 37, 36, 31, 29, 35,235, 62, 28,236,126,237,
238, 38, 45,239,240,241,242,243,127,244,245,246,247,248,249,250,
9, 8, 20, 16, 3, 2, 24, 14, 22, 1, 25, 15, 4, 11, 6, 23,
12, 19, 13, 26, 18, 27, 21, 17, 7, 10, 5,251,252,128, 96,253,
)
# Model Table:
# total sequences: 100%
# first 512 sequences: 98.4004%
# first 1024 sequences: 1.5981%
# rest sequences: 0.087%
# negative sequences: 0.0015%
HEBREW_LANG_MODEL = (
0,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,2,3,2,1,2,0,1,0,0,
3,0,3,1,0,0,1,3,2,0,1,1,2,0,2,2,2,1,1,1,1,2,1,1,1,2,0,0,2,2,0,1,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,
1,2,1,2,1,2,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,
1,2,1,3,1,1,0,0,2,0,0,0,1,0,1,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,1,2,2,1,3,
1,2,1,1,2,2,0,0,2,2,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,1,0,1,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,2,2,2,3,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,2,3,2,2,3,2,2,2,1,2,2,2,2,
1,2,1,1,2,2,0,1,2,0,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,0,2,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,0,2,2,2,
0,2,1,2,2,2,0,0,2,1,0,0,0,0,1,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,2,1,2,3,2,2,2,
1,2,1,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,0,2,0,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,1,0,0,0,2,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,2,2,3,2,1,2,1,1,1,
0,1,1,1,1,1,3,0,1,0,0,0,0,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,1,1,0,0,1,0,0,1,0,0,0,0,
0,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,2,2,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,2,1,2,3,3,2,3,3,3,3,2,3,2,1,2,0,2,1,2,
0,2,0,2,2,2,0,0,1,2,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,1,2,2,3,3,2,3,2,3,2,2,3,1,2,2,0,2,2,2,
0,2,1,2,2,2,0,0,1,2,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,1,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,2,3,3,2,2,2,3,3,3,3,1,3,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,2,3,3,3,2,3,2,2,2,1,2,2,0,2,2,2,2,
0,2,0,2,2,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,1,3,2,3,3,2,3,3,2,2,1,2,2,2,2,2,2,
0,2,1,2,1,2,0,0,1,0,0,0,0,0,1,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,2,3,3,2,3,3,3,3,2,3,2,3,3,3,3,3,2,2,2,2,2,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,3,3,3,2,1,2,3,3,3,3,3,3,3,2,3,2,3,2,1,2,3,0,2,1,2,2,
0,2,1,1,2,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,2,0,
3,3,3,3,3,3,3,3,3,2,3,3,3,3,2,1,3,1,2,2,2,1,2,3,3,1,2,1,2,2,2,2,
0,1,1,1,1,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,
3,3,3,3,3,3,3,3,3,3,0,2,3,3,3,1,3,3,3,1,2,2,2,2,1,1,2,2,2,2,2,2,
0,2,0,1,1,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,3,3,3,3,3,2,3,3,3,2,2,3,3,3,2,1,2,3,2,3,2,2,2,2,1,2,1,1,1,2,2,
0,2,1,1,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,1,0,0,0,0,0,
1,0,1,0,0,0,0,0,2,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,3,3,3,3,2,3,3,2,3,1,2,2,2,2,3,2,3,1,1,2,2,1,2,2,1,1,0,2,2,2,2,
0,1,0,1,2,2,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,1,0,
3,0,0,1,1,0,1,0,0,1,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,1,2,2,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,1,0,1,0,1,1,0,1,1,0,0,0,1,1,0,1,1,1,0,0,0,0,0,0,1,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,0,0,0,1,1,0,1,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,
3,2,2,1,2,2,2,2,2,2,2,1,2,2,1,2,2,1,1,1,1,1,1,1,1,2,1,1,0,3,3,3,
0,3,0,2,2,2,2,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
2,2,2,3,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,2,1,2,2,2,1,1,1,2,0,1,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,2,2,2,2,2,2,0,2,2,0,0,0,0,0,0,
0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,3,1,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,1,2,1,0,2,1,0,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,1,1,1,1,1,1,1,1,1,1,0,0,1,1,1,1,0,1,1,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,
0,3,1,1,2,2,2,2,2,1,2,2,2,1,1,2,2,2,2,2,2,2,1,2,2,1,0,1,1,1,1,0,
0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
3,2,1,1,1,1,2,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,2,0,0,0,0,0,0,0,0,1,1,0,0,0,0,1,1,0,0,1,1,0,0,0,0,0,0,1,0,0,
2,1,1,2,2,2,2,2,2,2,2,2,2,2,1,2,2,2,2,2,1,2,1,2,1,1,1,1,0,0,0,0,
0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,2,1,2,2,2,2,2,2,2,2,2,2,1,2,1,2,1,1,2,1,1,1,2,1,2,1,2,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,3,1,2,2,2,1,2,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,2,1,2,1,1,0,1,0,1,
0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,2,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,2,2,
0,2,0,1,2,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,
3,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,1,1,1,1,1,1,0,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,1,1,1,0,1,0,0,0,1,1,0,1,1,0,0,0,0,0,1,1,0,0,
0,1,1,1,2,1,2,2,2,0,2,0,2,0,1,1,2,1,1,1,1,2,1,0,1,1,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,1,0,0,0,0,0,1,0,1,2,2,0,1,0,0,1,1,2,2,1,2,0,2,0,0,0,1,2,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,2,0,2,1,2,0,2,0,0,1,1,1,1,1,1,0,1,0,0,0,1,0,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,1,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,1,2,2,0,0,1,0,0,0,1,0,0,1,
1,1,2,1,0,1,1,1,0,1,0,1,1,1,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,2,1,
0,2,0,1,2,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,1,0,0,1,0,1,1,1,1,0,0,0,0,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,1,1,1,1,1,1,1,1,2,1,0,1,1,1,1,1,1,1,1,1,1,1,0,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,1,1,0,0,0,0,1,1,1,0,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,1,0,1,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,1,0,1,0,0,1,1,2,1,1,2,0,1,0,0,0,1,1,0,1,
1,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,0,0,2,1,1,2,0,2,0,0,0,1,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,2,2,1,2,1,1,0,1,0,0,0,1,1,0,1,
2,0,1,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,1,0,1,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,2,0,0,0,0,2,1,1,1,0,2,1,1,0,0,0,2,1,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,0,2,1,1,0,1,0,0,0,1,1,0,1,
2,2,1,1,1,0,1,1,0,1,1,0,1,0,0,0,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,2,1,1,0,1,0,0,1,1,0,1,2,1,0,2,0,0,0,1,1,0,1,
2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,
0,1,0,0,2,0,2,1,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0,0,1,0,0,0,0,0,1,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,1,0,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,1,0,1,1,2,0,1,0,0,1,1,1,0,1,0,0,1,0,0,0,1,0,0,1,
1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,1,0,1,1,0,0,1,0,0,2,1,1,1,1,1,0,1,0,0,0,0,1,0,1,
0,1,1,1,2,1,1,1,1,0,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,0,0,1,2,1,0,0,0,0,0,1,1,1,1,1,0,1,0,0,0,1,1,0,0,
)
Win1255HebrewModel = {
'char_to_order_map': WIN1255_CHAR_TO_ORDER_MAP,
'precedence_matrix': HEBREW_LANG_MODEL,
'typical_positive_ratio': 0.984004,
'keep_english_letter': False,
'charset_name': "windows-1255",
'language': 'Hebrew',
}
| mit |
nesdis/djongo | tests/django_tests/tests/v21/tests/forms_tests/widget_tests/test_timeinput.py | 84 | 1721 | from datetime import time
from django.forms import TimeInput
from django.test import override_settings
from django.utils import translation
from .base import WidgetTest
class TimeInputTest(WidgetTest):
widget = TimeInput()
def test_render_none(self):
self.check_html(self.widget, 'time', None, html='<input type="text" name="time">')
def test_render_value(self):
"""
The microseconds are trimmed on display, by default.
"""
t = time(12, 51, 34, 482548)
self.assertEqual(str(t), '12:51:34.482548')
self.check_html(self.widget, 'time', t, html='<input type="text" name="time" value="12:51:34">')
self.check_html(self.widget, 'time', time(12, 51, 34), html=(
'<input type="text" name="time" value="12:51:34">'
))
self.check_html(self.widget, 'time', time(12, 51), html=(
'<input type="text" name="time" value="12:51:00">'
))
def test_string(self):
"""Initializing from a string value."""
self.check_html(self.widget, 'time', '13:12:11', html=(
'<input type="text" name="time" value="13:12:11">'
))
def test_format(self):
"""
Use 'format' to change the way a value is displayed.
"""
t = time(12, 51, 34, 482548)
widget = TimeInput(format='%H:%M', attrs={'type': 'time'})
self.check_html(widget, 'time', t, html='<input type="time" name="time" value="12:51">')
@override_settings(USE_L10N=True)
@translation.override('de-at')
def test_l10n(self):
t = time(12, 51, 34, 482548)
self.check_html(self.widget, 'time', t, html='<input type="text" name="time" value="12:51:34">')
| agpl-3.0 |
TestInABox/openstackinabox | openstackinabox/tests/services/cinder/v1/volumes/test_retrieve.py | 1 | 1154 | """
"""
import unittest
import requests
import stackinabox.util.requests_mock.core
from stackinabox.stack import StackInABox
from openstackinabox.services.cinder import CinderV1Service
from openstackinabox.services.keystone import KeystoneV2Service
class TestCinderV1Retrieve(unittest.TestCase):
def setUp(self):
super(TestCinderV1Retrieve, self).setUp()
self.keystone = KeystoneV2Service()
self.cinder = CinderV1Service(self.keystone)
self.headers = {
'x-auth-token': self.keystone.model.tokens.admin_token
}
StackInABox.register_service(self.keystone)
StackInABox.register_service(self.cinder)
def tearDown(self):
super(TestCinderV1Retrieve, self).tearDown()
StackInABox.reset_services()
def test_volume_retrieve(self):
with stackinabox.util.requests_mock.core.activate():
stackinabox.util.requests_mock.core.requests_mock_registration(
'localhost'
)
res = requests.get(
'http://localhost/cinder/v1/volumes'
)
self.assertEqual(res.status_code, 500)
| apache-2.0 |
xuegang/gpdb | src/test/tinc/ext/pexpect.py | 21 | 62613 | """Pexpect is a Python module for spawning child applications and controlling
them automatically. Pexpect can be used for automating interactive applications
such as ssh, ftp, passwd, telnet, etc. It can be used to automate setup
scripts for duplicating software package installations on different servers. It
can be used for automated software testing. Pexpect is in the spirit of Don
Libes' Expect, but Pexpect is pure Python. Other Expect-like modules for Python
require TCL and Expect or require C extensions to be compiled. Pexpect does not
use C, Expect, or TCL extensions. It should work on any platform that supports
the standard Python pty module. The Pexpect interface focuses on ease of use so
that simple tasks are easy.
There are two main interfaces to Pexpect -- the function, run() and the class,
spawn. You can call the run() function to execute a command and return the
output. This is a handy replacement for os.system().
For example:
pexpect.run('ls -la')
The more powerful interface is the spawn class. You can use this to spawn an
external child command and then interact with the child by sending lines and
expecting responses.
For example:
child = pexpect.spawn('scp foo myname@host.example.com:.')
child.expect ('Password:')
child.sendline (mypassword)
This works even for commands that ask for passwords or other input outside of
the normal stdio streams.
Credits:
Noah Spurrier, Richard Holden, Marco Molteni, Kimberley Burchett, Robert Stone,
Hartmut Goebel, Chad Schroeder, Erick Tryzelaar, Dave Kirby, Ids vander Molen,
George Todd, Noel Taylor, Nicolas D. Cesar, Alexander Gattin,
Geoffrey Marshall, Francisco Lourenco, Glen Mabey, Karthik Gurusamy,
Fernando Perez
(Let me know if I forgot anyone.)
Free, open source, and all that good stuff.
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Pexpect Copyright (c) 2006 Noah Spurrier
http://pexpect.sourceforge.net/
$Revision: #4 $
$Date: 2008/03/25 $
"""
try:
import os, sys, time
import select
import string
import re
import struct
import resource
import types
import pty
import tty
import termios
import fcntl
import errno
import traceback
import signal
except ImportError, e:
raise ImportError (str(e) + """
A critical module was not found. Probably this operating system does not support it.
Pexpect is intended for UNIX-like operating systems.""")
__version__ = '2.1'
__revision__ = '$Revision: #4 $'
__all__ = ['ExceptionPexpect', 'EOF', 'TIMEOUT', 'spawn', 'run', 'which', 'split_command_line',
'__version__', '__revision__']
# Exception classes used by this module.
class ExceptionPexpect(Exception):
"""Base class for all exceptions raised by this module.
"""
def __init__(self, value):
self.value = value
def __str__(self):
return str(self.value)
def get_trace(self):
"""This returns an abbreviated stack trace with lines that only concern the caller.
In other words, the stack trace inside the Pexpect module is not included.
"""
tblist = traceback.extract_tb(sys.exc_info()[2])
tblist = filter(self.__filter_not_pexpect, tblist)
tblist = traceback.format_list(tblist)
return ''.join(tblist)
def __filter_not_pexpect(self, trace_list_item):
if trace_list_item[0].find('pexpect.py') == -1:
return True
else:
return False
class EOF(ExceptionPexpect):
"""Raised when EOF is read from a child.
"""
class TIMEOUT(ExceptionPexpect):
"""Raised when a read time exceeds the timeout.
"""
##class TIMEOUT_PATTERN(TIMEOUT):
## """Raised when the pattern match time exceeds the timeout.
## This is different than a read TIMEOUT because the child process may
## give output, thus never give a TIMEOUT, but the output
## may never match a pattern.
## """
##class MAXBUFFER(ExceptionPexpect):
## """Raised when a scan buffer fills before matching an expected pattern."""
def run (command, timeout=-1, withexitstatus=False, events=None, extra_args=None, logfile=None):
"""This function runs the given command; waits for it to finish;
then returns all output as a string. STDERR is included in output.
If the full path to the command is not given then the path is searched.
Note that lines are terminated by CR/LF (\\r\\n) combination
even on UNIX-like systems because this is the standard for pseudo ttys.
If you set withexitstatus to true, then run will return a tuple of
(command_output, exitstatus). If withexitstatus is false then this
returns just command_output.
The run() function can often be used instead of creating a spawn instance.
For example, the following code uses spawn:
from pexpect import *
child = spawn('scp foo myname@host.example.com:.')
child.expect ('(?i)password')
child.sendline (mypassword)
The previous code can be replaced with the following, which you may
or may not find simpler:
from pexpect import *
run ('scp foo myname@host.example.com:.', events={'(?i)password': mypassword})
Examples:
Start the apache daemon on the local machine:
from pexpect import *
run ("/usr/local/apache/bin/apachectl start")
Check in a file using SVN:
from pexpect import *
run ("svn ci -m 'automatic commit' my_file.py")
Run a command and capture exit status:
from pexpect import *
(command_output, exitstatus) = run ('ls -l /bin', withexitstatus=1)
Tricky Examples:
The following will run SSH and execute 'ls -l' on the remote machine.
The password 'secret' will be sent if the '(?i)password' pattern is ever seen.
run ("ssh username@machine.example.com 'ls -l'", events={'(?i)password':'secret\n'})
This will start mencoder to rip a video from DVD. This will also display
progress ticks every 5 seconds as it runs.
from pexpect import *
def print_ticks(d):
print d['event_count'],
run ("mencoder dvd://1 -o video.avi -oac copy -ovc copy", events={TIMEOUT:print_ticks}, timeout=5)
The 'events' argument should be a dictionary of patterns and responses.
Whenever one of the patterns is seen in the command output,
run() will send the associated response string. Note that you should
put newlines in your string if Enter is necessary.
The responses may also contain callback functions.
Any callback is a function that takes a dictionary as an argument.
The dictionary contains all the locals from the run() function, so
you can access the child spawn object or any other variable defined
in run() (event_count, child, and extra_args are the most useful).
A callback may return True to stop the current run process otherwise
run() continues until the next event.
A callback may also return a string which will be sent to the child.
'extra_args' is not used directly by run(). It provides a way to pass data to
a callback function via the locals dictionary that run() passes to each callback.
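For example, a callback can pull data out of extra_args and return a
string to be sent to the child (an illustrative sketch; the names and
the password value are placeholders):
def send_password(d):
    return d['extra_args']['password'] + '\n'
run ("ssh user@example.com 'ls -l'",
     events={'(?i)password': send_password},
     extra_args={'password': 'secret'})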
"""
if timeout == -1:
child = spawn(command, maxread=2000, logfile=logfile)
else:
child = spawn(command, timeout=timeout, maxread=2000, logfile=logfile)
if events is not None:
patterns = events.keys()
responses = events.values()
else:
patterns=None # We assume that EOF or TIMEOUT will save us.
responses=None
child_result_list = []
event_count = 0
while 1:
try:
index = child.expect (patterns)
if type(child.after) is types.StringType:
child_result_list.append(child.before + child.after)
else: # child.after may have been a TIMEOUT or EOF, so don't cat those.
child_result_list.append(child.before)
if type(responses[index]) is types.StringType:
child.send(responses[index])
elif type(responses[index]) is types.FunctionType:
callback_result = responses[index](locals())
sys.stdout.flush()
if type(callback_result) is types.StringType:
child.send(callback_result)
elif callback_result:
break
else:
raise TypeError ('The callback must be a string or function type.')
event_count = event_count + 1
except TIMEOUT, e:
child_result_list.append(child.before)
break
except EOF, e:
child_result_list.append(child.before)
break
child_result = ''.join(child_result_list)
if withexitstatus:
child.close()
return (child_result, child.exitstatus)
else:
return child_result
class spawn (object):
"""This is the main class interface for Pexpect.
Use this class to start and control child applications.
"""
def __init__(self, command, args=[], timeout=30, maxread=2000, searchwindowsize=None, logfile=None, env=None):
"""This is the constructor. The command parameter may be a string
that includes a command and any arguments to the command. For example:
p = pexpect.spawn ('/usr/bin/ftp')
p = pexpect.spawn ('/usr/bin/ssh user@example.com')
p = pexpect.spawn ('ls -latr /tmp')
You may also construct it with a list of arguments like so:
p = pexpect.spawn ('/usr/bin/ftp', [])
p = pexpect.spawn ('/usr/bin/ssh', ['user@example.com'])
p = pexpect.spawn ('ls', ['-latr', '/tmp'])
After this the child application will be created and
will be ready to talk to. For normal use, see expect() and
send() and sendline().
The maxread attribute sets the read buffer size.
This is the maximum number of bytes that Pexpect will try to read
from a TTY at one time.
Setting the maxread size to 1 will turn off buffering.
Setting the maxread value higher may help performance in cases
where large amounts of output are read back from the child.
This feature is useful in conjunction with searchwindowsize.
The searchwindowsize attribute sets how far back in
the incoming search buffer Pexpect will search for pattern matches.
Every time Pexpect reads some data from the child it will append the data to
the incoming buffer. The default is to search from the beginning of the
incoming buffer each time new data is read from the child.
But this is very inefficient if you are running a command that
generates a large amount of data when you only want to match
patterns that appear near the end of that data.
The searchwindowsize does not affect the size of the incoming data buffer.
You will still have access to the full buffer after expect() returns.
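Example (an illustrative sketch; the command and sizes are arbitrary):
child = pexpect.spawn('cat bigfile.log', maxread=5000, searchwindowsize=100)
child.expect ('a pattern that appears near the end of the output')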
The logfile member turns on or off logging.
All input and output will be copied to the given file object.
Set logfile to None to stop logging. This is the default.
Set logfile to sys.stdout to echo everything to standard output.
The logfile is flushed after each write.
Example 1:
child = pexpect.spawn('some_command')
fout = file('mylog.txt','w')
child.logfile = fout
Example 2:
child = pexpect.spawn('some_command')
child.logfile = sys.stdout
The delaybeforesend helps overcome a weird behavior that many users were experiencing.
The typical problem was that a user would expect() a "Password:" prompt and
then immediately call sendline() to send the password. The user would then
see that their password was echoed back to them. Passwords don't
normally echo. The problem is caused by the fact that most applications
print out the "Password" prompt and then turn off stdin echo, but if you
send your password before the application turned off echo, then you get
your password echoed. Normally this wouldn't be a problem when interacting
with a human at a real keyboard. If you introduce a slight delay just before
writing then this seems to clear up the problem. This was such a common problem
for many users that I decided that the default pexpect behavior
should be to sleep just before writing to the child application.
1/10th of a second (100 ms) seems to be enough to clear up the problem.
You can set delaybeforesend to 0 to return to the old behavior.
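For example (illustrative):
child = pexpect.spawn('ftp ftp.example.com')
child.delaybeforesend = 0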
Note that spawn is clever about finding commands on your path.
It uses the same logic that "which" uses to find executables.
If you wish to get the exit status of the child you must call
the close() method. The exit or signal status of the child will be
stored in self.exitstatus or self.signalstatus.
If the child exited normally then exitstatus will store the exit return code and
signalstatus will be None.
If the child was terminated abnormally with a signal then signalstatus will store
the signal value and exitstatus will be None.
If you need more detail you can also read the self.status member which stores
the status returned by os.waitpid. You can interpret this using
os.WIFEXITED/os.WEXITSTATUS or os.WIFSIGNALED/os.TERMSIG.
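For example (a minimal sketch; the command is arbitrary):
child = pexpect.spawn('ls /tmp')
child.expect (pexpect.EOF)
child.close()
print child.exitstatus, child.signalstatus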
"""
self.STDIN_FILENO = pty.STDIN_FILENO
self.STDOUT_FILENO = pty.STDOUT_FILENO
self.STDERR_FILENO = pty.STDERR_FILENO
self.stdin = sys.stdin
self.stdout = sys.stdout
self.stderr = sys.stderr
self.patterns = None
self.ignorecase = False
self.before = None
self.after = None
self.match = None
self.match_index = None
self.terminated = True
self.exitstatus = None
self.signalstatus = None
self.status = None # status returned by os.waitpid
self.flag_eof = False
self.pid = None
self.child_fd = -1 # initially closed
self.timeout = timeout
self.delimiter = EOF
self.logfile = logfile
self.maxread = maxread # Max bytes to read at one time into buffer.
self.buffer = '' # This is the read buffer. See maxread.
self.searchwindowsize = searchwindowsize # Anything before searchwindowsize point is preserved, but not searched.
self.delaybeforesend = 0.1 # Sets sleep time used just before sending data to child.
self.delayafterclose = 0.1 # Sets delay in close() method to allow kernel time to update process status.
self.delayafterterminate = 0.1 # Sets delay in terminate() method to allow kernel time to update process status.
self.softspace = False # File-like object.
self.name = '<' + repr(self) + '>' # File-like object.
self.encoding = None # File-like object.
self.closed = True # File-like object.
self.env = env
self.__irix_hack = sys.platform.lower().find('irix') >= 0 # This flags if we are running on irix
self.use_native_pty_fork = not (sys.platform.lower().find('solaris') >= 0) # Solaris uses internal __fork_pty(). All other use pty.fork().
# allow dummy instances for subclasses that may not use command or args.
if command is None:
self.command = None
self.args = None
self.name = '<pexpect factory incomplete>'
return
# If command is an int type then it may represent a file descriptor.
if type(command) == type(0):
raise ExceptionPexpect ('Command is an int type. If this is a file descriptor then maybe you want to use fdpexpect.fdspawn which takes an existing file descriptor instead of a command string.')
if type (args) != type([]):
raise TypeError ('The argument, args, must be a list.')
if args == []:
self.args = split_command_line(command)
self.command = self.args[0]
else:
self.args = args[:] # work with a copy
self.args.insert (0, command)
self.command = command
command_with_path = which(self.command)
if command_with_path is None:
raise ExceptionPexpect ('The command was not found or was not executable: %s.' % self.command)
self.command = command_with_path
self.args[0] = self.command
self.name = '<' + ' '.join (self.args) + '>'
self.__spawn()
def __del__(self):
"""This makes sure that no system resources are left open.
Python only garbage collects Python objects. OS file descriptors
are not Python objects, so they must be handled explicitly.
If the child file descriptor was opened outside of this class
(passed to the constructor) then this does not close it.
"""
if not self.closed:
self.close()
def __str__(self):
"""This returns the current state of the pexpect object as a string.
"""
s = []
s.append(repr(self))
s.append('version: ' + __version__ + ' (' + __revision__ + ')')
s.append('command: ' + str(self.command))
s.append('args: ' + str(self.args))
if self.patterns is None:
s.append('patterns: None')
else:
s.append('patterns:')
for p in self.patterns:
if type(p) is type(re.compile('')):
s.append(' ' + str(p.pattern))
else:
s.append(' ' + str(p))
s.append('buffer (last 100 chars): ' + str(self.buffer)[-100:])
s.append('before (last 100 chars): ' + str(self.before)[-100:])
s.append('after: ' + str(self.after))
s.append('match: ' + str(self.match))
s.append('match_index: ' + str(self.match_index))
s.append('exitstatus: ' + str(self.exitstatus))
s.append('flag_eof: ' + str(self.flag_eof))
s.append('pid: ' + str(self.pid))
s.append('child_fd: ' + str(self.child_fd))
s.append('closed: ' + str(self.closed))
s.append('timeout: ' + str(self.timeout))
s.append('delimiter: ' + str(self.delimiter))
s.append('logfile: ' + str(self.logfile))
s.append('maxread: ' + str(self.maxread))
s.append('ignorecase: ' + str(self.ignorecase))
s.append('searchwindowsize: ' + str(self.searchwindowsize))
s.append('delaybeforesend: ' + str(self.delaybeforesend))
s.append('delayafterclose: ' + str(self.delayafterclose))
s.append('delayafterterminate: ' + str(self.delayafterterminate))
return '\n'.join(s)
def __spawn(self):
"""This starts the given command in a child process.
This does all the fork/exec type of stuff for a pty.
This is called by __init__.
"""
# The pid and child_fd of this object get set by this method.
# Note that it is difficult for this method to fail.
# You cannot detect if the child process cannot start.
# So the only way you can tell if the child process started
# or not is to try to read from the file descriptor. If you get
# EOF immediately then it means that the child is already dead.
That may not necessarily be bad because you may have spawned a child
# that performs some task; creates no stdout output; and then dies.
assert self.pid is None, 'The pid member should be None.'
assert self.command is not None, 'The command member should not be None.'
if self.use_native_pty_fork:
try:
self.pid, self.child_fd = pty.fork()
except OSError, e:
raise ExceptionPexpect('Error! pty.fork() failed: ' + str(e))
else: # Use internal __fork_pty
self.pid, self.child_fd = self.__fork_pty()
if self.pid == 0: # Child
try:
self.child_fd = sys.stdout.fileno() # used by setwinsize()
self.setwinsize(24, 80)
except:
# Some platforms do not like setwinsize (Cygwin).
# This will cause problem when running applications that
# are very picky about window size.
# This is a serious limitation, but not a show stopper.
pass
# Do not allow child to inherit open file descriptors from parent.
max_fd = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
for i in range (3, max_fd):
try:
os.close (i)
except OSError:
pass
# I don't know why this works, but ignoring SIGHUP fixes a
# problem when trying to start a Java daemon with sudo
# (specifically, Tomcat).
signal.signal(signal.SIGHUP, signal.SIG_IGN)
if self.env is None:
os.execv(self.command, self.args)
else:
os.execvpe(self.command, self.args, self.env)
# Parent
self.terminated = False
self.closed = False
def __fork_pty(self):
"""This implements a substitute for the forkpty system call.
This should be more portable than the pty.fork() function.
Specifically, this should work on Solaris.
Modified 10.06.05 by Geoff Marshall:
Implemented __fork_pty() method to resolve the issue with Python's
pty.fork() not supporting Solaris, particularly ssh.
Based on patch to posixmodule.c authored by Noah Spurrier:
http://mail.python.org/pipermail/python-dev/2003-May/035281.html
"""
parent_fd, child_fd = os.openpty()
if parent_fd < 0 or child_fd < 0:
raise ExceptionPexpect, "Error! Could not open pty with os.openpty()."
pid = os.fork()
if pid < 0:
raise ExceptionPexpect, "Error! Failed os.fork()."
elif pid == 0:
# Child.
os.close(parent_fd)
self.__pty_make_controlling_tty(child_fd)
os.dup2(child_fd, 0)
os.dup2(child_fd, 1)
os.dup2(child_fd, 2)
if child_fd > 2:
os.close(child_fd)
else:
# Parent.
os.close(child_fd)
return pid, parent_fd
def __pty_make_controlling_tty(self, tty_fd):
"""This makes the pseudo-terminal the controlling tty.
This should be more portable than the pty.fork() function.
Specifically, this should work on Solaris.
"""
child_name = os.ttyname(tty_fd)
# Disconnect from controlling tty if still connected.
fd = os.open("/dev/tty", os.O_RDWR | os.O_NOCTTY);
if fd >= 0:
os.close(fd)
os.setsid()
# Verify we are disconnected from controlling tty
try:
fd = os.open("/dev/tty", os.O_RDWR | os.O_NOCTTY);
if fd >= 0:
os.close(fd)
raise ExceptionPexpect, "Error! We are not disconnected from a controlling tty."
except:
# Good! We are disconnected from a controlling tty.
pass
# Verify we can open child pty.
fd = os.open(child_name, os.O_RDWR)
if fd < 0:
raise ExceptionPexpect, "Error! Could not open child pty, " + child_name
else:
os.close(fd)
# Verify we now have a controlling tty.
fd = os.open("/dev/tty", os.O_WRONLY)
if fd < 0:
raise ExceptionPexpect, "Error! Could not open controlling tty, /dev/tty"
else:
os.close(fd)
def fileno (self): # File-like object.
"""This returns the file descriptor of the pty for the child.
"""
return self.child_fd
def close (self, force=True): # File-like object.
"""This closes the connection with the child application.
Note that calling close() more than once is valid.
This emulates standard Python behavior with files.
Set force to True if you want to make sure that the child is terminated
(SIGKILL is sent if the child ignores SIGHUP and SIGINT).
"""
if not self.closed:
self.flush()
os.close (self.child_fd)
self.child_fd = -1
self.closed = True
time.sleep(self.delayafterclose) # Give kernel time to update process status.
if self.isalive():
if not self.terminate(force):
raise ExceptionPexpect ('close() could not terminate the child using terminate()')
def flush (self): # File-like object.
"""This does nothing. It is here to support the interface for a File-like object.
"""
pass
def isatty (self): # File-like object.
"""This returns True if the file descriptor is open and connected to a tty(-like) device, else False.
"""
return os.isatty(self.child_fd)
def setecho (self, state):
"""This sets the terminal echo mode on or off.
Note that anything the child sent before the echo will be lost, so
you should be sure that your input buffer is empty before you setecho.
For example, the following will work as expected.
p = pexpect.spawn('cat')
p.sendline ('1234') # We will see this twice (once from tty echo and again from cat).
p.expect (['1234'])
p.expect (['1234'])
p.setecho(False) # Turn off tty echo
p.sendline ('abcd') # We will see this only once (echoed by cat).
p.sendline ('wxyz') # We will see this only once (echoed by cat).
p.expect (['abcd'])
p.expect (['wxyz'])
The following WILL NOT WORK because the lines sent before the setecho
will be lost:
p = pexpect.spawn('cat')
p.sendline ('1234') # We will see this twice (once from tty echo and again from cat).
p.setecho(False) # Turn off tty echo
p.sendline ('abcd') # We will see this only once (echoed by cat).
p.sendline ('wxyz') # We will see this only once (echoed by cat).
p.expect (['1234'])
p.expect (['1234'])
p.expect (['abcd'])
p.expect (['wxyz'])
"""
new = termios.tcgetattr(self.child_fd)
if state:
new[3] = new[3] | termios.ECHO
else:
new[3] = new[3] & ~termios.ECHO
# I tried TCSADRAIN and TCSAFLUSH, but these were inconsistent
# and blocked on some platforms. TCSADRAIN is probably ideal if it worked.
termios.tcsetattr(self.child_fd, termios.TCSANOW, new)
def read_nonblocking (self, size = 1, timeout = -1):
"""This reads at most size characters from the child application.
It includes a timeout. If the read does not complete within the
timeout period then a TIMEOUT exception is raised.
If the end of file is read then an EOF exception will be raised.
If a log file was set using setlog() then all data will
also be written to the log file.
If timeout==None then the read may block indefinitely.
If timeout==-1 then the self.timeout value is used.
If timeout==0 then the child is polled and
if there was no data immediately ready then this will raise a TIMEOUT exception.
The "timeout" refers only to the amount of time to read at least one character.
This is not affected by the 'size' parameter, so if you call
read_nonblocking(size=100, timeout=30) and only one character is
available right away then one character will be returned immediately.
It will not wait for 30 seconds for another 99 characters to come in.
This is a wrapper around os.read().
It uses select.select() to implement a timeout.
"""
if self.closed:
raise ValueError ('I/O operation on closed file in read_nonblocking().')
if timeout == -1:
timeout = self.timeout
# Note that some systems such as Solaris do not give an EOF when
# the child dies. In fact, you can still try to read
# from the child_fd -- it will block forever or until TIMEOUT.
# For this case, I test isalive() before doing any reading.
# If isalive() is false, then I pretend that this is the same as EOF.
if not self.isalive():
r,w,e = self.__select([self.child_fd], [], [], 0) # timeout of 0 means "poll"
if not r:
self.flag_eof = True
raise EOF ('End Of File (EOF) in read_nonblocking(). Braindead platform.')
elif self.__irix_hack:
# This is a hack for Irix. It seems that Irix requires a long delay before checking isalive.
# This adds a 2 second delay, but only when the child is terminated.
r, w, e = self.__select([self.child_fd], [], [], 2)
if not r and not self.isalive():
self.flag_eof = True
raise EOF ('End Of File (EOF) in read_nonblocking(). Pokey platform.')
r,w,e = self.__select([self.child_fd], [], [], timeout)
if not r:
if not self.isalive():
# Some platforms, such as Irix, will claim that their processes are alive;
# then timeout on the select; and then finally admit that they are not alive.
self.flag_eof = True
raise EOF ('End of File (EOF) in read_nonblocking(). Very pokey platform.')
else:
raise TIMEOUT ('Timeout exceeded in read_nonblocking().')
if self.child_fd in r:
try:
s = os.read(self.child_fd, size)
except OSError, e: # Linux does this
self.flag_eof = True
raise EOF ('End Of File (EOF) in read_nonblocking(). Exception style platform.')
if s == '': # BSD style
self.flag_eof = True
raise EOF ('End Of File (EOF) in read_nonblocking(). Empty string style platform.')
if self.logfile is not None:
self.logfile.write (s)
self.logfile.flush()
return s
raise ExceptionPexpect ('Reached an unexpected state in read_nonblocking().')
def read (self, size = -1): # File-like object.
"""This reads at most "size" bytes from the file
(less if the read hits EOF before obtaining size bytes).
If the size argument is negative or omitted,
read all data until EOF is reached.
The bytes are returned as a string object.
An empty string is returned when EOF is encountered immediately.
"""
if size == 0:
return ''
if size < 0:
self.expect (self.delimiter) # delimiter default is EOF
return self.before
# I could have done this more directly by not using expect(), but
# I deliberately decided to couple read() to expect() so that
# I would catch any bugs early and ensure consistent behavior.
# It's a little less efficient, but there is less for me to
# worry about if I have to later modify read() or expect().
# Note, it's OK if size==-1 in the regex. That just means it
# will never match anything in which case we stop only on EOF.
cre = re.compile('.{%d}' % size, re.DOTALL)
index = self.expect ([cre, self.delimiter]) # delimiter default is EOF
if index == 0:
return self.after ### self.before should be ''. Should I assert this?
return self.before
def readline (self, size = -1): # File-like object.
"""This reads and returns one entire line. A trailing newline is kept in
the string, but may be absent when a file ends with an incomplete line.
Note: This readline() looks for a \\r\\n pair even on UNIX because
this is what the pseudo tty device returns. So contrary to what you
may expect you will receive the newline as \\r\\n.
An empty string is returned when EOF is hit immediately.
        Currently, the size argument is mostly ignored, so this behavior is not
standard for a file-like object. If size is 0 then an empty string
is returned.
"""
if size == 0:
return ''
index = self.expect (['\r\n', self.delimiter]) # delimiter default is EOF
if index == 0:
return self.before + '\r\n'
else:
return self.before
def __iter__ (self): # File-like object.
"""This is to support iterators over a file-like object.
"""
return self
def next (self): # File-like object.
"""This is to support iterators over a file-like object.
"""
result = self.readline()
if result == "":
raise StopIteration
return result
def readlines (self, sizehint = -1): # File-like object.
"""This reads until EOF using readline() and returns a list containing
the lines thus read. The optional "sizehint" argument is ignored.
"""
lines = []
while True:
line = self.readline()
if not line:
break
lines.append(line)
return lines
def write(self, str): # File-like object.
"""This is similar to send() except that there is no return value.
"""
self.send (str)
def writelines (self, sequence): # File-like object.
"""This calls write() for each element in the sequence.
The sequence can be any iterable object producing strings,
        typically a list of strings. This does not add line separators.
There is no return value.
"""
for str in sequence:
self.write (str)
def send(self, str):
"""This sends a string to the child process.
This returns the number of bytes written.
If a log file was set then the data is also written to the log.
"""
time.sleep(self.delaybeforesend)
if self.logfile is not None:
self.logfile.write (str)
self.logfile.flush()
c = os.write(self.child_fd, str)
return c
def sendline(self, str=''):
"""This is like send(), but it adds a line feed (os.linesep).
This returns the number of bytes written.
"""
n = self.send(str)
n = n + self.send (os.linesep)
return n
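    # Illustrative contrast between send() and sendline() (a sketch, not part
    # of the original file; 'cat' is only an example child process):
    #
    #     child = spawn('cat')
    #     child.send('partial')      # no newline; 'cat' keeps buffering
    #     child.sendline('whole')    # appends os.linesep, completing the line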
def sendeof(self):
"""This sends an EOF to the child.
This sends a character which causes the pending parent output
buffer to be sent to the waiting child program without
waiting for end-of-line. If it is the first character of the
line, the read() in the user program returns 0, which
signifies end-of-file. This means to work as expected
        a sendeof() has to be called at the beginning of a line.
This method does not send a newline. It is the responsibility
of the caller to ensure the eof is sent at the beginning of a line.
"""
### Hmmm... how do I send an EOF?
###C if ((m = write(pty, *buf, p - *buf)) < 0)
###C return (errno == EWOULDBLOCK) ? n : -1;
fd = sys.stdin.fileno()
old = termios.tcgetattr(fd) # remember current state
new = termios.tcgetattr(fd)
new[3] = new[3] | termios.ICANON # ICANON must be set to recognize EOF
try: # use try/finally to ensure state gets restored
termios.tcsetattr(fd, termios.TCSADRAIN, new)
if 'CEOF' in dir(termios):
os.write (self.child_fd, '%c' % termios.CEOF)
else:
os.write (self.child_fd, '%c' % 4) # Silly platform does not define CEOF so assume CTRL-D
finally: # restore state
termios.tcsetattr(fd, termios.TCSADRAIN, old)
def eof (self):
"""This returns True if the EOF exception was ever raised.
"""
return self.flag_eof
def terminate(self, force=False):
"""This forces a child process to terminate.
It starts nicely with SIGHUP and SIGINT. If "force" is True then
moves onto SIGKILL.
This returns True if the child was terminated.
This returns False if the child could not be terminated.
"""
if not self.isalive():
return True
self.kill(signal.SIGHUP)
time.sleep(self.delayafterterminate)
if not self.isalive():
return True
self.kill(signal.SIGCONT)
time.sleep(self.delayafterterminate)
if not self.isalive():
return True
self.kill(signal.SIGINT)
time.sleep(self.delayafterterminate)
if not self.isalive():
return True
if force:
self.kill(signal.SIGKILL)
time.sleep(self.delayafterterminate)
if not self.isalive():
return True
else:
return False
return False
#raise ExceptionPexpect ('terminate() could not terminate child process. Try terminate(force=True)?')
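    # Hypothetical escalation pattern built on terminate() (illustration only,
    # not part of the class):
    #
    #     if not child.terminate():
    #         child.terminate(force=True)   # last resort: SIGKILL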
def wait(self):
"""This waits until the child exits. This is a blocking call.
This will not read any data from the child, so this will block forever
if the child has unread output and has terminated. In other words, the child
may have printed output then called exit(); but, technically, the child is
still alive until its output is read.
"""
if self.isalive():
pid, status = os.waitpid(self.pid, 0)
else:
raise ExceptionPexpect ('Cannot wait for dead child process.')
if os.WIFEXITED (status):
self.status = status
self.exitstatus = os.WEXITSTATUS(status)
self.signalstatus = None
self.terminated = True
elif os.WIFSIGNALED (status):
self.status = status
self.exitstatus = None
self.signalstatus = os.WTERMSIG(status)
self.terminated = True
elif os.WIFSTOPPED (status):
raise ExceptionPexpect ('Wait was called for a child process that is stopped. This is not supported. Is some other process attempting job control with our child pid?')
return self.exitstatus
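    # Assumed usage (not from the original docs): drain the output first so
    # wait() cannot block on a child stuck writing, then reap the child:
    #
    #     child.expect(EOF)        # consume remaining output
    #     status = child.wait()    # integer exit status via os.WEXITSTATUS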
def isalive(self):
"""This tests if the child process is running or not.
This is non-blocking. If the child was terminated then this
will read the exitstatus or signalstatus of the child.
This returns True if the child process appears to be running or False if not.
It can take literally SECONDS for Solaris to return the right status.
"""
if self.terminated:
return False
if self.flag_eof:
# This is for Linux, which requires the blocking form of waitpid to get
# status of a defunct process. This is super-lame. The flag_eof would have
# been set in read_nonblocking(), so this should be safe.
waitpid_options = 0
else:
waitpid_options = os.WNOHANG
try:
pid, status = os.waitpid(self.pid, waitpid_options)
except OSError, e: # No child processes
if e[0] == errno.ECHILD:
raise ExceptionPexpect ('isalive() encountered condition where "terminated" is 0, but there was no child process. Did someone else call waitpid() on our process?')
else:
raise e
# I have to do this twice for Solaris. I can't even believe that I figured this out...
# If waitpid() returns 0 it means that no child process wishes to
# report, and the value of status is undefined.
if pid == 0:
try:
pid, status = os.waitpid(self.pid, waitpid_options) ### os.WNOHANG) # Solaris!
except OSError, e: # This should never happen...
if e[0] == errno.ECHILD:
raise ExceptionPexpect ('isalive() encountered condition that should never happen. There was no child process. Did someone else call waitpid() on our process?')
else:
raise e
# If pid is still 0 after two calls to waitpid() then
# the process really is alive. This seems to work on all platforms, except
# for Irix which seems to require a blocking call on waitpid or select, so I let read_nonblocking
# take care of this situation (unfortunately, this requires waiting through the timeout).
        if pid == 0:
            return True
if os.WIFEXITED (status):
self.status = status
self.exitstatus = os.WEXITSTATUS(status)
self.signalstatus = None
self.terminated = True
elif os.WIFSIGNALED (status):
self.status = status
self.exitstatus = None
self.signalstatus = os.WTERMSIG(status)
self.terminated = True
elif os.WIFSTOPPED (status):
raise ExceptionPexpect ('isalive() encountered condition where child process is stopped. This is not supported. Is some other process attempting job control with our child pid?')
return False
def kill(self, sig):
"""This sends the given signal to the child application.
In keeping with UNIX tradition it has a misleading name.
It does not necessarily kill the child unless
you send the right signal.
"""
# Same as os.kill, but the pid is given for you.
if self.isalive():
os.kill(self.pid, sig)
def compile_pattern_list(self, patterns):
"""This compiles a pattern-string or a list of pattern-strings.
Patterns must be a StringType, EOF, TIMEOUT, SRE_Pattern, or
a list of those. Patterns may also be None which results in
an empty list.
This is used by expect() when calling expect_list().
Thus expect() is nothing more than::
cpl = self.compile_pattern_list(pl)
            return self.expect_list(cpl, timeout)
If you are using expect() within a loop it may be more
efficient to compile the patterns first and then call expect_list().
        This avoids calls in a loop to compile_pattern_list():
cpl = self.compile_pattern_list(my_pattern)
while some_condition:
...
                i = self.expect_list(cpl, timeout)
...
"""
if patterns is None:
return []
if type(patterns) is not types.ListType:
patterns = [patterns]
compile_flags = re.DOTALL # Allow dot to match \n
if self.ignorecase:
compile_flags = compile_flags | re.IGNORECASE
compiled_pattern_list = []
for p in patterns:
if type(p) is types.StringType:
compiled_pattern_list.append(re.compile(p, compile_flags))
elif p is EOF:
compiled_pattern_list.append(EOF)
elif p is TIMEOUT:
compiled_pattern_list.append(TIMEOUT)
elif type(p) is type(re.compile('')):
compiled_pattern_list.append(p)
else:
                raise TypeError ('Argument must be one of StringType, EOF, TIMEOUT, SRE_Pattern, or a list of those types. %s' % str(type(p)))
return compiled_pattern_list
def expect(self, pattern, timeout = -1, searchwindowsize=None):
"""This seeks through the stream until a pattern is matched.
The pattern is overloaded and may take several types including a list.
The pattern can be a StringType, EOF, a compiled re, or a list of
those types. Strings will be compiled to re types. This returns the
index into the pattern list. If the pattern was not a list this
returns index 0 on a successful match. This may raise exceptions for
EOF or TIMEOUT. To avoid the EOF or TIMEOUT exceptions add
EOF or TIMEOUT to the pattern list.
After a match is found the instance attributes
'before', 'after' and 'match' will be set.
You can see all the data read before the match in 'before'.
You can see the data that was matched in 'after'.
The re.MatchObject used in the re match will be in 'match'.
        If an error occurred then 'before' will be set to all the
data read so far and 'after' and 'match' will be None.
If timeout is -1 then timeout will be set to the self.timeout value.
Note: A list entry may be EOF or TIMEOUT instead of a string.
This will catch these exceptions and return the index
of the list entry instead of raising the exception.
The attribute 'after' will be set to the exception type.
The attribute 'match' will be None.
This allows you to write code like this:
index = p.expect (['good', 'bad', pexpect.EOF, pexpect.TIMEOUT])
if index == 0:
do_something()
elif index == 1:
do_something_else()
elif index == 2:
do_some_other_thing()
elif index == 3:
do_something_completely_different()
instead of code like this:
try:
index = p.expect (['good', 'bad'])
if index == 0:
do_something()
elif index == 1:
do_something_else()
except EOF:
do_some_other_thing()
except TIMEOUT:
do_something_completely_different()
These two forms are equivalent. It all depends on what you want.
You can also just expect the EOF if you are waiting for all output
of a child to finish. For example:
p = pexpect.spawn('/bin/ls')
p.expect (pexpect.EOF)
print p.before
If you are trying to optimize for speed then see expect_list().
"""
compiled_pattern_list = self.compile_pattern_list(pattern)
return self.expect_list(compiled_pattern_list, timeout, searchwindowsize)
def expect_list(self, pattern_list, timeout = -1, searchwindowsize = -1):
"""This takes a list of compiled regular expressions and returns
the index into the pattern_list that matched the child output.
The list may also contain EOF or TIMEOUT (which are not
compiled regular expressions). This method is similar to
the expect() method except that expect_list() does not
recompile the pattern list on every call.
This may help if you are trying to optimize for speed, otherwise
just use the expect() method. This is called by expect().
If timeout==-1 then the self.timeout value is used.
If searchwindowsize==-1 then the self.searchwindowsize value is used.
"""
self.patterns = pattern_list
if timeout == -1:
timeout = self.timeout
if timeout is not None:
end_time = time.time() + timeout
if searchwindowsize == -1:
searchwindowsize = self.searchwindowsize
try:
incoming = self.buffer
while True: # Keep reading until exception or return.
# Sequence through the list of patterns looking for a match.
first_match = -1
for cre in pattern_list:
if cre is EOF or cre is TIMEOUT:
continue # The patterns for PexpectExceptions are handled differently.
if searchwindowsize is None: # search everything
match = cre.search(incoming)
else:
startpos = max(0, len(incoming) - searchwindowsize)
match = cre.search(incoming, startpos)
if match is None:
continue
if first_match > match.start() or first_match == -1:
first_match = match.start()
self.match = match
self.match_index = pattern_list.index(cre)
if first_match > -1:
self.buffer = incoming[self.match.end() : ]
self.before = incoming[ : self.match.start()]
self.after = incoming[self.match.start() : self.match.end()]
return self.match_index
# No match at this point
                if timeout is not None and timeout < 0:
raise TIMEOUT ('Timeout exceeded in expect_list().')
# Still have time left, so read more data
c = self.read_nonblocking (self.maxread, timeout)
time.sleep (0.0001)
incoming = incoming + c
if timeout is not None:
timeout = end_time - time.time()
except EOF, e:
self.buffer = ''
self.before = incoming
self.after = EOF
if EOF in pattern_list:
self.match = EOF
self.match_index = pattern_list.index(EOF)
return self.match_index
else:
self.match = None
self.match_index = None
raise EOF (str(e) + '\n' + str(self))
except TIMEOUT, e:
self.before = incoming
self.after = TIMEOUT
if TIMEOUT in pattern_list:
self.match = TIMEOUT
self.match_index = pattern_list.index(TIMEOUT)
return self.match_index
else:
self.match = None
self.match_index = None
raise TIMEOUT (str(e) + '\n' + str(self))
except Exception:
self.before = incoming
self.after = None
self.match = None
self.match_index = None
raise
def getwinsize(self):
"""This returns the terminal window size of the child tty.
The return value is a tuple of (rows, cols).
"""
if 'TIOCGWINSZ' in dir(termios):
TIOCGWINSZ = termios.TIOCGWINSZ
else:
TIOCGWINSZ = 1074295912L # assume if not defined
s = struct.pack('HHHH', 0, 0, 0, 0)
x = fcntl.ioctl(self.fileno(), TIOCGWINSZ, s)
return struct.unpack('HHHH', x)[0:2]
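    # Usage sketch: the [0:2] slice of the unpacked 'HHHH' struct is
    # (rows, cols); the values shown are only an example:
    #
    #     rows, cols = child.getwinsize()   # e.g. (24, 80) on a default tty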
def setwinsize(self, r, c):
"""This sets the terminal window size of the child tty.
This will cause a SIGWINCH signal to be sent to the child.
This does not change the physical window size.
It changes the size reported to TTY-aware applications like
vi or curses -- applications that respond to the SIGWINCH signal.
"""
# Check for buggy platforms. Some Python versions on some platforms
# (notably OSF1 Alpha and RedHat 7.1) truncate the value for
# termios.TIOCSWINSZ. It is not clear why this happens.
# These platforms don't seem to handle the signed int very well;
# yet other platforms like OpenBSD have a large negative value for
# TIOCSWINSZ and they don't have a truncate problem.
# Newer versions of Linux have totally different values for TIOCSWINSZ.
# Note that this fix is a hack.
if 'TIOCSWINSZ' in dir(termios):
TIOCSWINSZ = termios.TIOCSWINSZ
else:
TIOCSWINSZ = -2146929561
if TIOCSWINSZ == 2148037735L: # L is not required in Python >= 2.2.
TIOCSWINSZ = -2146929561 # Same bits, but with sign.
# Note, assume ws_xpixel and ws_ypixel are zero.
s = struct.pack('HHHH', r, c, 0, 0)
fcntl.ioctl(self.fileno(), TIOCSWINSZ, s)
def interact(self, escape_character = chr(29), input_filter = None, output_filter = None):
"""This gives control of the child process to the interactive user
(the human at the keyboard).
Keystrokes are sent to the child process, and the stdout and stderr
output of the child process is printed.
        This simply echoes the child stdout and child stderr to the real
        stdout and it echoes the real stdin to the child stdin.
When the user types the escape_character this method will stop.
The default for escape_character is ^]. This should not be confused
with ASCII 27 -- the ESC character. ASCII 29 was chosen
for historical merit because this is the character used
by 'telnet' as the escape character. The escape_character will
not be sent to the child process.
You may pass in optional input and output filter functions.
These functions should take a string and return a string.
The output_filter will be passed all the output from the child process.
The input_filter will be passed all the keyboard input from the user.
The input_filter is run BEFORE the check for the escape_character.
Note that if you change the window size of the parent
the SIGWINCH signal will not be passed through to the child.
If you want the child window size to change when the parent's
window size changes then do something like the following example:
import pexpect, struct, fcntl, termios, signal, sys
def sigwinch_passthrough (sig, data):
s = struct.pack("HHHH", 0, 0, 0, 0)
a = struct.unpack('hhhh', fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ , s))
global p
p.setwinsize(a[0],a[1])
p = pexpect.spawn('/bin/bash') # Note this is global and used in sigwinch_passthrough.
signal.signal(signal.SIGWINCH, sigwinch_passthrough)
p.interact()
"""
# Flush the buffer.
self.stdout.write (self.buffer)
self.stdout.flush()
self.buffer = ''
mode = tty.tcgetattr(self.STDIN_FILENO)
tty.setraw(self.STDIN_FILENO)
try:
self.__interact_copy(escape_character, input_filter, output_filter)
finally:
tty.tcsetattr(self.STDIN_FILENO, tty.TCSAFLUSH, mode)
def __interact_writen(self, fd, data):
"""This is used by the interact() method.
"""
while data != '' and self.isalive():
n = os.write(fd, data)
data = data[n:]
def __interact_read(self, fd):
"""This is used by the interact() method.
"""
return os.read(fd, 1000)
def __interact_copy(self, escape_character = None, input_filter = None, output_filter = None):
"""This is used by the interact() method.
"""
while self.isalive():
r,w,e = self.__select([self.child_fd, self.STDIN_FILENO], [], [])
if self.child_fd in r:
data = self.__interact_read(self.child_fd)
if output_filter: data = output_filter(data)
if self.logfile is not None:
self.logfile.write (data)
self.logfile.flush()
os.write(self.STDOUT_FILENO, data)
if self.STDIN_FILENO in r:
data = self.__interact_read(self.STDIN_FILENO)
if input_filter: data = input_filter(data)
i = data.rfind(escape_character)
if i != -1:
data = data[:i]
self.__interact_writen(self.child_fd, data)
break
self.__interact_writen(self.child_fd, data)
def __select (self, iwtd, owtd, ewtd, timeout=None):
"""This is a wrapper around select.select() that ignores signals.
If select.select raises a select.error exception and errno is an EINTR error then
it is ignored. Mainly this is used to ignore sigwinch (terminal resize).
"""
# if select() is interrupted by a signal (errno==EINTR) then
# we loop back and enter the select() again.
if timeout is not None:
end_time = time.time() + timeout
while True:
try:
return select.select (iwtd, owtd, ewtd, timeout)
except select.error, e:
if e[0] == errno.EINTR:
# if we loop back we have to subtract the amount of time we already waited.
if timeout is not None:
timeout = end_time - time.time()
if timeout < 0:
return ([],[],[])
else: # something else caused the select.error, so this really is an exception
raise
##############################################################################
    # The following methods are no longer supported or allowed.
def setmaxread (self, maxread):
"""This method is no longer supported or allowed.
I don't like getters and setters without a good reason.
"""
raise ExceptionPexpect ('This method is no longer supported or allowed. Just assign a value to the maxread member variable.')
def expect_exact (self, pattern_list, timeout = -1):
"""This method is no longer supported or allowed.
It was too hard to maintain and keep it up to date with expect_list.
Few people used this method. Most people favored reliability over speed.
The implementation is left in comments in case anyone needs to hack this
feature back into their copy.
If someone wants to diff this with expect_list and make them work
        nearly the same then I will consider adding this back in.
"""
raise ExceptionPexpect ('This method is no longer supported or allowed.')
def setlog (self, fileobject):
"""This method is no longer supported or allowed.
"""
raise ExceptionPexpect ('This method is no longer supported or allowed. Just assign a value to the logfile member variable.')
##############################################################################
# End of spawn class
##############################################################################
def which (filename):
"""This takes a given filename; tries to find it in the environment path;
then checks if it is executable.
This returns the full path to the filename if found and executable.
Otherwise this returns None.
"""
# Special case where filename already contains a path.
if os.path.dirname(filename) != '':
if os.access (filename, os.X_OK):
return filename
if not os.environ.has_key('PATH') or os.environ['PATH'] == '':
p = os.defpath
else:
p = os.environ['PATH']
# Oddly enough this was the one line that made Pexpect
# incompatible with Python 1.5.2.
#pathlist = p.split (os.pathsep)
pathlist = string.split (p, os.pathsep)
for path in pathlist:
f = os.path.join(path, filename)
if os.access(f, os.X_OK):
return f
return None
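# Example of which() (illustrative paths; results depend on the system):
#
#     which('ls')         # -> '/bin/ls' on a typical UNIX box
#     which('nonesuch')   # -> None when nothing executable is found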
def split_command_line(command_line):
"""This splits a command line into a list of arguments.
    It splits arguments on spaces, but handles
    embedded single quotes, double quotes, and escaped characters.
It's impossible to do this with a regular expression, so
I wrote a little state machine to parse the command line.
"""
arg_list = []
arg = ''
# Constants to name the states we can be in.
state_basic = 0
state_esc = 1
state_singlequote = 2
state_doublequote = 3
state_whitespace = 4 # The state of consuming whitespace between commands.
state = state_basic
for c in command_line:
if state == state_basic or state == state_whitespace:
if c == '\\': # Escape the next character
state = state_esc
elif c == r"'": # Handle single quote
state = state_singlequote
elif c == r'"': # Handle double quote
state = state_doublequote
elif c.isspace():
# Add arg to arg_list if we aren't in the middle of whitespace.
if state == state_whitespace:
                    pass # Do nothing.
else:
arg_list.append(arg)
arg = ''
state = state_whitespace
else:
arg = arg + c
state = state_basic
elif state == state_esc:
arg = arg + c
state = state_basic
elif state == state_singlequote:
if c == r"'":
state = state_basic
else:
arg = arg + c
elif state == state_doublequote:
if c == r'"':
state = state_basic
else:
arg = arg + c
if arg != '':
arg_list.append(arg)
return arg_list
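# A quick illustration of the state machine above (expected results, shown as
# a sketch rather than a tested guarantee):
#
#     split_command_line('ls -l "my file"')   # -> ['ls', '-l', 'my file']
#     split_command_line("a\\ b c")           # -> ['a b', 'c']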
| apache-2.0 |
gspeedtech/Audacity-2 | lib-src/lv2/suil/waflib/Tools/gnu_dirs.py | 329 | 2796 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os
from waflib import Utils,Options,Context
_options=[x.split(', ')for x in'''
bindir, user executables, ${EXEC_PREFIX}/bin
sbindir, system admin executables, ${EXEC_PREFIX}/sbin
libexecdir, program executables, ${EXEC_PREFIX}/libexec
sysconfdir, read-only single-machine data, ${PREFIX}/etc
sharedstatedir, modifiable architecture-independent data, ${PREFIX}/com
localstatedir, modifiable single-machine data, ${PREFIX}/var
libdir, object code libraries, ${EXEC_PREFIX}/lib
includedir, C header files, ${PREFIX}/include
oldincludedir, C header files for non-gcc, /usr/include
datarootdir, read-only arch.-independent data root, ${PREFIX}/share
datadir, read-only architecture-independent data, ${DATAROOTDIR}
infodir, info documentation, ${DATAROOTDIR}/info
localedir, locale-dependent data, ${DATAROOTDIR}/locale
mandir, man documentation, ${DATAROOTDIR}/man
docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
htmldir, html documentation, ${DOCDIR}
dvidir, dvi documentation, ${DOCDIR}
pdfdir, pdf documentation, ${DOCDIR}
psdir, ps documentation, ${DOCDIR}
'''.split('\n')if x]
def configure(conf):
def get_param(varname,default):
return getattr(Options.options,varname,'')or default
env=conf.env
env.LIBDIR=env.BINDIR=[]
env.EXEC_PREFIX=get_param('EXEC_PREFIX',env.PREFIX)
env.PACKAGE=getattr(Context.g_module,'APPNAME',None)or env.PACKAGE
complete=False
iter=0
while not complete and iter<len(_options)+1:
iter+=1
complete=True
for name,help,default in _options:
name=name.upper()
if not env[name]:
try:
env[name]=Utils.subst_vars(get_param(name,default).replace('/',os.sep),env)
except TypeError:
complete=False
if not complete:
lst=[name for name,_,_ in _options if not env[name.upper()]]
raise conf.errors.WafError('Variable substitution failure %r'%lst)
def options(opt):
inst_dir=opt.add_option_group('Installation directories','By default, "waf install" will put the files in\
"/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
for k in('--prefix','--destdir'):
option=opt.parser.get_option(k)
if option:
opt.parser.remove_option(k)
inst_dir.add_option(option)
inst_dir.add_option('--exec-prefix',help='installation prefix [Default: ${PREFIX}]',default='',dest='EXEC_PREFIX')
dirs_options=opt.add_option_group('Pre-defined installation directories','')
for name,help,default in _options:
option_name='--'+name
str_default=default
str_help='%s [Default: %s]'%(help,str_default)
dirs_options.add_option(option_name,help=str_help,default='',dest=name.upper())
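# Illustrative configure invocations; each lowercase name in the _options
# table above becomes a '--<name>' flag (the paths here are examples only):
#
#   ./waf configure --prefix=/opt/pkg
#   ./waf configure --prefix=/usr --sysconfdir=/etc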
| gpl-2.0 |
plypaul/airflow | airflow/contrib/operators/bigquery_to_gcs.py | 20 | 4137 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from airflow.contrib.hooks.bigquery_hook import BigQueryHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class BigQueryToCloudStorageOperator(BaseOperator):
"""
Transfers a BigQuery table to a Google Cloud Storage bucket.
"""
template_fields = ('source_project_dataset_table', 'destination_cloud_storage_uris')
template_ext = ('.sql',)
ui_color = '#e4e6f0'
@apply_defaults
def __init__(
self,
source_project_dataset_table,
destination_cloud_storage_uris,
compression='NONE',
export_format='CSV',
field_delimiter=',',
print_header=True,
bigquery_conn_id='bigquery_default',
delegate_to=None,
*args,
**kwargs):
"""
Create a new BigQueryToCloudStorage to move data from BigQuery to
Google Cloud Storage. See here:
https://cloud.google.com/bigquery/docs/reference/v2/jobs
For more details about these parameters.
:param source_project_dataset_table: The dotted
(<project>.|<project>:)<dataset>.<table> BigQuery table to use as the source
data. If <project> is not included, project will be the project defined in
the connection json.
:type source_project_dataset_table: string
:param destination_cloud_storage_uris: The destination Google Cloud
Storage URI (e.g. gs://some-bucket/some-file.txt). Follows
convention defined here:
https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple
:type destination_cloud_storage_uris: list
:param compression: Type of compression to use.
:type compression: string
:param export_format: File format to export.
        :type export_format: string
:param field_delimiter: The delimiter to use when extracting to a CSV.
:type field_delimiter: string
:param print_header: Whether to print a header for a CSV file extract.
:type print_header: boolean
:param bigquery_conn_id: reference to a specific BigQuery hook.
:type bigquery_conn_id: string
:param delegate_to: The account to impersonate, if any.
For this to work, the service account making the request must have domain-wide
delegation enabled.
:type delegate_to: string
"""
super(BigQueryToCloudStorageOperator, self).__init__(*args, **kwargs)
self.source_project_dataset_table = source_project_dataset_table
self.destination_cloud_storage_uris = destination_cloud_storage_uris
self.compression = compression
self.export_format = export_format
self.field_delimiter = field_delimiter
self.print_header = print_header
self.bigquery_conn_id = bigquery_conn_id
self.delegate_to = delegate_to
def execute(self, context):
logging.info('Executing extract of %s into: %s',
self.source_project_dataset_table,
self.destination_cloud_storage_uris)
hook = BigQueryHook(bigquery_conn_id=self.bigquery_conn_id,
delegate_to=self.delegate_to)
conn = hook.get_conn()
cursor = conn.cursor()
cursor.run_extract(
self.source_project_dataset_table,
self.destination_cloud_storage_uris,
self.compression,
self.export_format,
self.field_delimiter,
self.print_header)
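# A hedged usage sketch (not part of this module): wiring the operator into a
# DAG. The table, bucket, and `dag` object below are placeholders:
#
#   export_task = BigQueryToCloudStorageOperator(
#       task_id='export_to_gcs',
#       source_project_dataset_table='my-project.my_dataset.my_table',
#       destination_cloud_storage_uris=['gs://my-bucket/export-*.csv'],
#       export_format='CSV',
#       field_delimiter=',',
#       dag=dag)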
| apache-2.0 |
seanli9jan/tensorflow | tensorflow/python/keras/engine/training_arrays.py | 2 | 15971 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Part of the Keras training engine related to plain array data.
"""
# pylint: disable=protected-access
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import errors
from tensorflow.python.keras import backend as K
from tensorflow.python.keras import callbacks as cbks
from tensorflow.python.keras.engine import training_utils
from tensorflow.python.keras.utils.generic_utils import make_batches
from tensorflow.python.keras.utils.generic_utils import Progbar
from tensorflow.python.keras.utils.generic_utils import slice_arrays
from tensorflow.python.platform import tf_logging as logging
try:
from scipy.sparse import issparse # pylint: disable=g-import-not-at-top
except ImportError:
issparse = None
def fit_loop(model,
inputs,
targets,
sample_weights=None,
batch_size=None,
epochs=100,
verbose=1,
callbacks=None,
val_inputs=None,
val_targets=None,
val_sample_weights=None,
shuffle=True,
initial_epoch=0,
steps_per_epoch=None,
validation_steps=None):
"""Abstract fit function for arrays of data.
Arguments:
model: Keras Model instance.
inputs: Either a list of arrays or a dictionary.
targets: List of target arrays.
sample_weights: Optional list of sample weight arrays.
batch_size: Integer batch size or None if unknown.
epochs: Number of times to iterate over the data
verbose: Verbosity mode, 0, 1 or 2
callbacks: List of callbacks to be called during training
val_inputs: List of input arrays.
val_targets: List of target arrays.
val_sample_weights: Optional list of sample weight arrays.
      shuffle: Whether to shuffle the data at the beginning of each epoch
initial_epoch: Epoch at which to start training
(useful for resuming a previous training run)
steps_per_epoch: Total number of steps (batches of samples)
before declaring one epoch finished and starting the
next epoch. Ignored with the default value of `None`.
validation_steps: Number of steps to run validation for
(only if doing validation from data tensors).
Ignored with the default value of `None`.
Returns:
`History` object.
Raises:
ValueError: in case of invalid arguments.
"""
model._make_fit_function()
f = model._fit_function
sample_weights = sample_weights or []
val_sample_weights = val_sample_weights or []
inputs = training_utils.ModelInputs(inputs).as_list()
if model.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = inputs + targets + sample_weights + [1]
else:
ins = inputs + targets + sample_weights
do_validation = False
if val_inputs:
do_validation = True
if (steps_per_epoch is None and verbose and inputs and
hasattr(inputs[0], 'shape') and hasattr(val_inputs[0], 'shape')):
print('Train on %d samples, validate on %d samples' %
(inputs[0].shape[0], val_inputs[0].shape[0]))
if validation_steps:
do_validation = True
if steps_per_epoch is None:
raise ValueError('Can only use `validation_steps` '
'when doing step-wise '
'training, i.e. `steps_per_epoch` '
'must be set.')
num_train_samples = training_utils.check_num_samples(
ins, batch_size, steps_per_epoch, 'steps_per_epoch')
count_mode = 'steps' if steps_per_epoch else 'samples'
callbacks = cbks.configure_callbacks(
callbacks,
model,
do_validation=do_validation,
val_inputs=val_inputs,
val_targets=val_targets,
val_sample_weights=val_sample_weights,
batch_size=batch_size,
epochs=epochs,
steps_per_epoch=steps_per_epoch,
samples=num_train_samples,
validation_steps=validation_steps,
verbose=verbose,
count_mode=count_mode)
if num_train_samples is not None:
index_array = np.arange(num_train_samples)
# To prevent a slowdown, we find beforehand the arrays that need conversion.
feed = model._feed_inputs + model._feed_targets + model._feed_sample_weights
indices_for_conversion_to_dense = []
for i in range(len(feed)):
if issparse is not None and issparse(ins[i]) and not K.is_sparse(feed[i]):
indices_for_conversion_to_dense.append(i)
callbacks.on_train_begin()
for epoch in range(initial_epoch, epochs):
# Reset stateful metrics
for m in model.stateful_metric_functions:
m.reset_states()
# Update callbacks
callbacks.on_epoch_begin(epoch)
epoch_logs = {}
if steps_per_epoch is not None:
# Step-wise fit loop.
for step_index in range(steps_per_epoch):
batch_logs = {'batch': step_index, 'size': 1}
callbacks.on_batch_begin(step_index, batch_logs)
try:
outs = f(ins)
except errors.OutOfRangeError:
logging.warning('Your dataset iterator ran out of data; '
'interrupting training. Make sure that your dataset '
'can generate at least `steps_per_epoch * epochs` '
                          'batches (in this case, %d batches). You may need to '
'use the repeat() function when building your '
                          'dataset.' %
                          (steps_per_epoch * epochs))
break
if not isinstance(outs, list):
outs = [outs]
for l, o in zip(model.metrics_names, outs):
batch_logs[l] = o
callbacks.on_batch_end(step_index, batch_logs)
if callbacks.model.stop_training:
break
if do_validation:
val_outs = test_loop(
model,
val_inputs,
val_targets,
sample_weights=val_sample_weights,
steps=validation_steps,
verbose=0)
if not isinstance(val_outs, list):
val_outs = [val_outs]
# Same labels assumed.
for l, o in zip(model.metrics_names, val_outs):
epoch_logs['val_' + l] = o
else:
# Sample-wise fit loop.
if shuffle == 'batch':
index_array = training_utils.batch_shuffle(index_array, batch_size)
elif shuffle:
np.random.shuffle(index_array)
batches = make_batches(num_train_samples, batch_size)
for batch_index, (batch_start, batch_end) in enumerate(batches):
batch_ids = index_array[batch_start:batch_end]
try:
if isinstance(ins[-1], int):
# Do not slice the training phase flag.
ins_batch = slice_arrays(ins[:-1], batch_ids) + [ins[-1]]
else:
ins_batch = slice_arrays(ins, batch_ids)
except TypeError:
raise TypeError('TypeError while preparing batch. '
'If using HDF5 input data, '
'pass shuffle="batch".')
batch_logs = {}
batch_logs['batch'] = batch_index
batch_logs['size'] = len(batch_ids)
callbacks.on_batch_begin(batch_index, batch_logs)
for i in indices_for_conversion_to_dense:
ins_batch[i] = ins_batch[i].toarray()
outs = f(ins_batch)
if not isinstance(outs, list):
outs = [outs]
for l, o in zip(model.metrics_names, outs):
batch_logs[l] = o
callbacks.on_batch_end(batch_index, batch_logs)
if callbacks.model.stop_training:
break
if batch_index == len(batches) - 1: # Last batch.
if do_validation:
val_outs = test_loop(
model,
val_inputs,
val_targets,
sample_weights=val_sample_weights,
batch_size=batch_size,
verbose=0)
if not isinstance(val_outs, list):
val_outs = [val_outs]
# Same labels assumed.
for l, o in zip(model.metrics_names, val_outs):
epoch_logs['val_' + l] = o
callbacks.on_epoch_end(epoch, epoch_logs)
if callbacks.model.stop_training:
break
callbacks.on_train_end()
return model.history
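# Added note: fit_loop() is internal plumbing reached through Model.fit(). A
# hedged sketch of the public call that funnels into it (the arrays are
# placeholders):
#
#   model.fit(x_train, y_train, batch_size=32, epochs=10,
#             validation_data=(x_val, y_val))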
def predict_loop(model, inputs, batch_size=32, verbose=0, steps=None):
"""Abstract method to loop over some data in batches.
Arguments:
model: Keras Model instance.
inputs: list of tensors to be fed to `f`.
batch_size: integer batch size.
verbose: verbosity mode.
steps: Total number of steps (batches of samples)
before declaring `_predict_loop` finished.
Ignored with the default value of `None`.
Returns:
Array of predictions (if the model has a single output)
or list of arrays of predictions
(if the model has multiple outputs).
"""
model._make_predict_function()
f = model.predict_function
inputs = training_utils.ModelInputs(inputs).as_list()
if model.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = inputs + [0]
else:
ins = inputs
num_samples = training_utils.check_num_samples(
inputs, batch_size, steps, 'steps')
if verbose == 1:
if steps is not None:
progbar = Progbar(target=steps)
else:
progbar = Progbar(target=num_samples)
indices_for_conversion_to_dense = []
for i in range(len(model._feed_inputs)):
if (issparse is not None and issparse(inputs[i]) and
not K.is_sparse(model._feed_inputs[i])):
indices_for_conversion_to_dense.append(i)
if steps is not None:
# Step-based predictions.
# Since we do not know how many samples
# we will see, we cannot pre-allocate
# the returned Numpy arrays.
# Instead, we store one array per batch seen
# and concatenate them upon returning.
unconcatenated_outs = []
for step in range(steps):
batch_outs = f(ins)
if not isinstance(batch_outs, list):
batch_outs = [batch_outs]
if step == 0:
for batch_out in batch_outs:
unconcatenated_outs.append([])
for i, batch_out in enumerate(batch_outs):
unconcatenated_outs[i].append(batch_out)
if verbose == 1:
progbar.update(step + 1)
if len(unconcatenated_outs) == 1:
return np.concatenate(unconcatenated_outs[0], axis=0)
return [
np.concatenate(unconcatenated_outs[i], axis=0)
for i in range(len(unconcatenated_outs))
]
else:
# Sample-based predictions.
outs = []
batches = make_batches(num_samples, batch_size)
index_array = np.arange(num_samples)
for batch_index, (batch_start, batch_end) in enumerate(batches):
batch_ids = index_array[batch_start:batch_end]
if ins and isinstance(ins[-1], int):
# Do not slice the training phase flag.
ins_batch = slice_arrays(ins[:-1], batch_ids) + [ins[-1]]
else:
ins_batch = slice_arrays(ins, batch_ids)
for i in indices_for_conversion_to_dense:
ins_batch[i] = ins_batch[i].toarray()
batch_outs = f(ins_batch)
if not isinstance(batch_outs, list):
batch_outs = [batch_outs]
if batch_index == 0:
# Pre-allocate the results arrays.
for batch_out in batch_outs:
shape = (num_samples,) + batch_out.shape[1:]
outs.append(np.zeros(shape, dtype=batch_out.dtype))
for i, batch_out in enumerate(batch_outs):
outs[i][batch_start:batch_end] = batch_out
if verbose == 1:
progbar.update(batch_end)
if len(outs) == 1:
return outs[0]
return outs
def test_loop(model,
inputs,
targets,
sample_weights=None,
batch_size=None,
verbose=0,
steps=None):
"""Abstract method to loop over some data in batches.
Arguments:
model: Keras Model instance.
inputs: List of input arrays.
targets: List of target arrays.
sample_weights: Optional list of sample weight arrays.
batch_size: integer batch size or `None`.
verbose: verbosity mode.
steps: Total number of steps (batches of samples)
before declaring predictions finished.
Ignored with the default value of `None`.
Returns:
Scalar loss (if the model has a single output and no metrics)
or list of scalars (if the model has multiple outputs
and/or metrics). The attribute `model.metrics_names` will give you
the display labels for the scalar outputs.
"""
model._make_eval_function()
f = model._eval_function
sample_weights = sample_weights or []
inputs = training_utils.ModelInputs(inputs).as_list()
if model.uses_learning_phase and not isinstance(K.learning_phase(), int):
ins = inputs + targets + sample_weights + [0]
else:
ins = inputs + targets + sample_weights
if hasattr(model, 'metrics'):
for m in model.stateful_metric_functions:
m.reset_states()
num_samples = training_utils.check_num_samples(
ins, batch_size, steps, 'steps')
outs = []
if verbose == 1:
if steps is not None:
progbar = Progbar(target=steps)
else:
progbar = Progbar(target=num_samples)
# To prevent a slowdown, we find beforehand the arrays that need conversion.
feed = model._feed_inputs + model._feed_targets + model._feed_sample_weights
indices_for_conversion_to_dense = []
for i in range(len(feed)):
if issparse is not None and issparse(ins[i]) and not K.is_sparse(feed[i]):
indices_for_conversion_to_dense.append(i)
if steps is not None:
for step in range(steps):
batch_outs = f(ins)
if isinstance(batch_outs, list):
if step == 0:
for _ in enumerate(batch_outs):
outs.append(0.)
outs[0] += batch_outs[0] # index 0 = 'loss'
outs[1:] = batch_outs[1:]
else:
if step == 0:
outs.append(0.)
outs[0] += batch_outs
if verbose == 1:
progbar.update(step + 1)
outs[0] /= steps
else:
batches = make_batches(num_samples, batch_size)
index_array = np.arange(num_samples)
for batch_index, (batch_start, batch_end) in enumerate(batches):
batch_ids = index_array[batch_start:batch_end]
if isinstance(ins[-1], int):
# Do not slice the training phase flag.
ins_batch = slice_arrays(ins[:-1], batch_ids) + [ins[-1]]
else:
ins_batch = slice_arrays(ins, batch_ids)
for i in indices_for_conversion_to_dense:
ins_batch[i] = ins_batch[i].toarray()
batch_outs = f(ins_batch)
if isinstance(batch_outs, list):
if batch_index == 0:
outs.extend([0.] * len(batch_outs))
outs[0] += batch_outs[0] * len(batch_ids) # index 0 = 'loss'
outs[1:] = batch_outs[1:]
else:
if batch_index == 0:
outs.append(0.)
outs[0] += batch_outs * len(batch_ids)
if verbose == 1:
progbar.update(batch_end)
outs[0] /= num_samples
if len(outs) == 1:
return outs[0]
return outs
| apache-2.0 |
urrego093/proyecto_mv | extras/build_web2py/setup_app.py | 15 | 5218 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
copy_apps = False
copy_scripts = True
copy_site_packages = True
remove_build_files = True
make_zip = True
zip_filename = "web2py_osx"
from setuptools import setup
from gluon.import_all import base_modules, contributed_modules
from gluon.fileutils import readlines_file
import os
import fnmatch
import shutil
import sys
import re
import zipfile
#read web2py version from VERSION file
web2py_version_line = readlines_file('VERSION')[0]
#use regular expression to get just the version number
v_re = re.compile('[0-9]+\.[0-9]+\.[0-9]+')
web2py_version = v_re.search(web2py_version_line).group(0)
class reglob:
def __init__(self, directory, pattern="*"):
self.stack = [directory]
self.pattern = pattern
self.files = []
self.index = 0
def __getitem__(self, index):
while 1:
try:
file = self.files[self.index]
self.index = self.index + 1
except IndexError:
self.index = 0
self.directory = self.stack.pop()
self.files = os.listdir(self.directory)
else:
fullname = os.path.join(self.directory, file)
if os.path.isdir(fullname) and not os.path.islink(fullname):
self.stack.append(fullname)
if not (file.startswith('.') or file.startswith('#') or file.endswith('~')) \
and fnmatch.fnmatch(file, self.pattern):
return fullname
setup(app=['web2py.py'],
version=web2py_version,
description="web2py web framework",
author="Massimo DiPierro",
license="LGPL v3",
data_files=[
'NEWINSTALL',
'ABOUT',
'LICENSE',
'VERSION',
'splashlogo.gif',
'logging.example.conf',
'options_std.py',
],
options={'py2app': {
'argv_emulation': True,
'includes': base_modules,
}},
setup_requires=['py2app'])
def copy_folders(source, destination):
"""Copy files & folders from source to destination (within dist/)"""
print 'copying %s -> %s' % (source, destination)
base = 'dist/web2py.app/Contents/Resources/'
if os.path.exists(os.path.join(base, destination)):
shutil.rmtree(os.path.join(base, destination))
shutil.copytree(os.path.join(source), os.path.join(base, destination))
#Should we include applications?
copy_folders('gluon','gluon')
if copy_apps:
copy_folders('applications', 'applications')
print "Your application(s) have been added"
else:
#only copy web2py's default applications
copy_folders('applications/admin', 'applications/admin')
copy_folders('applications/welcome', 'applications/welcome')
copy_folders('applications/examples', 'applications/examples')
print "Only web2py's admin, examples & welcome applications have been added"
#should we copy project's site-packages into dist/site-packages
if copy_site_packages:
#copy site-packages
copy_folders('site-packages', 'site-packages')
else:
#no worries, web2py will create the (empty) folder first run
print "Skipping site-packages"
pass
#should we copy project's scripts into dist/scripts
if copy_scripts:
#copy scripts
copy_folders('scripts', 'scripts')
else:
#no worries, web2py will create the (empty) folder first run
print "Skipping scripts"
pass
#borrowed from http://bytes.com/topic/python/answers/851018-how-zip-directory-python-using-zipfile
def recursive_zip(zipf, directory, folder=""):
for item in os.listdir(directory):
if os.path.isfile(os.path.join(directory, item)):
zipf.write(os.path.join(directory, item), folder + os.sep + item)
elif os.path.isdir(os.path.join(directory, item)):
recursive_zip(
zipf, os.path.join(directory, item), folder + os.sep + item)
#should we create a zip file of the build?
if make_zip:
#to keep consistent with how official web2py windows zip file is setup,
#create a web2py folder & copy dist's files into it
shutil.copytree('dist', 'zip_temp/web2py')
#create zip file
#use filename specified via command line
zipf = zipfile.ZipFile(
zip_filename + ".zip", "w", compression=zipfile.ZIP_DEFLATED)
path = 'zip_temp' # just temp so the web2py directory is included in our zip file
recursive_zip(
zipf, path) # leave the first folder as None, as path is root.
zipf.close()
shutil.rmtree('zip_temp')
print "Your Windows binary version of web2py can be found in " + \
zip_filename + ".zip"
print "You may extract the archive anywhere and then run web2py/web2py.exe"
#should py2exe build files be removed?
if remove_build_files:
shutil.rmtree('build')
shutil.rmtree('deposit')
shutil.rmtree('dist')
print "py2exe build files removed"
#final info
if not make_zip and not remove_build_files:
print "Your Windows binary & associated files can also be found in /dist"
print "Finished!"
print "Enjoy web2py " + web2py_version_line
| gpl-3.0 |
owlabs/incubator-airflow | airflow/models/taskreschedule.py | 1 | 3374 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""TaskReschedule tracks rescheduled task instances."""
from sqlalchemy import Column, ForeignKeyConstraint, Index, Integer, String, asc
from airflow.models.base import Base, ID_LEN
from airflow.utils.db import provide_session
from airflow.utils.sqlalchemy import UtcDateTime
class TaskReschedule(Base):
"""
TaskReschedule tracks rescheduled task instances.
"""
__tablename__ = "task_reschedule"
id = Column(Integer, primary_key=True)
task_id = Column(String(ID_LEN), nullable=False)
dag_id = Column(String(ID_LEN), nullable=False)
execution_date = Column(UtcDateTime, nullable=False)
try_number = Column(Integer, nullable=False)
start_date = Column(UtcDateTime, nullable=False)
end_date = Column(UtcDateTime, nullable=False)
duration = Column(Integer, nullable=False)
reschedule_date = Column(UtcDateTime, nullable=False)
__table_args__ = (
Index('idx_task_reschedule_dag_task_date', dag_id, task_id, execution_date,
unique=False),
ForeignKeyConstraint([task_id, dag_id, execution_date],
['task_instance.task_id', 'task_instance.dag_id',
'task_instance.execution_date'],
name='task_reschedule_dag_task_date_fkey',
ondelete='CASCADE')
)
def __init__(self, task, execution_date, try_number, start_date, end_date,
reschedule_date):
self.dag_id = task.dag_id
self.task_id = task.task_id
self.execution_date = execution_date
self.try_number = try_number
self.start_date = start_date
self.end_date = end_date
self.reschedule_date = reschedule_date
self.duration = (self.end_date - self.start_date).total_seconds()
@staticmethod
@provide_session
def find_for_task_instance(task_instance, session):
"""
Returns all task reschedules for the task instance and try number,
in ascending order.
:param task_instance: the task instance to find task reschedules for
:type task_instance: airflow.models.TaskInstance
"""
TR = TaskReschedule
return (
session
.query(TR)
.filter(TR.dag_id == task_instance.dag_id,
TR.task_id == task_instance.task_id,
TR.execution_date == task_instance.execution_date,
TR.try_number == task_instance.try_number)
.order_by(asc(TR.id))
.all()
)
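    # Assumed usage (illustration, not from the docstring); `ti` stands in for
    # a real TaskInstance:
    #
    #     trs = TaskReschedule.find_for_task_instance(task_instance=ti)
    #     total_wait = sum(tr.duration for tr in trs)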
| apache-2.0 |
cykerway/wmwm | setup.py | 1 | 7315 | #!/usr/bin/env python3
'''
setuptools based setup module;
see <https://packaging.python.org/en/latest/distributing.html>;
'''
from os import path
from setuptools import find_packages
from setuptools import setup
here = path.abspath(path.dirname(__file__))
## get long description from readme file;
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
## ========================================================================
## required for pypi upload;
## ========================================================================
## project name;
##
## this determines how users install this project:
##
## pip install sampleproject
##
## and where this project lives on pypi:
##
## <https://pypi.org/project/sampleproject/>
##
## this name is registered for you the first time you publish this package;
##
## name specification:
##
## <https://packaging.python.org/specifications/core-metadata/#name>
##
name='awd',
## project version;
##
## version specification (pep 440):
##
## <https://www.python.org/dev/peps/pep-0440/>;
##
## single-sourcing techniques:
##
## <https://packaging.python.org/en/latest/single_source_version.html>
##
version='1.3.4',
## project homepage;
##
## this arg corresponds to "home-page" metadata field:
##
## <https://packaging.python.org/specifications/core-metadata/#home-page-optional>
##
url='https://github.com/cykerway/awd',
## author name;
author='Cyker Way',
## author email address;
author_email='cykerway@example.com',
## packages;
##
## you can provide a list of packages manually or use `find_packages()`;
##
packages=find_packages(exclude=['contrib', 'docs', 'tests']),
## ========================================================================
## optional for pypi upload;
## ========================================================================
## a one-line description;
##
## this arg corresponds to "summary" metadata field:
##
## <https://packaging.python.org/specifications/core-metadata/#summary>
##
description='a window director;',
## a longer description shown on project homepage on pypi;
##
## this is often the same as the readme;
##
## this arg corresponds to "description" metadata field:
##
## <https://packaging.python.org/specifications/core-metadata/#description-optional>
##
long_description=long_description,
## longer description content type;
##
## valid values are: `text/plain`, `text/x-rst`, `text/markdown`;
##
## this arg corresponds to "description-content-type" metadata field:
##
## <https://packaging.python.org/specifications/core-metadata/#description-content-type-optional>
##
long_description_content_type='text/markdown',
## classifiers categorizing this project;
##
## see <https://pypi.org/classifiers/>;
##
classifiers=[
## development status;
# 'Development Status :: 3 - Alpha',
'Development Status :: 4 - Beta',
# 'Development Status :: 5 - Production/Stable',
## intended audience;
# 'Intended Audience :: Developers',
'Intended Audience :: End Users/Desktop',
## topic;
'Topic :: Desktop Environment',
# 'Topic :: Games/Entertainment',
# 'Topic :: Multimedia',
# 'Topic :: Office/Business',
# 'Topic :: Scientific/Engineering',
# 'Topic :: Software Development',
# 'Topic :: System',
## license;
'License :: OSI Approved :: GNU General Public License v3 (GPLv3)',
# 'License :: OSI Approved :: BSD License',
# 'License :: OSI Approved :: MIT License',
## supported python versions;
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
## project keywords;
##
## these keywords will appear on the project page;
##
keywords='window layout',
## package data;
##
## this is a dict mapping package names to a list of relative path names
## (or glob patterns) that should be copied into the package when
## installed; the path names are interpreted relative to the package dir;
##
package_data={
# 'sample': ['*.bin'],
},
## additional data files;
##
## this is a sequence of `(dir, files)` pairs; each `(dir, files)` pair
## specifies the install dir and the files to install there; if `dir` is a
## relative path, it is relative to the install prefix (`sys.prefix` or
## `sys.exec_prefix`); each file in `files` is interpreted relative to the
## `setup.py` script;
##
## see <https://docs.python.org/3/distutils/setupscript.html#installing-additional-files>;
##
data_files=[
# ('data_files', ['data/data0.bin', 'data/data1.bin']),
],
## package dependencies;
##
## this is a list of packages that this project depends on; these packages
## will be installed by pip when this project is installed;
##
install_requires=[
'argparse-ext',
'ewmh-ext',
'logging-ext',
'python-xlib',
],
## extra package dependencies;
##
## this is a dict mapping extras (optional features of this project) to a
## list of packages that those extras depend on;
##
## users will be able to install these using the extras syntax:
##
## pip install sampleproject[dev]
##
## see <https://setuptools.readthedocs.io/en/latest/setuptools.html#declaring-extras-optional-features-with-their-own-dependencies>
##
extras_require={
# 'dev': ['check-manifest'],
# 'test': ['coverage'],
},
## to create executable scripts, use entry points:
##
## <https://setuptools.readthedocs.io/en/latest/setuptools.html#automatic-script-creation>
##
## for example, the following would provide a console script `sample-cli`
## which executes the `main` function in package `sample.cli`, and a gui
## script `sample-gui` which executes the `main` function in package
## `sample.gui`;
entry_points={
'console_scripts': [
'awd=awd.__main__:main',
],
# 'gui_scripts': [
# 'sample-gui=sample.gui:main',
# ],
},
## additional urls that are relevant to this project;
##
## examples include: where the package tracks issues, where the source is
## hosted, where to say thanks to the package maintainers, and where to
## support the project financially; the keys are used to render the link
## texts on pypi;
##
## this arg corresponds to "project-url" metadata fields:
##
## <https://packaging.python.org/specifications/core-metadata/#project-url-multiple-use>
##
project_urls={
'Bug Reports': 'https://github.com/cykerway/awd/issues',
# 'Funding': 'https://donate.pypi.org',
# 'Say Thanks!': 'http://saythanks.io/to/example',
'Source': 'https://github.com/cykerway/awd/',
},
)
| gpl-3.0 |
0k/odoo | addons/auth_oauth/__openerp__.py | 20 | 1628 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010-2012 OpenERP s.a. (<http://openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'OAuth2 Authentication',
'version': '1.0',
'category': 'Tools',
'description': """
    Allow users to log in through an OAuth2 provider.
    =================================================
""",
'author': 'OpenERP s.a.',
'maintainer': 'OpenERP s.a.',
'website': 'https://www.odoo.com',
'depends': ['base', 'web', 'base_setup', 'auth_signup'],
'data': [
'res_users.xml',
'auth_oauth_data.xml',
'auth_oauth_data.yml',
'auth_oauth_view.xml',
'security/ir.model.access.csv',
'views/auth_oauth_login.xml',
],
'installable': True,
'auto_install': False,
}
| agpl-3.0 |
newerthcom/savagerebirth | libs/python-2.72/Mac/Modules/ctl/ctlsupport.py | 39 | 27042 | # This script generates a Python interface for an Apple Macintosh Manager.
# It uses the "bgen" package to generate C code.
# The function specifications are generated by scanning the manager's header file,
# using the "scantools" package (customized for this particular manager).
import string
# Declarations that change for each manager
MACHEADERFILE = 'Controls.h' # The Apple header file
MODNAME = '_Ctl' # The name of the module
OBJECTNAME = 'Control' # The basic name of the objects used here
# The following is *usually* unchanged but may still require tuning
MODPREFIX = 'Ctl' # The prefix for module-wide routines
OBJECTTYPE = OBJECTNAME + 'Handle' # The C type used to represent them
OBJECTPREFIX = MODPREFIX + 'Obj' # The prefix for object methods
INPUTFILE = string.lower(MODPREFIX) + 'gen.py' # The file generated by the scanner
OUTPUTFILE = MODNAME + "module.c" # The file generated by this program
from macsupport import *
# Create the type objects
ControlHandle = OpaqueByValueType(OBJECTTYPE, OBJECTPREFIX)
ControlRef = ControlHandle
ExistingControlHandle = OpaqueByValueType(OBJECTTYPE, "CtlObj_WhichControl", "BUG")
RgnHandle = OpaqueByValueType("RgnHandle", "ResObj")
CCTabHandle = OpaqueByValueType("CCTabHandle", "ResObj")
AuxCtlHandle = OpaqueByValueType("AuxCtlHandle", "ResObj")
ControlPartCode = Type("ControlPartCode", "h")
DragConstraint = Type("DragConstraint", "H")
ControlVariant = Type("ControlVariant", "h")
IconTransformType = Type("IconTransformType", "h")
EventModifiers = Type("EventModifiers", "H")
ClickActivationResult = Type("ClickActivationResult", "l")
ControlButtonGraphicAlignment = Type("ControlButtonGraphicAlignment", "h")
ControlButtonTextAlignment = Type("ControlButtonTextAlignment", "h")
ControlButtonTextPlacement = Type("ControlButtonTextPlacement", "h")
ControlContentType = Type("ControlContentType", "h")
ControlFocusPart = Type("ControlFocusPart", "h")
ControlFontStyleRec = OpaqueType('ControlFontStyleRec', 'ControlFontStyle')
ControlFontStyleRec_ptr = ControlFontStyleRec
ControlID = OpaqueType('ControlID', 'PyControlID')
ControlID_ptr = ControlID
DragTrackingMessage = Type("DragTrackingMessage", "h")
DragReference = OpaqueByValueType("DragReference", "DragObj")
CFStringRef = OpaqueByValueType("CFStringRef", "CFStringRefObj")
CFMutableStringRef = OpaqueByValueType("CFMutableStringRef", "CFMutableStringRefObj")
CFDataRef = OpaqueByValueType("CFDataRef", "CFDataRefObj")
ControlTabSize = UInt16
ControlTabDirection = UInt16
ControlPopupArrowOrientation = UInt16
ControlPopupArrowSize = UInt16
ControlClockType = UInt16
ControlClockFlags = UInt32
ControlRoundButtonSize = SInt16
DataBrowserViewStyle = OSType
DataBrowserItemID = UInt32
DataBrowserEditCommand = UInt32
DataBrowserSelectionAnchorDirection = UInt32
DataBrowserItemState = UInt32
DataBrowserPropertyID = UInt32
DataBrowserRevealOptions = UInt8
DataBrowserSortOrder = UInt16
DataBrowserSelectionFlags = UInt32
DataBrowserPropertyFlags = UInt32
DataBrowserPropertyPart = OSType
DataBrowserTableViewColumnID = DataBrowserPropertyID
#DataBrowserTableViewColumnDesc = DataBrowserPropertyDesc
DataBrowserTableViewHiliteStyle = UInt32
DataBrowserTableViewRowIndex = UInt32
DataBrowserTableViewColumnIndex = UInt32
DataBrowserPropertyType = OSType
ControlDisclosureTriangleOrientation = UInt16
DataBrowserTableViewColumnDesc = OpaqueType("DataBrowserTableViewColumnDesc",
"DataBrowserTableViewColumnDesc")
DataBrowserListViewColumnDesc = OpaqueType("DataBrowserListViewColumnDesc",
"DataBrowserListViewColumnDesc")
ControlButtonContentInfo = OpaqueType("ControlButtonContentInfo",
"ControlButtonContentInfo")
ControlButtonContentInfoPtr = ControlButtonContentInfo_ptr = ControlButtonContentInfo
ControlTabEntry_ptr = OpaqueType("ControlTabEntry", "ControlTabEntry")
ControlBevelThickness = UInt16
ControlBevelButtonBehavior = UInt16
ControlBevelButtonMenuBehavior = UInt16
ControlBevelButtonMenuPlacement = UInt16
ControlPushButtonIconAlignment = UInt16
class ControlActionDefinition(Type):
def declare(self, name):
Output("%s %s;", self.typeName, name)
Output("UniversalProcPtr c_callback;")
def passInput(self, name):
return "myactionproc_upp"
def cleanup(self, name):
Output("setcallback((PyObject*)_self, kMyControlActionProcTag, actionProc, &c_callback);")
class ControlActionDefinitionNewControl(ControlActionDefinition):
def cleanup(self, name):
Output("setcallback(_res, kMyControlActionProcTag, liveTrackingProc, &c_callback);")
ControlActionUPP = ControlActionDefinition("PyObject*", "O")
ControlActionUPPNewControl = ControlActionDefinitionNewControl("PyObject*", "O")
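# Note: the two definitions above let Python callables stand in for the
# toolbox ControlActionUPP type. The generated C glue always passes the shared
# myactionproc_upp to the toolbox and stores the Python callback on the
# control object via setcallback() (declared in includestuff below), which
# myactionproc() later looks up and invokes.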
ControlSliderOrientation = UInt16
includestuff = includestuff + """
#include <Carbon/Carbon.h>
#ifdef USE_TOOLBOX_OBJECT_GLUE
extern PyObject *_CtlObj_New(ControlHandle);
extern int _CtlObj_Convert(PyObject *, ControlHandle *);
#define CtlObj_New _CtlObj_New
#define CtlObj_Convert _CtlObj_Convert
#endif
static PyObject *CtlObj_WhichControl(ControlHandle);
#define as_Control(h) ((ControlHandle)h)
#define as_Resource(ctl) ((Handle)ctl)
#define GetControlRect(ctl, rectp) GetControlBounds(ctl, rectp)
#define MAXTABS 32 /* maximum number of tabs that we support in a tabs control */
/*
** Parse/generate ControlFontStyleRec records
*/
#if 0 /* Not needed */
static PyObject *
ControlFontStyle_New(ControlFontStyleRec *itself)
{
return Py_BuildValue("hhhhhhO&O&", itself->flags, itself->font,
itself->size, itself->style, itself->mode, itself->just,
QdRGB_New, &itself->foreColor, QdRGB_New, &itself->backColor);
}
#endif
static int
ControlFontStyle_Convert(PyObject *v, ControlFontStyleRec *itself)
{
return PyArg_Parse(v, "(hhhhhhO&O&)", &itself->flags,
&itself->font, &itself->size, &itself->style, &itself->mode,
&itself->just, QdRGB_Convert, &itself->foreColor,
QdRGB_Convert, &itself->backColor);
}
/*
** Parse/generate ControlID records
*/
static PyObject *
PyControlID_New(ControlID *itself)
{
return Py_BuildValue("O&l", PyMac_BuildOSType, itself->signature, itself->id);
}
static int
PyControlID_Convert(PyObject *v, ControlID *itself)
{
return PyArg_Parse(v, "(O&l)", PyMac_GetOSType, &itself->signature, &itself->id);
}
/*
** generate DataBrowserListViewColumnDesc records
*/
static int
DataBrowserTableViewColumnDesc_Convert(PyObject *v, DataBrowserTableViewColumnDesc *itself)
{
return PyArg_Parse(v, "(lO&l)",
&itself->propertyID,
PyMac_GetOSType, &itself->propertyType,
&itself->propertyFlags);
}
static int
ControlButtonContentInfo_Convert(PyObject *v, ControlButtonContentInfo *itself)
{
return PyArg_Parse(v, "(hO&)",
&itself->contentType,
OptResObj_Convert, &itself->u.iconSuite);
}
static int
DataBrowserListViewHeaderDesc_Convert(PyObject *v, DataBrowserListViewHeaderDesc *itself)
{
itself->version = kDataBrowserListViewLatestHeaderDesc;
return PyArg_Parse(v, "(HHhO&HO&O&)",
&itself->minimumWidth,
&itself->maximumWidth,
&itself->titleOffset,
CFStringRefObj_Convert, &itself->titleString,
&itself->initialOrder,
ControlFontStyle_Convert, &itself->btnFontStyle,
ControlButtonContentInfo_Convert, &itself->btnContentInfo);
}
static int
DataBrowserListViewColumnDesc_Convert(PyObject *v, DataBrowserListViewColumnDesc *itself)
{
return PyArg_Parse(v, "(O&O&)",
DataBrowserTableViewColumnDesc_Convert, &itself->propertyDesc,
DataBrowserListViewHeaderDesc_Convert, &itself->headerBtnDesc);
}
/* TrackControl and HandleControlClick callback support */
#define kMyControlActionProcTag 'ACTN' /* not an official tag, only for internal use */
static PyObject *tracker;
static ControlActionUPP mytracker_upp;
static ControlActionUPP myactionproc_upp;
static ControlUserPaneKeyDownUPP mykeydownproc_upp;
static ControlUserPaneFocusUPP myfocusproc_upp;
static ControlUserPaneDrawUPP mydrawproc_upp;
static ControlUserPaneIdleUPP myidleproc_upp;
static ControlUserPaneHitTestUPP myhittestproc_upp;
static ControlUserPaneTrackingUPP mytrackingproc_upp;
static int settrackfunc(PyObject *); /* forward */
static void clrtrackfunc(void); /* forward */
static int setcallback(PyObject *, OSType, PyObject *, UniversalProcPtr *);
"""
finalstuff = finalstuff + """
static PyObject *
CtlObj_NewUnmanaged(ControlHandle itself)
{
ControlObject *it;
if (itself == NULL) return PyMac_Error(resNotFound);
it = PyObject_NEW(ControlObject, &Control_Type);
if (it == NULL) return NULL;
it->ob_itself = itself;
it->ob_callbackdict = NULL;
return (PyObject *)it;
}
static PyObject *
CtlObj_WhichControl(ControlHandle c)
{
PyObject *it;
if (c == NULL)
it = Py_None;
else {
it = (PyObject *) GetControlReference(c);
/*
** If the refcon is zero or doesn't point back to the Python object
** the control is not ours. Return a temporary object.
*/
if (it == NULL || ((ControlObject *)it)->ob_itself != c)
return CtlObj_NewUnmanaged(c);
}
Py_INCREF(it);
return it;
}
static int
settrackfunc(PyObject *obj)
{
if (tracker) {
PyErr_SetString(Ctl_Error, "Tracker function in use");
return 0;
}
tracker = obj;
Py_INCREF(tracker);
return 1;
}
static void
clrtrackfunc(void)
{
Py_XDECREF(tracker);
tracker = 0;
}
static pascal void
mytracker(ControlHandle ctl, short part)
{
PyObject *args, *rv=0;
args = Py_BuildValue("(O&i)", CtlObj_WhichControl, ctl, (int)part);
if (args && tracker) {
rv = PyEval_CallObject(tracker, args);
Py_DECREF(args);
}
if (rv)
Py_DECREF(rv);
else {
PySys_WriteStderr("TrackControl or HandleControlClick: exception in tracker function\\n");
PyErr_Print();
}
}
static int
setcallback(PyObject *myself, OSType which, PyObject *callback, UniversalProcPtr *uppp)
{
ControlObject *self = (ControlObject *)myself;
char keybuf[9];
if ( which == kMyControlActionProcTag )
*uppp = (UniversalProcPtr)myactionproc_upp;
else if ( which == kControlUserPaneKeyDownProcTag )
*uppp = (UniversalProcPtr)mykeydownproc_upp;
else if ( which == kControlUserPaneFocusProcTag )
*uppp = (UniversalProcPtr)myfocusproc_upp;
else if ( which == kControlUserPaneDrawProcTag )
*uppp = (UniversalProcPtr)mydrawproc_upp;
else if ( which == kControlUserPaneIdleProcTag )
*uppp = (UniversalProcPtr)myidleproc_upp;
else if ( which == kControlUserPaneHitTestProcTag )
*uppp = (UniversalProcPtr)myhittestproc_upp;
else if ( which == kControlUserPaneTrackingProcTag )
*uppp = (UniversalProcPtr)mytrackingproc_upp;
else
return -1;
/* Only now do we test for clearing of the callback: */
if ( callback == Py_None )
*uppp = NULL;
/* Create the dict if it doesn't exist yet (so we don't get such a dict for every control) */
if ( self->ob_callbackdict == NULL )
if ( (self->ob_callbackdict = PyDict_New()) == NULL )
return -1;
/* And store the Python callback */
sprintf(keybuf, "%x", (unsigned)which);
if (PyDict_SetItemString(self->ob_callbackdict, keybuf, callback) < 0)
return -1;
return 0;
}
static PyObject *
callcallback(ControlObject *self, OSType which, PyObject *arglist)
{
char keybuf[9];
PyObject *func, *rv;
sprintf(keybuf, "%x", (unsigned)which);
if ( self->ob_callbackdict == NULL ||
(func = PyDict_GetItemString(self->ob_callbackdict, keybuf)) == NULL ) {
PySys_WriteStderr("Control callback %x without callback object\\n", (unsigned)which);
return NULL;
}
rv = PyEval_CallObject(func, arglist);
if ( rv == NULL ) {
PySys_WriteStderr("Exception in control callback %x handler\\n", (unsigned)which);
PyErr_Print();
}
return rv;
}
static pascal void
myactionproc(ControlHandle control, SInt16 part)
{
ControlObject *ctl_obj;
PyObject *arglist, *rv;
ctl_obj = (ControlObject *)CtlObj_WhichControl(control);
arglist = Py_BuildValue("Oh", ctl_obj, part);
rv = callcallback(ctl_obj, kMyControlActionProcTag, arglist);
Py_XDECREF(arglist);
Py_XDECREF(rv);
}
static pascal ControlPartCode
mykeydownproc(ControlHandle control, SInt16 keyCode, SInt16 charCode, SInt16 modifiers)
{
ControlObject *ctl_obj;
PyObject *arglist, *rv;
short c_rv = 0;
ctl_obj = (ControlObject *)CtlObj_WhichControl(control);
arglist = Py_BuildValue("Ohhh", ctl_obj, keyCode, charCode, modifiers);
rv = callcallback(ctl_obj, kControlUserPaneKeyDownProcTag, arglist);
Py_XDECREF(arglist);
if ( rv )
if (!PyArg_Parse(rv, "h", &c_rv))
PyErr_Clear();
Py_XDECREF(rv);
return (ControlPartCode)c_rv;
}
static pascal ControlPartCode
myfocusproc(ControlHandle control, ControlPartCode part)
{
ControlObject *ctl_obj;
PyObject *arglist, *rv;
short c_rv = kControlFocusNoPart;
ctl_obj = (ControlObject *)CtlObj_WhichControl(control);
arglist = Py_BuildValue("Oh", ctl_obj, part);
rv = callcallback(ctl_obj, kControlUserPaneFocusProcTag, arglist);
Py_XDECREF(arglist);
if ( rv )
if (!PyArg_Parse(rv, "h", &c_rv))
PyErr_Clear();
Py_XDECREF(rv);
return (ControlPartCode)c_rv;
}
static pascal void
mydrawproc(ControlHandle control, SInt16 part)
{
ControlObject *ctl_obj;
PyObject *arglist, *rv;
ctl_obj = (ControlObject *)CtlObj_WhichControl(control);
arglist = Py_BuildValue("Oh", ctl_obj, part);
rv = callcallback(ctl_obj, kControlUserPaneDrawProcTag, arglist);
Py_XDECREF(arglist);
Py_XDECREF(rv);
}
static pascal void
myidleproc(ControlHandle control)
{
ControlObject *ctl_obj;
PyObject *arglist, *rv;
ctl_obj = (ControlObject *)CtlObj_WhichControl(control);
arglist = Py_BuildValue("O", ctl_obj);
rv = callcallback(ctl_obj, kControlUserPaneIdleProcTag, arglist);
Py_XDECREF(arglist);
Py_XDECREF(rv);
}
static pascal ControlPartCode
myhittestproc(ControlHandle control, Point where)
{
ControlObject *ctl_obj;
PyObject *arglist, *rv;
short c_rv = -1;
ctl_obj = (ControlObject *)CtlObj_WhichControl(control);
arglist = Py_BuildValue("OO&", ctl_obj, PyMac_BuildPoint, where);
rv = callcallback(ctl_obj, kControlUserPaneHitTestProcTag, arglist);
Py_XDECREF(arglist);
/* Ignore errors, nothing we can do about them */
if ( rv )
if (!PyArg_Parse(rv, "h", &c_rv))
PyErr_Clear();
Py_XDECREF(rv);
return (ControlPartCode)c_rv;
}
static pascal ControlPartCode
mytrackingproc(ControlHandle control, Point startPt, ControlActionUPP actionProc)
{
ControlObject *ctl_obj;
PyObject *arglist, *rv;
short c_rv = -1;
ctl_obj = (ControlObject *)CtlObj_WhichControl(control);
/* We cannot pass the actionProc without lots of work */
arglist = Py_BuildValue("OO&", ctl_obj, PyMac_BuildPoint, startPt);
rv = callcallback(ctl_obj, kControlUserPaneTrackingProcTag, arglist);
Py_XDECREF(arglist);
if ( rv )
if (!PyArg_Parse(rv, "h", &c_rv))
PyErr_Clear();
Py_XDECREF(rv);
return (ControlPartCode)c_rv;
}
"""
initstuff = initstuff + """
mytracker_upp = NewControlActionUPP(mytracker);
myactionproc_upp = NewControlActionUPP(myactionproc);
mykeydownproc_upp = NewControlUserPaneKeyDownUPP(mykeydownproc);
myfocusproc_upp = NewControlUserPaneFocusUPP(myfocusproc);
mydrawproc_upp = NewControlUserPaneDrawUPP(mydrawproc);
myidleproc_upp = NewControlUserPaneIdleUPP(myidleproc);
myhittestproc_upp = NewControlUserPaneHitTestUPP(myhittestproc);
mytrackingproc_upp = NewControlUserPaneTrackingUPP(mytrackingproc);
PyMac_INIT_TOOLBOX_OBJECT_NEW(ControlHandle, CtlObj_New);
PyMac_INIT_TOOLBOX_OBJECT_CONVERT(ControlHandle, CtlObj_Convert);
"""
class MyObjectDefinition(PEP253Mixin, ObjectIdentityMixin, GlobalObjectDefinition):
def outputStructMembers(self):
GlobalObjectDefinition.outputStructMembers(self)
Output("PyObject *ob_callbackdict;")
def outputCheckNewArg(self):
Output("if (itself == NULL) return PyMac_Error(resNotFound);")
def outputInitStructMembers(self):
GlobalObjectDefinition.outputInitStructMembers(self)
Output("SetControlReference(itself, (long)it);")
Output("it->ob_callbackdict = NULL;")
def outputCleanupStructMembers(self):
Output("Py_XDECREF(self->ob_callbackdict);")
Output("if (self->ob_itself)SetControlReference(self->ob_itself, (long)0); /* Make it forget about us */")
# Create the generator groups and link them
module = MacModule(MODNAME, MODPREFIX, includestuff, finalstuff, initstuff)
object = MyObjectDefinition(OBJECTNAME, OBJECTPREFIX, OBJECTTYPE)
module.addobject(object)
# Create the generator classes used to populate the lists
Function = OSErrWeakLinkFunctionGenerator
Method = OSErrWeakLinkMethodGenerator
# Create and populate the lists
functions = []
methods = []
execfile(INPUTFILE)
execfile('ctledit.py')
# add the populated lists to the generator groups
for f in functions: module.add(f)
for f in methods: object.add(f)
# Manual generator for TrackControl, due to callback idiosyncrasies
trackcontrol_body = """
ControlPartCode _rv;
Point startPoint;
ControlActionUPP upp = 0;
PyObject *callback = 0;
if (!PyArg_ParseTuple(_args, "O&|O",
PyMac_GetPoint, &startPoint, &callback))
return NULL;
if (callback && callback != Py_None) {
if (PyInt_Check(callback) && PyInt_AS_LONG(callback) == -1)
upp = (ControlActionUPP)-1;
else {
settrackfunc(callback);
upp = mytracker_upp;
}
}
_rv = TrackControl(_self->ob_itself,
startPoint,
upp);
clrtrackfunc();
_res = Py_BuildValue("h",
_rv);
return _res;
"""
f = ManualGenerator("TrackControl", trackcontrol_body);
f.docstring = lambda: "(Point startPoint [,trackercallback]) -> (ControlPartCode _rv)"
object.add(f)
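# Note: in the body above, passing the Python integer -1 as the tracker
# callback is translated to the toolbox sentinel (ControlActionUPP)-1, which
# asks TrackControl to use the action procedure already installed in the
# control instead of a Python-level tracker.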
# CJW - added 5/12/99
# Manual generator for HandleControlClick, as for TrackControl
handlecontrolclick_body = """
ControlPartCode _rv;
Point startPoint;
SInt16 modifiers;
ControlActionUPP upp = 0;
PyObject *callback = 0;
if (!PyArg_ParseTuple(_args, "O&h|O",
PyMac_GetPoint, &startPoint,
&modifiers,
&callback))
return NULL;
if (callback && callback != Py_None) {
if (PyInt_Check(callback) && PyInt_AS_LONG(callback) == -1)
upp = (ControlActionUPP)-1;
else {
settrackfunc(callback);
upp = mytracker_upp;
}
}
_rv = HandleControlClick(_self->ob_itself,
startPoint,
modifiers,
upp);
clrtrackfunc();
_res = Py_BuildValue("h",
_rv);
return _res;
"""
f = ManualGenerator("HandleControlClick", handlecontrolclick_body);
f.docstring = lambda: "(Point startPoint, Integer modifiers [,trackercallback]) -> (ControlPartCode _rv)"
object.add(f)
# Manual Generator for SetControlData
setcontroldata_body = """
OSErr _err;
ControlPartCode inPart;
ResType inTagName;
Size bufferSize;
Ptr buffer;
if (!PyArg_ParseTuple(_args, "hO&s#",
&inPart,
PyMac_GetOSType, &inTagName,
&buffer, &bufferSize))
return NULL;
_err = SetControlData(_self->ob_itself,
inPart,
inTagName,
bufferSize,
buffer);
if (_err != noErr)
return PyMac_Error(_err);
_res = Py_None;
return _res;
"""
f = ManualGenerator("SetControlData", setcontroldata_body);
f.docstring = lambda: "(part, type, data) -> None"
object.add(f)
# Manual Generator for GetControlData
getcontroldata_body = """
OSErr _err;
ControlPartCode inPart;
ResType inTagName;
Size bufferSize;
Ptr buffer;
Size outSize;
if (!PyArg_ParseTuple(_args, "hO&",
&inPart,
PyMac_GetOSType, &inTagName))
return NULL;
/* allocate a buffer for the data */
_err = GetControlDataSize(_self->ob_itself,
inPart,
inTagName,
&bufferSize);
if (_err != noErr)
return PyMac_Error(_err);
buffer = PyMem_NEW(char, bufferSize);
if (buffer == NULL)
return PyErr_NoMemory();
_err = GetControlData(_self->ob_itself,
inPart,
inTagName,
bufferSize,
buffer,
&outSize);
if (_err != noErr) {
PyMem_DEL(buffer);
return PyMac_Error(_err);
}
_res = Py_BuildValue("s#", buffer, outSize);
PyMem_DEL(buffer);
return _res;
"""
f = ManualGenerator("GetControlData", getcontroldata_body);
f.docstring = lambda: "(part, type) -> String"
object.add(f)
# Manual Generator for SetControlData_Handle
setcontroldata_handle_body = """
OSErr _err;
ControlPartCode inPart;
ResType inTagName;
Handle buffer;
if (!PyArg_ParseTuple(_args, "hO&O&",
&inPart,
PyMac_GetOSType, &inTagName,
OptResObj_Convert, &buffer))
return NULL;
_err = SetControlData(_self->ob_itself,
inPart,
inTagName,
sizeof(buffer),
(Ptr)&buffer);
if (_err != noErr)
return PyMac_Error(_err);
_res = Py_None;
return _res;
"""
f = ManualGenerator("SetControlData_Handle", setcontroldata_handle_body);
f.docstring = lambda: "(part, type, ResObj) -> None"
object.add(f)
# Manual Generator for GetControlData_Handle
getcontroldata_handle_body = """
OSErr _err;
ControlPartCode inPart;
ResType inTagName;
Size bufferSize;
Handle hdl;
if (!PyArg_ParseTuple(_args, "hO&",
&inPart,
PyMac_GetOSType, &inTagName))
return NULL;
/* Check it is handle-sized */
_err = GetControlDataSize(_self->ob_itself,
inPart,
inTagName,
&bufferSize);
if (_err != noErr)
return PyMac_Error(_err);
if (bufferSize != sizeof(Handle)) {
PyErr_SetString(Ctl_Error, "GetControlDataSize() != sizeof(Handle)");
return NULL;
}
_err = GetControlData(_self->ob_itself,
inPart,
inTagName,
sizeof(Handle),
(Ptr)&hdl,
&bufferSize);
if (_err != noErr) {
return PyMac_Error(_err);
}
_res = Py_BuildValue("O&", OptResObj_New, hdl);
return _res;
"""
f = ManualGenerator("GetControlData_Handle", getcontroldata_handle_body);
f.docstring = lambda: "(part, type) -> ResObj"
object.add(f)
# Manual Generator for SetControlData_Callback
setcontroldata_callback_body = """
OSErr _err;
ControlPartCode inPart;
ResType inTagName;
PyObject *callback;
UniversalProcPtr c_callback;
if (!PyArg_ParseTuple(_args, "hO&O",
&inPart,
PyMac_GetOSType, &inTagName,
&callback))
return NULL;
if ( setcallback((PyObject *)_self, inTagName, callback, &c_callback) < 0 )
return NULL;
_err = SetControlData(_self->ob_itself,
inPart,
inTagName,
sizeof(c_callback),
(Ptr)&c_callback);
if (_err != noErr)
return PyMac_Error(_err);
_res = Py_None;
return _res;
"""
f = ManualGenerator("SetControlData_Callback", setcontroldata_callback_body);
f.docstring = lambda: "(part, type, callbackfunc) -> None"
object.add(f)
createtabscontrol_body = """\
OSStatus _err;
WindowPtr window;
Rect boundsRect;
UInt16 size;
UInt16 direction;
int i;
UInt16 numTabs;
ControlTabEntry tabArray[MAXTABS];
ControlHandle outControl;
PyObject *tabArrayObj, *tabEntry;
#ifndef CreateTabsControl
PyMac_PRECHECK(CreateTabsControl);
#endif
if (!PyArg_ParseTuple(_args, "O&O&HHO",
WinObj_Convert, &window,
PyMac_GetRect, &boundsRect,
&size,
&direction,
&tabArrayObj))
return NULL;
i = PySequence_Length(tabArrayObj);
if (i == -1)
return NULL;
if (i > MAXTABS) {
PyErr_SetString(Ctl_Error, "Too many tabs");
return NULL;
}
numTabs = i;
for (i=0; i<numTabs; i++) {
tabEntry = PySequence_GetItem(tabArrayObj, i);
if (tabEntry == NULL)
return NULL;
if (!PyArg_Parse(tabEntry, "(O&O&B)",
ControlButtonContentInfo_Convert, &tabArray[i].icon,
CFStringRefObj_Convert, &tabArray[i].name,
&tabArray[i].enabled
))
return NULL;
}
_err = CreateTabsControl(window,
&boundsRect,
size,
direction,
numTabs,
tabArray,
&outControl);
if (_err != noErr) return PyMac_Error(_err);
_res = Py_BuildValue("O&",
CtlObj_New, outControl);
return _res;"""
f = ManualGenerator("CreateTabsControl", createtabscontrol_body)
f.docstring = lambda: "(WindowPtr window, Rect boundsRect, UInt16 size, UInt16 direction, ControlTabEntry tabArray) -> (ControlHandle outControl)"
module.add(f)
# generate output (open the output file as late as possible)
SetOutputFileName(OUTPUTFILE)
module.generate()
| gpl-2.0 |
nicko96/Chrome-Infra | glyco/glucose/install.py | 1 | 8024 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import hashlib
import httplib2
import logging
import os
import sys
import urllib
from glucose import util
LOGGER = logging.getLogger(__name__)
DEFAULT_CACHE = os.path.join(os.path.expanduser('~'), '.glyco_wheelcache')
def get_sha1_from_filename(filename, verbose=True):
"""Extract the claimed sha1 from the filename.
Also verify the name matches the wheel convention.
Args:
filename (str): path to a local file.
verbose (bool): print messages only if True.
  Returns:
    claimed_hash (str) or None if no hash can be found.
"""
basename = os.path.split(filename)[-1]
wheel_info = util.WHEEL_FILE_RE.match(basename)
if not wheel_info:
if verbose:
print >> sys.stderr, 'Invalid file name for wheel: %s' % basename
return None
if not wheel_info.group('build'):
if verbose:
print >> sys.stderr, ('No hash could be found in the filename.\n'
'Has this file been generated with Glyco?\n'
'%s' % basename)
return None
return wheel_info.group('build').split('_')[1]
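# A minimal illustration (the file name below is hypothetical, but follows the
# 'name-version-build_sha1-pytag-abitag-platform.whl' convention this function
# expects):
#
#   get_sha1_from_filename(
#       'pkg-1.0-0_da39a3ee5e6b4b0d3255bfef95601890afd80709-py2-none-any.whl',
#       verbose=False)
#
# returns 'da39a3ee5e6b4b0d3255bfef95601890afd80709', while a wheel name
# without a build tag returns None.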
def has_valid_sha1(filename, verbose=True):
"""Verify the hash of a whl file created by Glyco.
Args:
filename (str): path to a whl file.
verbose(bool): print messages only if True.
Returns:
matches (bool): true if the file content and the name match.
"""
claimed_sha = get_sha1_from_filename(filename, verbose=verbose)
if not claimed_sha:
return False
with open(filename, 'rb') as f:
digest = hashlib.sha1(f.read())
actual_sha = digest.hexdigest()
return actual_sha == claimed_sha
def get_install_list(packages):
"""Consolidate the list of things to install.
Args:
packages (list of str): local paths or https/gs URLs.
"""
install_list = []
for package in packages:
location = package
location_type = 'ERROR'
error = None
# Let's support only https. Security matters.
if package.startswith('http://'):
      error = 'Non-secure http is not supported; please use https: %s' % package
elif package.startswith('https://'):
location_type = 'http'
elif package.startswith('gs://'):
# TODO(pgervais): handle Cloud Storage properly.
location_type = 'http'
location = 'https://storage.googleapis.com/' + package[len('gs://'):]
elif os.path.isfile(package):
location = 'file://%s' % urllib.pathname2url(os.path.abspath(package))
location_type = 'file'
else:
error = ('Cannot find this file locally: %s\n'
'If you did not specify a file but an URI, '
'then the protocol is probably not supported.'
% os.path.abspath(package))
install_list.append({'location': location,
'location_type': location_type,
'error': error})
return install_list
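# For example (hypothetical inputs), get_install_list(['gs://bucket/pkg.whl'])
# yields [{'location': 'https://storage.googleapis.com/bucket/pkg.whl',
#          'location_type': 'http', 'error': None}], while an http:// URL or a
# missing local file comes back with 'location_type': 'ERROR' and an error
# message instead.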
def fetch_packages(install_list, requester=httplib2.Http(),
cache=DEFAULT_CACHE, verbose=True):
"""Make sure there is a local copy of all packages.
All paths returned by this function point at existing wheel files, with
correct hashes.
Args:
install_list (list of dict): return value of get_install_list.
requester (httplib2.Http): object to use to send http requests.
cache (str): path to a local directory used to store wheel files downloaded
from a remote storage.
verbose(bool): print messages only if True.
Returns:
paths (list of strings): path to each local wheel file.
"""
if not os.path.isdir(cache):
os.mkdir(cache)
paths = []
all_valid = True
for source in install_list:
if source['location_type'] == 'file':
assert source['location'].startswith('file://')
filename = source['location'][len('file://'):]
# FIXME(pgervais): convert to a windows path (/ -> \) and unquote.
if not has_valid_sha1(filename, verbose=verbose):
if verbose:
print >> sys.stderr, ("File content does not match hash for %s"
% filename)
all_valid = False
else:
paths.append(filename)
elif source['location_type'] == 'http':
# This is an URL so the path separator is necessarily /
base_filename = source['location'].split('/')[-1]
filename = os.path.join(cache, base_filename)
if not os.path.exists(filename):
# Try to download file to local cache
resp, content = requester.request(source['location'], 'GET')
if resp['status'] == '200':
temp_filename = os.path.join(cache, base_filename + '.tmp')
try:
with open(temp_filename, 'wb') as f:
f.write(content)
os.rename(temp_filename, filename)
except OSError:
if os.path.isfile(temp_filename):
os.remove(temp_filename)
else:
if verbose:
print >> sys.stderr, ("Got status %s when talking to %s" %
(resp['status'], source['location']))
all_valid = False
# We have to test again for existence since the download
# could have failed.
if os.path.exists(filename) and not has_valid_sha1(filename,
verbose=verbose):
if verbose:
print >> sys.stderr, ("File content does not match hash for %s"
% filename)
all_valid = False
# The file is bad anyway, there's no point in keeping it around.
# Plus we probably want to retry the download some time in the future.
os.remove(filename)
else:
paths.append(filename)
if not all_valid:
raise ValueError('Some errors occurred when getting wheel files.')
return paths
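# Typical flow (sketch; the wheel name is a placeholder): the two helpers
# compose, e.g.
#   paths = fetch_packages(get_install_list(['./pkg-1.0-0_<sha1>-py2-none-any.whl']))
# where every returned path points at a local wheel whose content matches the
# sha1 embedded in its name; a ValueError signals that at least one file
# failed to download or verify.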
def install(args):
"""Install wheel files"""
if not args.packages:
print 'No packages have been provided on the command-line, doing nothing.'
return
if not args.install_dir:
print >> sys.stderr, ('No destination directory specified, aborting. \n'
'Use the --install-dir option to specify it')
return 2
install_list = get_install_list(args.packages)
error_msgs = [d['error'] for d in install_list if 'error' in d and d['error']]
if error_msgs:
print >> sys.stderr, ('\n'.join(error_msgs))
print >> sys.stderr, 'Aborting (no packages installed)'
return 1
try:
package_paths = fetch_packages(install_list)
except ValueError:
print >> sys.stderr, 'Aborting (no packages installed)'
return 1
if not os.path.isdir(args.install_dir):
os.mkdir(args.install_dir)
with util.Virtualenv() as venv:
cmd = (['pip', 'install', '--no-index', '--target', args.install_dir]
+ package_paths)
LOGGER.debug('Running %s', ' '.join(cmd))
venv.check_call(cmd)
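# Sketch of the corresponding command line (assuming the console script is
# installed as 'glyco', which this module does not itself define):
#   glyco install --install-dir ./deps ./pkg-1.0-0_<sha1>-py2-none-any.whl
# The 'lysis' subcommand registered below behaves identically.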
def add_subparser(subparsers):
"""Add the 'install' command.
Also add the 'lysis' command as a synonym (and pun).
Args:
subparsers: output of argparse.ArgumentParser.add_subparsers()
"""
install_parser = subparsers.add_parser('install',
help='Install wheel files to a local '
'directory (synonym of lysis)')
install_parser.set_defaults(command=install)
# Add synonym just for the pun
lysis_parser = subparsers.add_parser('lysis',
help='Install wheel files to a local '
'directory (synonym of install)')
lysis_parser.set_defaults(command=install)
for parser in (install_parser, lysis_parser):
parser.add_argument('--install-dir', '-i',
help='Directory where to install packages')
parser.add_argument('packages', metavar='PACKAGE', nargs='*',
help='Wheel files to install (path)')
| bsd-3-clause |
gymnasium/edx-platform | openedx/core/djangoapps/monitoring_utils/middleware.py | 12 | 6146 | """
Middleware for handling the storage, aggregation, and reporting of custom
metrics for monitoring.
At this time, the custom metrics can only be reported to New Relic.
This middleware only calls into the newrelic agent if there are metrics to
report for the current request, so it incurs no processing overhead for
request handlers that do not record custom metrics.
"""
import logging
from uuid import uuid4
import psutil
from openedx.core.djangoapps.request_cache import get_cache
from openedx.core.djangoapps.waffle_utils import WaffleSwitchNamespace
log = logging.getLogger(__name__)
try:
import newrelic.agent
except ImportError:
log.warning("Unable to load NewRelic agent module")
newrelic = None # pylint: disable=invalid-name
REQUEST_CACHE_KEY = 'monitoring_custom_metrics'
WAFFLE_NAMESPACE = 'monitoring_utils'
class MonitoringCustomMetrics(object):
"""
    The middleware class. Make sure to add it below the request cache
    middleware in MIDDLEWARE_CLASSES.
"""
@classmethod
def _get_metrics_cache(cls):
"""
Get a reference to the part of the request cache wherein we store New
Relic custom metrics related to the current request.
"""
return get_cache(name=REQUEST_CACHE_KEY)
@classmethod
def accumulate_metric(cls, name, value):
"""
Accumulate a custom metric (name and value) in the metrics cache.
"""
metrics_cache = cls._get_metrics_cache()
metrics_cache.setdefault(name, 0)
metrics_cache[name] += value
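    # Example (hypothetical metric name): request handlers can call
    #   MonitoringCustomMetrics.accumulate_metric('xqueue_calls', 1)
    # any number of times; the summed value is attached to the New Relic
    # transaction once, in _batch_report().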
@classmethod
def _batch_report(cls):
"""
Report the collected custom metrics to New Relic.
"""
if not newrelic:
return
metrics_cache = cls._get_metrics_cache()
for metric_name, metric_value in metrics_cache.iteritems():
newrelic.agent.add_custom_parameter(metric_name, metric_value)
# Whether or not there was an exception, report any custom NR metrics that
# may have been collected.
def process_response(self, request, response): # pylint: disable=unused-argument
"""
Django middleware handler to process a response
"""
self._batch_report()
return response
def process_exception(self, request, exception): # pylint: disable=unused-argument
"""
Django middleware handler to process an exception
"""
self._batch_report()
return None
class MonitoringMemoryMiddleware(object):
"""
Middleware for monitoring memory usage.
"""
memory_data_key = u'memory_data'
guid_key = u'guid_key'
def process_request(self, request):
if self._is_enabled():
self._cache[self.guid_key] = unicode(uuid4())
log_prefix = self._log_prefix(u"Before", request)
self._cache[self.memory_data_key] = self._memory_data(log_prefix)
def process_response(self, request, response):
if self._is_enabled():
log_prefix = self._log_prefix(u"After", request)
new_memory_data = self._memory_data(log_prefix)
log_prefix = self._log_prefix(u"Diff", request)
self._log_diff_memory_data(log_prefix, new_memory_data, self._cache.get(self.memory_data_key))
return response
@property
def _cache(self):
"""
Namespaced request cache for tracking memory usage.
"""
return get_cache(name='monitoring_memory')
def _log_prefix(self, prefix, request):
"""
Returns a formatted prefix for logging for the given request.
"""
# After a celery task runs, the request cache is cleared. So if celery
        # tasks are running synchronously (CELERY_ALWAYS_EAGER), "guid_key"
# will no longer be in the request cache when process_response executes.
cached_guid = self._cache.get(self.guid_key) or u"without_guid"
return u"{} request '{} {} {}'".format(prefix, request.method, request.path, cached_guid)
def _memory_data(self, log_prefix):
"""
Returns a dict with information for current memory utilization.
Uses log_prefix in log statements.
"""
machine_data = psutil.virtual_memory()
process = psutil.Process()
process_data = {
'memory_info': process.get_memory_info(),
'ext_memory_info': process.get_ext_memory_info(),
'memory_percent': process.get_memory_percent(),
'cpu_percent': process.get_cpu_percent(),
}
log.info(u"%s Machine memory usage: %s; Process memory usage: %s", log_prefix, machine_data, process_data)
return {
'machine_data': machine_data,
'process_data': process_data,
}
def _log_diff_memory_data(self, prefix, new_memory_data, old_memory_data):
"""
Computes and logs the difference in memory utilization
between the given old and new memory data.
"""
def _vmem_used(memory_data):
return memory_data['machine_data'].used
def _process_mem_percent(memory_data):
return memory_data['process_data']['memory_percent']
def _process_rss(memory_data):
return memory_data['process_data']['memory_info'].rss
def _process_vms(memory_data):
return memory_data['process_data']['memory_info'].vms
if new_memory_data and old_memory_data:
log.info(
u"%s Diff Vmem used: %s, Diff percent memory: %s, Diff rss: %s, Diff vms: %s",
prefix,
_vmem_used(new_memory_data) - _vmem_used(old_memory_data),
_process_mem_percent(new_memory_data) - _process_mem_percent(old_memory_data),
_process_rss(new_memory_data) - _process_rss(old_memory_data),
_process_vms(new_memory_data) - _process_vms(old_memory_data),
)
def _is_enabled(self):
"""
Returns whether this middleware is enabled.
"""
return WaffleSwitchNamespace(name=WAFFLE_NAMESPACE).is_enabled(u'enable_memory_middleware')
| agpl-3.0 |
apporc/nova | nova/tests/unit/api/openstack/compute/test_services.py | 17 | 39103 | # Copyright 2012 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import calendar
import datetime
import iso8601
import mock
from oslo_utils import timeutils
import webob.exc
from nova.api.openstack import api_version_request as api_version
from nova.api.openstack.compute.legacy_v2.contrib import services \
as services_v2
from nova.api.openstack.compute import services as services_v21
from nova.api.openstack import extensions
from nova.api.openstack import wsgi as os_wsgi
from nova import availability_zones
from nova.cells import utils as cells_utils
from nova.compute import cells_api
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.servicegroup.drivers import db as db_driver
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit.objects import test_service
fake_services_list = [
dict(test_service.fake_service,
binary='nova-scheduler',
host='host1',
id=1,
disabled=True,
topic='scheduler',
updated_at=datetime.datetime(2012, 10, 29, 13, 42, 2),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 27),
forced_down=False,
disabled_reason='test1'),
dict(test_service.fake_service,
binary='nova-compute',
host='host1',
id=2,
disabled=True,
topic='compute',
updated_at=datetime.datetime(2012, 10, 29, 13, 42, 5),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 27),
forced_down=False,
disabled_reason='test2'),
dict(test_service.fake_service,
binary='nova-scheduler',
host='host2',
id=3,
disabled=False,
topic='scheduler',
updated_at=datetime.datetime(2012, 9, 19, 6, 55, 34),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 28),
forced_down=False,
disabled_reason=None),
dict(test_service.fake_service,
binary='nova-compute',
host='host2',
id=4,
disabled=True,
topic='compute',
updated_at=datetime.datetime(2012, 9, 18, 8, 3, 38),
created_at=datetime.datetime(2012, 9, 18, 2, 46, 28),
forced_down=False,
disabled_reason='test4'),
]
class FakeRequest(object):
environ = {"nova.context": context.get_admin_context()}
GET = {}
    def __init__(self, version=os_wsgi.DEFAULT_API_VERSION):
super(FakeRequest, self).__init__()
self.api_version_request = api_version.APIVersionRequest(version)
class FakeRequestWithService(FakeRequest):
GET = {"binary": "nova-compute"}
class FakeRequestWithHost(FakeRequest):
GET = {"host": "host1"}
class FakeRequestWithHostService(FakeRequest):
GET = {"host": "host1", "binary": "nova-compute"}
def fake_service_get_all(services):
def service_get_all(context, filters=None, set_zones=False):
if set_zones or 'availability_zone' in filters:
return availability_zones.set_availability_zones(context,
services)
return services
return service_get_all
def fake_db_api_service_get_all(context, disabled=None):
return fake_services_list
def fake_db_service_get_by_host_binary(services):
def service_get_by_host_binary(context, host, binary):
for service in services:
if service['host'] == host and service['binary'] == binary:
return service
raise exception.HostBinaryNotFound(host=host, binary=binary)
return service_get_by_host_binary
def fake_service_get_by_host_binary(context, host, binary):
fake = fake_db_service_get_by_host_binary(fake_services_list)
return fake(context, host, binary)
def _service_get_by_id(services, value):
for service in services:
if service['id'] == value:
return service
return None
def fake_db_service_update(services):
def service_update(context, service_id, values):
service = _service_get_by_id(services, service_id)
if service is None:
raise exception.ServiceNotFound(service_id=service_id)
return service
return service_update
def fake_service_update(context, service_id, values):
fake = fake_db_service_update(fake_services_list)
return fake(context, service_id, values)
def fake_utcnow():
return datetime.datetime(2012, 10, 29, 13, 42, 11)
fake_utcnow.override_time = None
def fake_utcnow_ts():
d = fake_utcnow()
return calendar.timegm(d.utctimetuple())
class ServicesTestV21(test.TestCase):
service_is_up_exc = webob.exc.HTTPInternalServerError
bad_request = exception.ValidationError
wsgi_api_version = os_wsgi.DEFAULT_API_VERSION
def _set_up_controller(self):
self.controller = services_v21.ServiceController()
def setUp(self):
super(ServicesTestV21, self).setUp()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self._set_up_controller()
self.stubs.Set(self.controller.host_api, "service_get_all",
fake_service_get_all(fake_services_list))
self.stubs.Set(timeutils, "utcnow", fake_utcnow)
self.stubs.Set(timeutils, "utcnow_ts", fake_utcnow_ts)
self.stubs.Set(db, "service_get_by_host_and_binary",
fake_db_service_get_by_host_binary(fake_services_list))
self.stubs.Set(db, "service_update",
fake_db_service_update(fake_services_list))
self.req = fakes.HTTPRequest.blank('')
def _process_output(self, services, has_disabled=False, has_id=False):
return services
def test_services_list(self):
req = FakeRequest()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'id': 1,
'state': 'up',
'disabled_reason': 'test1',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'disabled_reason': 'test2',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'id': 3,
'status': 'enabled',
'disabled_reason': None,
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'id': 4,
'status': 'disabled',
'disabled_reason': 'test4',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_host(self):
req = FakeRequestWithHost()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'disabled_reason': 'test1',
'id': 1,
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'disabled_reason': 'test2',
'id': 2,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_service(self):
req = FakeRequestWithService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'disabled_reason': 'test2',
'id': 2,
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'disabled_reason': 'test4',
'id': 4,
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_host_service(self):
req = FakeRequestWithHostService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'disabled_reason': 'test2',
'id': 2,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_detail(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequest()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'id': 1,
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'id': 2,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'status': 'enabled',
'id': 3,
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34),
'disabled_reason': None},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'id': 4,
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_host(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithHost()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'id': 1,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-compute',
'host': 'host2',
'id': 4,
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_host_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithHostService()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'id': 2,
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_services_detail_with_delete_extension(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
req = FakeRequest()
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'id': 1,
'zone': 'internal',
'disabled_reason': 'test1',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'id': 2,
'zone': 'nova',
'disabled_reason': 'test2',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'disabled_reason': None,
'id': 3,
'zone': 'internal',
'status': 'enabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'id': 4,
'disabled_reason': 'test4',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response, has_id=True)
self.assertEqual(res_dict, response)
def test_services_enable(self):
def _service_update(context, service_id, values):
self.assertIsNone(values['disabled_reason'])
return dict(test_service.fake_service, id=service_id, **values)
self.stubs.Set(db, "service_update", _service_update)
body = {'host': 'host1', 'binary': 'nova-compute'}
res_dict = self.controller.update(self.req, "enable", body=body)
self.assertEqual(res_dict['service']['status'], 'enabled')
self.assertNotIn('disabled_reason', res_dict['service'])
def test_services_enable_with_invalid_host(self):
body = {'host': 'invalid', 'binary': 'nova-compute'}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
self.req,
"enable",
body=body)
def test_services_enable_with_invalid_binary(self):
body = {'host': 'host1', 'binary': 'invalid'}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
self.req,
"enable",
body=body)
def test_services_disable(self):
body = {'host': 'host1', 'binary': 'nova-compute'}
res_dict = self.controller.update(self.req, "disable", body=body)
self.assertEqual(res_dict['service']['status'], 'disabled')
self.assertNotIn('disabled_reason', res_dict['service'])
def test_services_disable_with_invalid_host(self):
body = {'host': 'invalid', 'binary': 'nova-compute'}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
self.req,
"disable",
body=body)
def test_services_disable_with_invalid_binary(self):
body = {'host': 'host1', 'binary': 'invalid'}
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.update,
self.req,
"disable",
body=body)
def test_services_disable_log_reason(self):
self.ext_mgr.extensions['os-extended-services'] = True
body = {'host': 'host1',
'binary': 'nova-compute',
'disabled_reason': 'test-reason',
}
res_dict = self.controller.update(self.req,
"disable-log-reason",
body=body)
self.assertEqual(res_dict['service']['status'], 'disabled')
self.assertEqual(res_dict['service']['disabled_reason'], 'test-reason')
def test_mandatory_reason_field(self):
self.ext_mgr.extensions['os-extended-services'] = True
body = {'host': 'host1',
'binary': 'nova-compute',
}
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.update, self.req, "disable-log-reason",
body=body)
def test_invalid_reason_field(self):
self.ext_mgr.extensions['os-extended-services'] = True
reason = 'a' * 256
body = {'host': 'host1',
'binary': 'nova-compute',
'disabled_reason': reason,
}
self.assertRaises(self.bad_request,
self.controller.update, self.req, "disable-log-reason",
body=body)
def test_services_delete(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
with mock.patch.object(self.controller.host_api,
'service_delete') as service_delete:
self.controller.delete(self.req, '1')
service_delete.assert_called_once_with(
self.req.environ['nova.context'], '1')
self.assertEqual(self.controller.delete.wsgi_code, 204)
def test_services_delete_not_found(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
self.assertRaises(webob.exc.HTTPNotFound,
self.controller.delete, self.req, 1234)
def test_services_delete_bad_request(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.delete, self.req, 'abc')
# This test is just to verify that the servicegroup API gets used when
# calling the API
def test_services_with_exception(self):
def dummy_is_up(self, dummy):
raise KeyError()
self.stubs.Set(db_driver.DbDriver, 'is_up', dummy_is_up)
req = FakeRequestWithHostService()
self.assertRaises(self.service_is_up_exc, self.controller.index, req)
class ServicesTestV211(ServicesTestV21):
wsgi_api_version = '2.11'
def test_services_list(self):
req = FakeRequest(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'id': 1,
'state': 'up',
'forced_down': False,
'disabled_reason': 'test1',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'disabled_reason': 'test2',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'id': 3,
'status': 'enabled',
'disabled_reason': None,
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'id': 4,
'status': 'disabled',
'disabled_reason': 'test4',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_host(self):
req = FakeRequestWithHost(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'disabled_reason': 'test1',
'id': 1,
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'disabled_reason': 'test2',
'id': 2,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_service(self):
req = FakeRequestWithService(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'disabled_reason': 'test2',
'id': 2,
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'disabled_reason': 'test4',
'id': 4,
'status': 'disabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_list_with_host_service(self):
req = FakeRequestWithHostService(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'disabled_reason': 'test2',
'id': 2,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)}]}
self._process_output(response)
self.assertEqual(res_dict, response)
def test_services_detail(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequest(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'status': 'disabled',
'id': 1,
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'id': 2,
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-scheduler',
'host': 'host2',
'zone': 'internal',
'status': 'enabled',
'id': 3,
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34),
'disabled_reason': None},
{'binary': 'nova-compute',
'host': 'host2',
'zone': 'nova',
'id': 4,
'status': 'disabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_host(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithHost(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'zone': 'internal',
'id': 1,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
'disabled_reason': 'test1'},
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithService(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'id': 2,
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'},
{'binary': 'nova-compute',
'host': 'host2',
'id': 4,
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38),
'disabled_reason': 'test4'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_service_detail_with_host_service(self):
self.ext_mgr.extensions['os-extended-services'] = True
req = FakeRequestWithHostService(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-compute',
'host': 'host1',
'zone': 'nova',
'status': 'disabled',
'id': 2,
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5),
'disabled_reason': 'test2'}]}
self._process_output(response, has_disabled=True)
self.assertEqual(res_dict, response)
def test_services_detail_with_delete_extension(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
req = FakeRequest(self.wsgi_api_version)
res_dict = self.controller.index(req)
response = {'services': [
{'binary': 'nova-scheduler',
'host': 'host1',
'id': 1,
'zone': 'internal',
'disabled_reason': 'test1',
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2)},
{'binary': 'nova-compute',
'host': 'host1',
'id': 2,
'zone': 'nova',
'disabled_reason': 'test2',
'status': 'disabled',
'state': 'up',
'forced_down': False,
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5)},
{'binary': 'nova-scheduler',
'host': 'host2',
'disabled_reason': None,
'id': 3,
'zone': 'internal',
'status': 'enabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34)},
{'binary': 'nova-compute',
'host': 'host2',
'id': 4,
'disabled_reason': 'test4',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'forced_down': False,
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38)}]}
self._process_output(response, has_id=True)
self.assertEqual(res_dict, response)
class ServicesTestV20(ServicesTestV21):
service_is_up_exc = KeyError
bad_request = webob.exc.HTTPBadRequest
def setUp(self):
super(ServicesTestV20, self).setUp()
self.req = fakes.HTTPRequest.blank('', use_admin_context=True)
self.non_admin_req = fakes.HTTPRequest.blank('')
def _set_up_controller(self):
self.controller = services_v2.ServiceController(self.ext_mgr)
def test_services_delete_not_enabled(self):
self.assertRaises(webob.exc.HTTPMethodNotAllowed,
self.controller.delete, self.req, '300')
def _process_output(self, services, has_disabled=False, has_id=False):
for service in services['services']:
if not has_disabled:
service.pop('disabled_reason')
if not has_id:
service.pop('id')
return services
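# Illustrative example (not in the original tests): with the defaults
# has_disabled=False and has_id=False, an entry such as
# {'id': 1, 'disabled_reason': 'x', 'status': 'enabled'} is reduced
# in place to {'status': 'enabled'} before the comparison runs.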
def test_update_with_non_admin(self):
self.assertRaises(exception.AdminRequired, self.controller.update,
self.non_admin_req, fakes.FAKE_UUID, body={})
def test_delete_with_non_admin(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
self.assertRaises(exception.AdminRequired, self.controller.delete,
self.non_admin_req, fakes.FAKE_UUID)
def test_index_with_non_admin(self):
self.assertRaises(exception.AdminRequired, self.controller.index,
self.non_admin_req)
class ServicesCellsTestV21(test.TestCase):
def setUp(self):
super(ServicesCellsTestV21, self).setUp()
host_api = cells_api.HostAPI()
self.ext_mgr = extensions.ExtensionManager()
self.ext_mgr.extensions = {}
self._set_up_controller()
self.controller.host_api = host_api
self.stubs.Set(timeutils, "utcnow", fake_utcnow)
self.stubs.Set(timeutils, "utcnow_ts", fake_utcnow_ts)
services_list = []
for service in fake_services_list:
service = service.copy()
del service['version']
service_obj = objects.Service(**service)
service_proxy = cells_utils.ServiceProxy(service_obj, 'cell1')
services_list.append(service_proxy)
self.stubs.Set(host_api.cells_rpcapi, "service_get_all",
fake_service_get_all(services_list))
def _set_up_controller(self):
self.controller = services_v21.ServiceController()
def _process_out(self, res_dict):
for res in res_dict['services']:
res.pop('disabled_reason')
def test_services_detail(self):
self.ext_mgr.extensions['os-extended-services-delete'] = True
req = FakeRequest()
res_dict = self.controller.index(req)
utc = iso8601.iso8601.Utc()
response = {'services': [
{'id': 'cell1@1',
'binary': 'nova-scheduler',
'host': 'cell1@host1',
'zone': 'internal',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2,
tzinfo=utc)},
{'id': 'cell1@2',
'binary': 'nova-compute',
'host': 'cell1@host1',
'zone': 'nova',
'status': 'disabled',
'state': 'up',
'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 5,
tzinfo=utc)},
{'id': 'cell1@3',
'binary': 'nova-scheduler',
'host': 'cell1@host2',
'zone': 'internal',
'status': 'enabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 19, 6, 55, 34,
tzinfo=utc)},
{'id': 'cell1@4',
'binary': 'nova-compute',
'host': 'cell1@host2',
'zone': 'nova',
'status': 'disabled',
'state': 'down',
'updated_at': datetime.datetime(2012, 9, 18, 8, 3, 38,
tzinfo=utc)}]}
self._process_out(res_dict)
self.assertEqual(response, res_dict)
class ServicesCellsTestV20(ServicesCellsTestV21):
def _set_up_controller(self):
self.controller = services_v2.ServiceController(self.ext_mgr)
def _process_out(self, res_dict):
pass
class ServicesPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(ServicesPolicyEnforcementV21, self).setUp()
self.controller = services_v21.ServiceController()
self.req = fakes.HTTPRequest.blank('')
def test_update_policy_failed(self):
rule_name = "os_compute_api:os-services"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.update, self.req, fakes.FAKE_UUID,
body={'host': 'host1',
'binary': 'nova-compute'})
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_delete_policy_failed(self):
rule_name = "os_compute_api:os-services"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.delete, self.req, fakes.FAKE_UUID)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_index_policy_failed(self):
rule_name = "os_compute_api:os-services"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index, self.req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
| apache-2.0 |
gpndata/grpc | src/python/grpcio/grpc/framework/core/__init__.py | 1496 | 1530 | # Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| bsd-3-clause |
google-research/google-research | fairness_teaching/baseline/all_real.py | 1 | 5585 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import argparse
import numpy as np
import tensorflow as tf
import data
import model
# pylint: skip-file
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', type=str, default='0', help='GPU to use [default: GPU 0]')
parser.add_argument('--real_path', default='../data/resize128')
parser.add_argument('--fake_path', default='../data/fake')
parser.add_argument('--train_label', default='../data/annotations/train_label.txt')
parser.add_argument('--test_label', default='../data/annotations/test_label.txt')
parser.add_argument('--valid_label', default='../data/annotations/val_label.txt')
parser.add_argument('--max_epoch', type=int, default=20, help='Epoch to run [default: 20]')
parser.add_argument('--batch_size', type=int, default=64, help='Batch Size during training [default: 64]')
parser.add_argument('--n_class', type=int, default=2, help='Number of class [default: 2]')
parser.add_argument('--lr', type=float, default=0.1, help='Initial learning rate [default: 0.1]')
parser.add_argument('--momentum', type=float, default=0.9, help='Momentum for the optimizer [default: 0.9]')
parser.add_argument('--optimizer', default='momentum', help='adam or momentum [default: momentum]')
FLAGS = parser.parse_args()
ATT_ID = {'5_o_Clock_Shadow': 0, 'Arched_Eyebrows': 1, 'Attractive': 2,
'Bags_Under_Eyes': 3, 'Bald': 4, 'Bangs': 5, 'Big_Lips': 6,
'Big_Nose': 7, 'Black_Hair': 8, 'Blond_Hair': 9, 'Blurry': 10,
'Brown_Hair': 11, 'Bushy_Eyebrows': 12, 'Chubby': 13,
'Double_Chin': 14, 'Eyeglasses': 15, 'Goatee': 16,
'Gray_Hair': 17, 'Heavy_Makeup': 18, 'High_Cheekbones': 19,
'Male': 20, 'Mouth_Slightly_Open': 21, 'Mustache': 22,
'Narrow_Eyes': 23, 'No_Beard': 24, 'Oval_Face': 25,
'Pale_Skin': 26, 'Pointy_Nose': 27, 'Receding_Hairline': 28,
'Rosy_Cheeks': 29, 'Sideburns': 30, 'Smiling': 31,
'Straight_Hair': 32, 'Wavy_Hair': 33, 'Wearing_Earrings': 34,
'Wearing_Hat': 35, 'Wearing_Lipstick': 36,
'Wearing_Necklace': 37, 'Wearing_Necktie': 38, 'Young': 39}
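# Illustrative note (not in the original script): ATT_ID maps each CelebA
# attribute name to its column in the 40-dim annotation vector, e.g.
#   gender_col = ATT_ID['Male']          # == 20
#   is_male = attributes[:, gender_col]  # hypothetical label array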
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]=FLAGS.gpu
# tf.set_random_seed(0)  # 0 for 512
tf.set_random_seed(100)
(train_images, train_labels, train_att), train_iters = data.data_train(FLAGS.real_path, FLAGS.train_label, 64)
(fake_images, fake_labels, fake_att), fake_iters = data.data_fake(FLAGS.fake_path, FLAGS.train_label, 64)
(valid_images, valid_labels, valid_att), valid_iters = data.data_test(FLAGS.real_path, FLAGS.valid_label, FLAGS.batch_size)
(test_images, test_labels, test_att), test_iters = data.data_test(FLAGS.real_path, FLAGS.test_label, FLAGS.batch_size)
batch_images = tf.placeholder(tf.float32,[None,128,128,3])
batch_labels = tf.placeholder(tf.int32,[None,])
is_training = tf.placeholder(tf.bool)
lr_ph = tf.placeholder(tf.float32)
lr = FLAGS.lr
Y_score = model.vgg(batch_images, FLAGS.n_class, is_training)
Y_hat = tf.nn.softmax(Y_score)
Y_pred = tf.argmax(Y_hat, 1)
Y_label = tf.to_float(tf.one_hot(batch_labels, FLAGS.n_class))
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(logits = Y_score, labels = Y_label)
loss_op = tf.reduce_mean(cross_entropy)
correct_prediction = tf.equal(tf.argmax(Y_hat, 1), tf.argmax(Y_label, 1))
acc_op = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
update_op = tf.train.MomentumOptimizer(lr_ph, FLAGS.momentum).minimize(loss_op)
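# Note (added comment): the learning rate is fed through the lr_ph
# placeholder on every sess.run() call, so the Python-side schedule in the
# training loop can decay it without rebuilding the optimizer graph.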
init = tf.global_variables_initializer()
print("================\n\n",train_iters, fake_iters)
with tf.Session() as sess:
sess.run(init)
for i in range(FLAGS.max_epoch):
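# Note (added comment): with the default --max_epoch of 20 these decay
# branches never trigger; they only apply if training is extended past
# epoch 30/40.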
if i == 30:
lr *= 0.1
elif i == 40:
lr *= 0.1
for j in range(train_iters):
co_images, co_labels = sess.run([train_images,train_labels])
# tr_images, tr_labels = sess.run([train_images,train_labels])
# fa_images, fa_labels = sess.run([fake_images,fake_labels])
# co_images = np.concatenate((tr_images,fa_images),axis=0)
# co_labels = np.concatenate((tr_labels,fa_labels),axis=0)
loss, acc, _ = sess.run([loss_op, acc_op, update_op], {batch_images:co_images, batch_labels:co_labels, lr_ph:lr, is_training:True})
if j % 50 == 0:
print('====epoch_%d====iter_%d: loss=%.4f, train_acc=%.4f' % (i, j, loss, acc))
valid_acc = 0.0
y_pred = []
y_label = []
y_att = []
for k in range(valid_iters):
va_images, va_labels, va_att = sess.run([valid_images, valid_labels, valid_att])
batch_acc, batch_pred = sess.run([acc_op,Y_pred], {batch_images:va_images, batch_labels:va_labels, is_training:False})
valid_acc += batch_acc
y_pred += batch_pred.tolist()
y_label += va_labels.tolist()
y_att += va_att.tolist()
valid_acc = valid_acc / float(valid_iters)
valid_eo = data.cal_eo(y_att, y_label, y_pred)
print('====epoch_%d: valid_acc=%.4f, valid_eo=%.4f' % (i, valid_acc, valid_eo[-1]))
print('eo: ', valid_eo[0], valid_eo[1])
print('eo: ', valid_eo[2], valid_eo[3])
| apache-2.0 |
MCMic/Sick-Beard | lib/unidecode/x0c6.py | 253 | 4490 | data = (
'yeoss', # 0x00
'yeong', # 0x01
'yeoj', # 0x02
'yeoc', # 0x03
'yeok', # 0x04
'yeot', # 0x05
'yeop', # 0x06
'yeoh', # 0x07
'ye', # 0x08
'yeg', # 0x09
'yegg', # 0x0a
'yegs', # 0x0b
'yen', # 0x0c
'yenj', # 0x0d
'yenh', # 0x0e
'yed', # 0x0f
'yel', # 0x10
'yelg', # 0x11
'yelm', # 0x12
'yelb', # 0x13
'yels', # 0x14
'yelt', # 0x15
'yelp', # 0x16
'yelh', # 0x17
'yem', # 0x18
'yeb', # 0x19
'yebs', # 0x1a
'yes', # 0x1b
'yess', # 0x1c
'yeng', # 0x1d
'yej', # 0x1e
'yec', # 0x1f
'yek', # 0x20
'yet', # 0x21
'yep', # 0x22
'yeh', # 0x23
'o', # 0x24
'og', # 0x25
'ogg', # 0x26
'ogs', # 0x27
'on', # 0x28
'onj', # 0x29
'onh', # 0x2a
'od', # 0x2b
'ol', # 0x2c
'olg', # 0x2d
'olm', # 0x2e
'olb', # 0x2f
'ols', # 0x30
'olt', # 0x31
'olp', # 0x32
'olh', # 0x33
'om', # 0x34
'ob', # 0x35
'obs', # 0x36
'os', # 0x37
'oss', # 0x38
'ong', # 0x39
'oj', # 0x3a
'oc', # 0x3b
'ok', # 0x3c
'ot', # 0x3d
'op', # 0x3e
'oh', # 0x3f
'wa', # 0x40
'wag', # 0x41
'wagg', # 0x42
'wags', # 0x43
'wan', # 0x44
'wanj', # 0x45
'wanh', # 0x46
'wad', # 0x47
'wal', # 0x48
'walg', # 0x49
'walm', # 0x4a
'walb', # 0x4b
'wals', # 0x4c
'walt', # 0x4d
'walp', # 0x4e
'walh', # 0x4f
'wam', # 0x50
'wab', # 0x51
'wabs', # 0x52
'was', # 0x53
'wass', # 0x54
'wang', # 0x55
'waj', # 0x56
'wac', # 0x57
'wak', # 0x58
'wat', # 0x59
'wap', # 0x5a
'wah', # 0x5b
'wae', # 0x5c
'waeg', # 0x5d
'waegg', # 0x5e
'waegs', # 0x5f
'waen', # 0x60
'waenj', # 0x61
'waenh', # 0x62
'waed', # 0x63
'wael', # 0x64
'waelg', # 0x65
'waelm', # 0x66
'waelb', # 0x67
'waels', # 0x68
'waelt', # 0x69
'waelp', # 0x6a
'waelh', # 0x6b
'waem', # 0x6c
'waeb', # 0x6d
'waebs', # 0x6e
'waes', # 0x6f
'waess', # 0x70
'waeng', # 0x71
'waej', # 0x72
'waec', # 0x73
'waek', # 0x74
'waet', # 0x75
'waep', # 0x76
'waeh', # 0x77
'oe', # 0x78
'oeg', # 0x79
'oegg', # 0x7a
'oegs', # 0x7b
'oen', # 0x7c
'oenj', # 0x7d
'oenh', # 0x7e
'oed', # 0x7f
'oel', # 0x80
'oelg', # 0x81
'oelm', # 0x82
'oelb', # 0x83
'oels', # 0x84
'oelt', # 0x85
'oelp', # 0x86
'oelh', # 0x87
'oem', # 0x88
'oeb', # 0x89
'oebs', # 0x8a
'oes', # 0x8b
'oess', # 0x8c
'oeng', # 0x8d
'oej', # 0x8e
'oec', # 0x8f
'oek', # 0x90
'oet', # 0x91
'oep', # 0x92
'oeh', # 0x93
'yo', # 0x94
'yog', # 0x95
'yogg', # 0x96
'yogs', # 0x97
'yon', # 0x98
'yonj', # 0x99
'yonh', # 0x9a
'yod', # 0x9b
'yol', # 0x9c
'yolg', # 0x9d
'yolm', # 0x9e
'yolb', # 0x9f
'yols', # 0xa0
'yolt', # 0xa1
'yolp', # 0xa2
'yolh', # 0xa3
'yom', # 0xa4
'yob', # 0xa5
'yobs', # 0xa6
'yos', # 0xa7
'yoss', # 0xa8
'yong', # 0xa9
'yoj', # 0xaa
'yoc', # 0xab
'yok', # 0xac
'yot', # 0xad
'yop', # 0xae
'yoh', # 0xaf
'u', # 0xb0
'ug', # 0xb1
'ugg', # 0xb2
'ugs', # 0xb3
'un', # 0xb4
'unj', # 0xb5
'unh', # 0xb6
'ud', # 0xb7
'ul', # 0xb8
'ulg', # 0xb9
'ulm', # 0xba
'ulb', # 0xbb
'uls', # 0xbc
'ult', # 0xbd
'ulp', # 0xbe
'ulh', # 0xbf
'um', # 0xc0
'ub', # 0xc1
'ubs', # 0xc2
'us', # 0xc3
'uss', # 0xc4
'ung', # 0xc5
'uj', # 0xc6
'uc', # 0xc7
'uk', # 0xc8
'ut', # 0xc9
'up', # 0xca
'uh', # 0xcb
'weo', # 0xcc
'weog', # 0xcd
'weogg', # 0xce
'weogs', # 0xcf
'weon', # 0xd0
'weonj', # 0xd1
'weonh', # 0xd2
'weod', # 0xd3
'weol', # 0xd4
'weolg', # 0xd5
'weolm', # 0xd6
'weolb', # 0xd7
'weols', # 0xd8
'weolt', # 0xd9
'weolp', # 0xda
'weolh', # 0xdb
'weom', # 0xdc
'weob', # 0xdd
'weobs', # 0xde
'weos', # 0xdf
'weoss', # 0xe0
'weong', # 0xe1
'weoj', # 0xe2
'weoc', # 0xe3
'weok', # 0xe4
'weot', # 0xe5
'weop', # 0xe6
'weoh', # 0xe7
'we', # 0xe8
'weg', # 0xe9
'wegg', # 0xea
'wegs', # 0xeb
'wen', # 0xec
'wenj', # 0xed
'wenh', # 0xee
'wed', # 0xef
'wel', # 0xf0
'welg', # 0xf1
'welm', # 0xf2
'welb', # 0xf3
'wels', # 0xf4
'welt', # 0xf5
'welp', # 0xf6
'welh', # 0xf7
'wem', # 0xf8
'web', # 0xf9
'webs', # 0xfa
'wes', # 0xfb
'wess', # 0xfc
'weng', # 0xfd
'wej', # 0xfe
'wec', # 0xff
)
| gpl-3.0 |
tuhangdi/django | tests/model_regress/tests.py | 326 | 8962 | from __future__ import unicode_literals
import datetime
from operator import attrgetter
from django.core.exceptions import ValidationError
from django.db import router
from django.db.models.sql import InsertQuery
from django.test import TestCase, skipUnlessDBFeature
from django.utils import six
from django.utils.timezone import get_fixed_timezone
from .models import (
Article, BrokenUnicodeMethod, Department, Event, Model1, Model2, Model3,
NonAutoPK, Party, Worker,
)
class ModelTests(TestCase):
# The bug is that the following queries would raise:
# "TypeError: Related Field has invalid lookup: gte"
def test_related_gte_lookup(self):
"""
Regression test for #10153: foreign key __gte lookups.
"""
Worker.objects.filter(department__gte=0)
def test_related_lte_lookup(self):
"""
Regression test for #10153: foreign key __lte lookups.
"""
Worker.objects.filter(department__lte=0)
def test_sql_insert_compiler_return_id_attribute(self):
"""
Regression test for #14019: SQLInsertCompiler.as_sql() failure
"""
db = router.db_for_write(Party)
query = InsertQuery(Party)
query.insert_values([Party._meta.fields[0]], [], raw=False)
# this line will raise an AttributeError without the accompanying fix
query.get_compiler(using=db).as_sql()
def test_empty_choice(self):
# NOTE: Part of the regression test here is merely parsing the model
# declaration. The verbose_name, in particular, did not always work.
a = Article.objects.create(
headline="Look at me!", pub_date=datetime.datetime.now()
)
# An empty choice field should return None for the display name.
self.assertIs(a.get_status_display(), None)
# Empty strings should be returned as Unicode
a = Article.objects.get(pk=a.pk)
self.assertEqual(a.misc_data, '')
self.assertIs(type(a.misc_data), six.text_type)
def test_long_textfield(self):
# TextFields can hold more than 4000 characters (this was broken in
# Oracle).
a = Article.objects.create(
headline="Really, really big",
pub_date=datetime.datetime.now(),
article_text="ABCDE" * 1000
)
a = Article.objects.get(pk=a.pk)
self.assertEqual(len(a.article_text), 5000)
def test_long_unicode_textfield(self):
# TextFields can hold more than 4000 bytes also when they are
# less than 4000 characters
a = Article.objects.create(
headline="Really, really big",
pub_date=datetime.datetime.now(),
article_text='\u05d0\u05d1\u05d2' * 1000
)
a = Article.objects.get(pk=a.pk)
self.assertEqual(len(a.article_text), 3000)
def test_date_lookup(self):
# Regression test for #659
Party.objects.create(when=datetime.datetime(1999, 12, 31))
Party.objects.create(when=datetime.datetime(1998, 12, 31))
Party.objects.create(when=datetime.datetime(1999, 1, 1))
Party.objects.create(when=datetime.datetime(1, 3, 3))
self.assertQuerysetEqual(
Party.objects.filter(when__month=2), []
)
self.assertQuerysetEqual(
Party.objects.filter(when__month=1), [
datetime.date(1999, 1, 1)
],
attrgetter("when")
)
self.assertQuerysetEqual(
Party.objects.filter(when__month=12), [
datetime.date(1999, 12, 31),
datetime.date(1998, 12, 31),
],
attrgetter("when"),
ordered=False
)
self.assertQuerysetEqual(
Party.objects.filter(when__year=1998), [
datetime.date(1998, 12, 31),
],
attrgetter("when")
)
# Regression test for #8510
self.assertQuerysetEqual(
Party.objects.filter(when__day="31"), [
datetime.date(1999, 12, 31),
datetime.date(1998, 12, 31),
],
attrgetter("when"),
ordered=False
)
self.assertQuerysetEqual(
Party.objects.filter(when__month="12"), [
datetime.date(1999, 12, 31),
datetime.date(1998, 12, 31),
],
attrgetter("when"),
ordered=False
)
self.assertQuerysetEqual(
Party.objects.filter(when__year="1998"), [
datetime.date(1998, 12, 31),
],
attrgetter("when")
)
# Regression test for #18969
self.assertQuerysetEqual(
Party.objects.filter(when__year=1), [
datetime.date(1, 3, 3),
],
attrgetter("when")
)
self.assertQuerysetEqual(
Party.objects.filter(when__year='1'), [
datetime.date(1, 3, 3),
],
attrgetter("when")
)
def test_date_filter_null(self):
# Date filtering was failing with NULL date values in SQLite
# (regression test for #3501, among other things).
Party.objects.create(when=datetime.datetime(1999, 1, 1))
Party.objects.create()
p = Party.objects.filter(when__month=1)[0]
self.assertEqual(p.when, datetime.date(1999, 1, 1))
self.assertQuerysetEqual(
Party.objects.filter(pk=p.pk).dates("when", "month"), [
1
],
attrgetter("month")
)
def test_get_next_prev_by_field(self):
# Check that get_next_by_FIELD and get_previous_by_FIELD don't crash
# when we have usecs values stored on the database
#
# It crashed after the Field.get_db_prep_* refactor, because on most
# backends DateTimeFields supports usecs, but DateTimeField.to_python
# didn't recognize them. (Note that
# Model._get_next_or_previous_by_FIELD coerces values to strings)
Event.objects.create(when=datetime.datetime(2000, 1, 1, 16, 0, 0))
Event.objects.create(when=datetime.datetime(2000, 1, 1, 6, 1, 1))
Event.objects.create(when=datetime.datetime(2000, 1, 1, 13, 1, 1))
e = Event.objects.create(when=datetime.datetime(2000, 1, 1, 12, 0, 20, 24))
self.assertEqual(
e.get_next_by_when().when, datetime.datetime(2000, 1, 1, 13, 1, 1)
)
self.assertEqual(
e.get_previous_by_when().when, datetime.datetime(2000, 1, 1, 6, 1, 1)
)
def test_primary_key_foreign_key_types(self):
# Check Department and Worker (non-default PK type)
d = Department.objects.create(id=10, name="IT")
w = Worker.objects.create(department=d, name="Full-time")
self.assertEqual(six.text_type(w), "Full-time")
def test_broken_unicode(self):
# Models with broken unicode methods should still have a printable repr
b = BrokenUnicodeMethod.objects.create(name="Jerry")
self.assertEqual(repr(b), "<BrokenUnicodeMethod: [Bad Unicode data]>")
@skipUnlessDBFeature("supports_timezones")
def test_timezones(self):
# Saving an updating with timezone-aware datetime Python objects.
# Regression test for #10443.
# The idea is that all these creations and saving should work without
# crashing. It's not rocket science.
dt1 = datetime.datetime(2008, 8, 31, 16, 20, tzinfo=get_fixed_timezone(600))
dt2 = datetime.datetime(2008, 8, 31, 17, 20, tzinfo=get_fixed_timezone(600))
obj = Article.objects.create(
headline="A headline", pub_date=dt1, article_text="foo"
)
obj.pub_date = dt2
obj.save()
self.assertEqual(
Article.objects.filter(headline="A headline").update(pub_date=dt1),
1
)
def test_chained_fks(self):
"""
Regression for #18432: Chained foreign keys with to_field produce incorrect query
"""
m1 = Model1.objects.create(pkey=1000)
m2 = Model2.objects.create(model1=m1)
m3 = Model3.objects.create(model2=m2)
# this is the actual test for #18432
m3 = Model3.objects.get(model2=1000)
m3.model2
class ModelValidationTest(TestCase):
def test_pk_validation(self):
NonAutoPK.objects.create(name="one")
again = NonAutoPK(name="one")
self.assertRaises(ValidationError, again.validate_unique)
class EvaluateMethodTest(TestCase):
"""
Regression test for #13640: cannot filter by objects with 'evaluate' attr
"""
def test_model_with_evaluate_method(self):
"""
Ensures that you can filter by objects that have an 'evaluate' attr
"""
dept = Department.objects.create(pk=1, name='abc')
dept.evaluate = 'abc'
Worker.objects.filter(department=dept)
| bsd-3-clause |
laumann/servo | components/script/dom/bindings/codegen/CodegenRust.py | 1 | 243619 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Common codegen classes.
from collections import defaultdict
import operator
import re
import string
import textwrap
import functools
from WebIDL import (
BuiltinTypes,
IDLBuiltinType,
IDLNullValue,
IDLType,
IDLInterfaceMember,
IDLUndefinedValue,
)
from Configuration import (
MemberIsUnforgeable,
getModuleFromObject,
getTypesFromCallback,
getTypesFromDescriptor,
getTypesFromDictionary,
)
AUTOGENERATED_WARNING_COMMENT = \
"/* THIS FILE IS AUTOGENERATED - DO NOT EDIT */\n\n"
FINALIZE_HOOK_NAME = '_finalize'
TRACE_HOOK_NAME = '_trace'
CONSTRUCT_HOOK_NAME = '_constructor'
HASINSTANCE_HOOK_NAME = '_hasInstance'
def replaceFileIfChanged(filename, newContents):
"""
Read a copy of the old file, so that we don't touch it if it hasn't changed.
Returns True if the file was updated, False otherwise.
"""
# XXXjdm This doesn't play well with make right now.
# Force the file to always be updated, or else changing CodegenRust.py
# will cause many autogenerated bindings to be regenerated perpetually
# until the result is actually different.
# oldFileContents = ""
# try:
# with open(filename, 'rb') as oldFile:
# oldFileContents = ''.join(oldFile.readlines())
# except:
# pass
# if newContents == oldFileContents:
# return False
with open(filename, 'wb') as f:
f.write(newContents)
return True
def toStringBool(arg):
return str(bool(arg)).lower()
def toBindingNamespace(arg):
return re.sub("((_workers)?$)", "Binding\\1", arg)
def stripTrailingWhitespace(text):
tail = '\n' if text.endswith('\n') else ''
lines = text.splitlines()
for i in range(len(lines)):
lines[i] = lines[i].rstrip()
return '\n'.join(lines) + tail
def MakeNativeName(name):
return name[0].upper() + name[1:]
builtinNames = {
IDLType.Tags.bool: 'bool',
IDLType.Tags.int8: 'i8',
IDLType.Tags.int16: 'i16',
IDLType.Tags.int32: 'i32',
IDLType.Tags.int64: 'i64',
IDLType.Tags.uint8: 'u8',
IDLType.Tags.uint16: 'u16',
IDLType.Tags.uint32: 'u32',
IDLType.Tags.uint64: 'u64',
IDLType.Tags.unrestricted_float: 'f32',
IDLType.Tags.float: 'Finite<f32>',
IDLType.Tags.unrestricted_double: 'f64',
IDLType.Tags.double: 'Finite<f64>'
}
numericTags = [
IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32, IDLType.Tags.uint32,
IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float,
IDLType.Tags.unrestricted_double
]
def unwrapCastableObject(descriptor, source, codeOnFailure, conversionFunction):
"""
A function for unwrapping an object named by the "source" argument
based on the passed-in descriptor. Returns the string of the Rust expression of
the appropriate type.
codeOnFailure is the code to run if unwrapping fails.
"""
args = {
"failureCode": CGIndenter(CGGeneric(codeOnFailure), 8).define(),
"function": conversionFunction,
"source": source,
}
return """\
match %(function)s(%(source)s) {
Ok(val) => val,
Err(()) => {
%(failureCode)s
}
}""" % args
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them.
lineStartDetector = re.compile("^(?=[^\n#])", re.MULTILINE)
def indent(s, indentLevel=2):
"""
Indent generated code.
Weird secret feature: this doesn't indent lines that start with # (such as
#include lines or #ifdef/#endif).
"""
if s == "":
return s
return re.sub(lineStartDetector, indentLevel * " ", s)
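# Illustrative sketch (added; "let x = 1;" is an arbitrary sample line):
#   indent("let x = 1;\n#endif\n", 4) == "    let x = 1;\n#endif\n"
# Only the non-'#' line is indented, matching lineStartDetector above.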
# dedent() and fill() are often called on the same string multiple
# times. We want to memoize their return values so we don't keep
# recomputing them all the time.
def memoize(fn):
"""
Decorator to memoize a function of one argument. The cache just
grows without bound.
"""
cache = {}
@functools.wraps(fn)
def wrapper(arg):
retval = cache.get(arg)
if retval is None:
retval = cache[arg] = fn(arg)
return retval
return wrapper
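# Illustrative usage (hypothetical function, not part of this file):
#   @memoize
#   def parse(s):
#       return expensive_parse(s)
# The first parse("x") computes and caches the result; subsequent calls
# with the same argument return the cached value.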
@memoize
def dedent(s):
"""
Remove all leading whitespace from s, and remove a blank line
at the beginning.
"""
if s.startswith('\n'):
s = s[1:]
return textwrap.dedent(s)
# This works by transforming the fill()-template to an equivalent
# string.Template.
fill_multiline_substitution_re = re.compile(r"( *)\$\*{(\w+)}(\n)?")
@memoize
def compile_fill_template(template):
"""
Helper function for fill(). Given the template string passed to fill(),
do the reusable part of template processing and return a pair (t,
argModList) that can be used every time fill() is called with that
template argument.
argsModList is list of tuples that represent modifications to be
made to args. Each modification has, in order: i) the arg name,
ii) the modified name, iii) the indent depth.
"""
t = dedent(template)
assert t.endswith("\n") or "\n" not in t
argModList = []
def replace(match):
"""
Replaces a line like ' $*{xyz}\n' with '${xyz_n}',
where n is the indent depth, and add a corresponding entry to
argModList.
Note that this needs to close over argModList, so it has to be
defined inside compile_fill_template().
"""
indentation, name, nl = match.groups()
depth = len(indentation)
# Check that $*{xyz} appears by itself on a line.
prev = match.string[:match.start()]
if (prev and not prev.endswith("\n")) or nl is None:
raise ValueError("Invalid fill() template: $*{%s} must appear by itself on a line" % name)
# Now replace this whole line of template with the indented equivalent.
modified_name = name + "_" + str(depth)
argModList.append((name, modified_name, depth))
return "${" + modified_name + "}"
t = re.sub(fill_multiline_substitution_re, replace, t)
return (string.Template(t), argModList)
def fill(template, **args):
"""
Convenience function for filling in a multiline template.
`fill(template, name1=v1, name2=v2)` is a lot like
`string.Template(template).substitute({"name1": v1, "name2": v2})`.
However, it's shorter, and has a few nice features:
* If `template` is indented, fill() automatically dedents it!
This makes code using fill() with Python's multiline strings
much nicer to look at.
* If `template` starts with a blank line, fill() strips it off.
(Again, convenient with multiline strings.)
* fill() recognizes a special kind of substitution
of the form `$*{name}`.
Use this to paste in, and automatically indent, multiple lines.
(Mnemonic: The `*` is for "multiple lines").
A `$*` substitution must appear by itself on a line, with optional
preceding indentation (spaces only). The whole line is replaced by the
corresponding keyword argument, indented appropriately. If the
argument is an empty string, no output is generated, not even a blank
line.
"""
t, argModList = compile_fill_template(template)
# Now apply argModList to args
for (name, modified_name, depth) in argModList:
if not (args[name] == "" or args[name].endswith("\n")):
raise ValueError("Argument %s with value %r is missing a newline" % (name, args[name]))
args[modified_name] = indent(args[name], depth)
return t.substitute(args)
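# Illustrative example (added; the names are arbitrary):
#   fill(
#       """
#       let ${name} = [
#         $*{items}
#       ];
#       """,
#       name="arr", items="1,\n2,\n")
# returns "let arr = [\n  1,\n  2,\n];\n" -- the $*{items} line is replaced
# by the argument, re-indented to that line's depth (2 spaces here).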
class CGThing():
"""
Abstract base class for things that spit out code.
"""
def __init__(self):
pass # Nothing for now
def define(self):
"""Produce code for a Rust file."""
raise NotImplementedError # Override me!
class CGMethodCall(CGThing):
"""
A class to generate selection of a method signature from a set of
signatures and generation of a call to that signature.
"""
def __init__(self, argsPre, nativeMethodName, static, descriptor, method):
CGThing.__init__(self)
methodName = '\\"%s.%s\\"' % (descriptor.interface.identifier.name, method.identifier.name)
def requiredArgCount(signature):
arguments = signature[1]
if len(arguments) == 0:
return 0
requiredArgs = len(arguments)
while requiredArgs and arguments[requiredArgs - 1].optional:
requiredArgs -= 1
return requiredArgs
signatures = method.signatures()
def getPerSignatureCall(signature, argConversionStartsAt=0):
signatureIndex = signatures.index(signature)
return CGPerSignatureCall(signature[0], argsPre, signature[1],
nativeMethodName + '_' * signatureIndex,
static, descriptor,
method, argConversionStartsAt)
if len(signatures) == 1:
# Special case: we can just do a per-signature method call
# here for our one signature and not worry about switching
# on anything.
signature = signatures[0]
self.cgRoot = CGList([getPerSignatureCall(signature)])
requiredArgs = requiredArgCount(signature)
if requiredArgs > 0:
code = (
"if argc < %d {\n"
" throw_type_error(cx, \"Not enough arguments to %s.\");\n"
" return false;\n"
"}" % (requiredArgs, methodName))
self.cgRoot.prepend(
CGWrapper(CGGeneric(code), pre="\n", post="\n"))
return
# Need to find the right overload
maxArgCount = method.maxArgCount
allowedArgCounts = method.allowedArgCounts
argCountCases = []
for argCount in allowedArgCounts:
possibleSignatures = method.signaturesForArgCount(argCount)
if len(possibleSignatures) == 1:
# easy case!
signature = possibleSignatures[0]
argCountCases.append(CGCase(str(argCount), getPerSignatureCall(signature)))
continue
distinguishingIndex = method.distinguishingIndexForArgCount(argCount)
# We can't handle unions at the distinguishing index.
for (returnType, args) in possibleSignatures:
if args[distinguishingIndex].type.isUnion():
raise TypeError("No support for unions as distinguishing "
"arguments yet: %s",
args[distinguishingIndex].location)
# Convert all our arguments up to the distinguishing index.
# Doesn't matter which of the possible signatures we use, since
# they all have the same types up to that point; just use
# possibleSignatures[0]
caseBody = [
CGArgumentConverter(possibleSignatures[0][1][i],
i, "args", "argc", descriptor)
for i in range(0, distinguishingIndex)]
# Select the right overload from our set.
distinguishingArg = "args.get(%d)" % distinguishingIndex
def pickFirstSignature(condition, filterLambda):
sigs = filter(filterLambda, possibleSignatures)
assert len(sigs) < 2
if len(sigs) > 0:
call = getPerSignatureCall(sigs[0], distinguishingIndex)
if condition is None:
caseBody.append(call)
else:
caseBody.append(CGGeneric("if " + condition + " {"))
caseBody.append(CGIndenter(call))
caseBody.append(CGGeneric("}"))
return True
return False
# First check for null or undefined
pickFirstSignature("%s.isNullOrUndefined()" % distinguishingArg,
lambda s: (s[1][distinguishingIndex].type.nullable() or
s[1][distinguishingIndex].type.isDictionary()))
# Now check for distinguishingArg being an object that implements a
# non-callback interface. That includes typed arrays and
# arraybuffers.
interfacesSigs = [
s for s in possibleSignatures
if (s[1][distinguishingIndex].type.isObject() or
s[1][distinguishingIndex].type.isNonCallbackInterface())]
# There might be more than one of these; we need to check
# which ones we unwrap to.
if len(interfacesSigs) > 0:
# The spec says that we should check for "platform objects
# implementing an interface", but it's enough to guard on these
# being an object. The code for unwrapping non-callback
# interfaces and typed arrays will just bail out and move on to
# the next overload if the object fails to unwrap correctly. We
# could even not do the isObject() check up front here, but in
# cases where we have multiple object overloads it makes sense
# to do it only once instead of for each overload. That will
# also allow the unwrapping test to skip having to do codegen
# for the null-or-undefined case, which we already handled
# above.
caseBody.append(CGGeneric("if %s.get().is_object() {" %
(distinguishingArg)))
for idx, sig in enumerate(interfacesSigs):
caseBody.append(CGIndenter(CGGeneric("loop {")))
type = sig[1][distinguishingIndex].type
# The argument at index distinguishingIndex can't possibly
# be unset here, because we've already checked that argc is
# large enough that we can examine this argument.
info = getJSToNativeConversionInfo(
type, descriptor, failureCode="break;", isDefinitelyObject=True)
template = info.template
declType = info.declType
testCode = instantiateJSToNativeConversionTemplate(
template,
{"val": distinguishingArg},
declType,
"arg%d" % distinguishingIndex)
# Indent by 4, since we need to indent further than our "do" statement
caseBody.append(CGIndenter(testCode, 4))
# If we got this far, we know we unwrapped to the right
# interface, so just do the call. Start conversion with
# distinguishingIndex + 1, since we already converted
# distinguishingIndex.
caseBody.append(CGIndenter(
getPerSignatureCall(sig, distinguishingIndex + 1), 4))
caseBody.append(CGIndenter(CGGeneric("}")))
caseBody.append(CGGeneric("}"))
# XXXbz Now we're supposed to check for distinguishingArg being
# an array or a platform object that supports indexed
# properties... skip that last for now. It's a bit of a pain.
pickFirstSignature("%s.get().isObject() && IsArrayLike(cx, &%s.get().toObject())" %
(distinguishingArg, distinguishingArg),
lambda s:
(s[1][distinguishingIndex].type.isArray() or
s[1][distinguishingIndex].type.isSequence() or
s[1][distinguishingIndex].type.isObject()))
# Check for Date objects
# XXXbz Do we need to worry about security wrappers around the Date?
pickFirstSignature("%s.get().isObject() && JS_ObjectIsDate(cx, &%s.get().toObject())" %
(distinguishingArg, distinguishingArg),
lambda s: (s[1][distinguishingIndex].type.isDate() or
s[1][distinguishingIndex].type.isObject()))
# Check for vanilla JS objects
# XXXbz Do we need to worry about security wrappers?
pickFirstSignature("%s.get().is_object() && !is_platform_object(%s.get().to_object())" %
(distinguishingArg, distinguishingArg),
lambda s: (s[1][distinguishingIndex].type.isCallback() or
s[1][distinguishingIndex].type.isCallbackInterface() or
s[1][distinguishingIndex].type.isDictionary() or
s[1][distinguishingIndex].type.isObject()))
# The remaining cases are mutually exclusive. The
# pickFirstSignature calls are what change caseBody
# Check for strings or enums
if pickFirstSignature(None,
lambda s: (s[1][distinguishingIndex].type.isString() or
s[1][distinguishingIndex].type.isEnum())):
pass
# Check for primitives
elif pickFirstSignature(None,
lambda s: s[1][distinguishingIndex].type.isPrimitive()):
pass
# Check for "any"
elif pickFirstSignature(None,
lambda s: s[1][distinguishingIndex].type.isAny()):
pass
else:
# Just throw; we have no idea what we're supposed to
# do with this.
caseBody.append(CGGeneric("return Throw(cx, NS_ERROR_XPC_BAD_CONVERT_JS);"))
argCountCases.append(CGCase(str(argCount),
CGList(caseBody, "\n")))
overloadCGThings = []
overloadCGThings.append(
CGGeneric("let argcount = cmp::min(argc, %d);" %
maxArgCount))
overloadCGThings.append(
CGSwitch("argcount",
argCountCases,
CGGeneric("throw_type_error(cx, \"Not enough arguments to %s.\");\n"
"return false;" % methodName)))
# XXXjdm Avoid unreachable statement warnings
# overloadCGThings.append(
# CGGeneric('panic!("We have an always-returning default case");\n'
# 'return false;'))
self.cgRoot = CGWrapper(CGList(overloadCGThings, "\n"),
pre="\n")
def define(self):
return self.cgRoot.define()
def dictionaryHasSequenceMember(dictionary):
return (any(typeIsSequenceOrHasSequenceMember(m.type) for m in
dictionary.members) or
(dictionary.parent and
dictionaryHasSequenceMember(dictionary.parent)))
def typeIsSequenceOrHasSequenceMember(type):
if type.nullable():
type = type.inner
if type.isSequence():
return True
if type.isArray():
elementType = type.inner
return typeIsSequenceOrHasSequenceMember(elementType)
if type.isDictionary():
return dictionaryHasSequenceMember(type.inner)
if type.isUnion():
return any(typeIsSequenceOrHasSequenceMember(m.type) for m in
type.flatMemberTypes)
return False
def typeNeedsRooting(type, descriptorProvider):
return (type.isGeckoInterface() and
descriptorProvider.getDescriptor(type.unroll().inner.identifier.name).needsRooting)
def union_native_type(t):
name = t.unroll().name
return 'UnionTypes::%s' % name
class JSToNativeConversionInfo():
"""
An object representing information about a JS-to-native conversion.
"""
def __init__(self, template, default=None, declType=None,
needsRooting=False):
"""
template: A string representing the conversion code. This will have
template substitution performed on it as follows:
${val} is a handle to the JS::Value in question
default: A string or None representing rust code for default value(if any).
declType: A CGThing representing the native Rust type we're converting
to. This is allowed to be None if the conversion code is
supposed to be used as-is.
needsRooting: A boolean indicating whether the caller has to root
the result
"""
assert isinstance(template, str)
assert declType is None or isinstance(declType, CGThing)
self.template = template
self.default = default
self.declType = declType
self.needsRooting = needsRooting
def getJSToNativeConversionInfo(type, descriptorProvider, failureCode=None,
isDefinitelyObject=False,
isMember=False,
isArgument=False,
invalidEnumValueFatal=True,
defaultValue=None,
treatNullAs="Default",
isEnforceRange=False,
isClamp=False,
exceptionCode=None,
allowTreatNonObjectAsNull=False,
isCallbackReturnValue=False,
sourceDescription="value"):
"""
Get a template for converting a JS value to a native object based on the
given type and descriptor. If failureCode is given, then we're actually
testing whether we can convert the argument to the desired type. That
means that failures to convert due to the JS value being the wrong type of
value need to use failureCode instead of throwing exceptions. Failures to
convert that are due to JS exceptions (from toString or valueOf methods) or
out of memory conditions need to throw exceptions no matter what
failureCode is.
If isDefinitelyObject is True, that means we know the value
isObject() and we have no need to recheck that.
if isMember is True, we're being converted from a property of some
JS object, not from an actual method argument, so we can't rely on
our jsval being rooted or outliving us in any way. Any caller
passing true needs to ensure that it is handled correctly in
typeIsSequenceOrHasSequenceMember.
invalidEnumValueFatal controls whether an invalid enum value conversion
attempt will throw (if true) or simply return without doing anything (if
false).
If defaultValue is not None, it's the IDL default value for this conversion
If isEnforceRange is true, we're converting an integer and throwing if the
value is out of range.
If isClamp is true, we're converting an integer and clamping if the
value is out of range.
If allowTreatNonObjectAsNull is true, then [TreatNonObjectAsNull]
extended attributes on nullable callback functions will be honored.
The return value from this function is a JSToNativeConversionInfo instance consisting of four things:
1) A string representing the conversion code. This will have template
substitution performed on it as follows:
${val} replaced by an expression for the JS::Value in question
2) A string or None representing Rust code for the default value (if any).
3) A CGThing representing the native Rust type we're converting to
(declType). This is allowed to be None if the conversion code is
supposed to be used as-is.
4) A boolean indicating whether the caller has to root the result.
"""
# We should not have a defaultValue if we know we're an object
assert not isDefinitelyObject or defaultValue is None
# If exceptionCode is not set, we'll just rethrow the exception we got.
# Note that we can't just set failureCode to exceptionCode, because setting
# failureCode will prevent pending exceptions from being set in cases when
# they really should be!
if exceptionCode is None:
exceptionCode = "return false;"
needsRooting = typeNeedsRooting(type, descriptorProvider)
def handleOptional(template, declType, default):
assert (defaultValue is None) == (default is None)
return JSToNativeConversionInfo(template, default, declType, needsRooting=needsRooting)
# Unfortunately, .capitalize() on a string will lowercase things inside the
# string, which we do not want.
def firstCap(string):
return string[0].upper() + string[1:]
# Helper functions for dealing with failures due to the JS value being the
# wrong type of value.
def onFailureNotAnObject(failureCode):
return CGWrapper(
CGGeneric(
failureCode or
('throw_type_error(cx, "%s is not an object.");\n'
'%s' % (firstCap(sourceDescription), exceptionCode))),
post="\n")
def onFailureNotCallable(failureCode):
return CGWrapper(
CGGeneric(
failureCode or
('throw_type_error(cx, \"%s is not callable.\");\n'
'%s' % (firstCap(sourceDescription), exceptionCode))))
# A helper function for handling null default values. Checks that the
# default value, if it exists, is null.
def handleDefaultNull(nullValue):
if defaultValue is None:
return None
if not isinstance(defaultValue, IDLNullValue):
raise TypeError("Can't handle non-null default value here")
assert type.nullable() or type.isDictionary()
return nullValue
# A helper function for wrapping up the template body for
# possibly-nullable objecty stuff
def wrapObjectTemplate(templateBody, nullValue, isDefinitelyObject, type,
failureCode=None):
if not isDefinitelyObject:
# Handle the non-object cases by wrapping up the whole
# thing in an if cascade.
templateBody = (
"if ${val}.get().is_object() {\n" +
CGIndenter(CGGeneric(templateBody)).define() + "\n")
if type.nullable():
templateBody += (
"} else if ${val}.get().is_null_or_undefined() {\n"
" %s\n") % nullValue
templateBody += (
"} else {\n" +
CGIndenter(onFailureNotAnObject(failureCode)).define() +
"}")
return templateBody
assert not (isEnforceRange and isClamp) # These are mutually exclusive
if type.isArray():
raise TypeError("Can't handle array arguments yet")
if type.isSequence():
# Use the same type as for return values
declType = getRetvalDeclarationForType(type, descriptorProvider)
config = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(value) => value,\n"
" Err(()) => { %s },\n"
"}" % (config, exceptionCode))
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isUnion():
declType = CGGeneric(union_native_type(type))
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=" >")
templateBody = ("match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(value) => value,\n"
" Err(()) => { %s },\n"
"}" % exceptionCode)
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isGeckoInterface():
assert not isEnforceRange and not isClamp
descriptor = descriptorProvider.getDescriptor(
type.unroll().inner.identifier.name)
if descriptor.interface.isCallback():
name = descriptor.nativeType
declType = CGWrapper(CGGeneric(name), pre="Rc<", post=">")
template = "%s::new(${val}.get().to_object())" % name
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
template = wrapObjectTemplate("Some(%s)" % template, "None",
isDefinitelyObject, type,
failureCode)
return handleOptional(template, declType, handleDefaultNull("None"))
conversionFunction = "root_from_handlevalue"
descriptorType = descriptor.returnType
if isMember == "Variadic":
conversionFunction = "native_from_handlevalue"
descriptorType = descriptor.nativeType
elif isArgument:
descriptorType = descriptor.argumentType
templateBody = ""
if descriptor.interface.isConsequential():
raise TypeError("Consequential interface %s being used as an "
"argument" % descriptor.interface.identifier.name)
if failureCode is None:
substitutions = {
"sourceDescription": sourceDescription,
"interface": descriptor.interface.identifier.name,
"exceptionCode": exceptionCode,
}
unwrapFailureCode = string.Template(
'throw_type_error(cx, "${sourceDescription} does not '
'implement interface ${interface}.");\n'
'${exceptionCode}').substitute(substitutions)
else:
unwrapFailureCode = failureCode
templateBody = unwrapCastableObject(
descriptor, "${val}", unwrapFailureCode, conversionFunction)
declType = CGGeneric(descriptorType)
if type.nullable():
templateBody = "Some(%s)" % templateBody
declType = CGWrapper(declType, pre="Option<", post=">")
templateBody = wrapObjectTemplate(templateBody, "None",
isDefinitelyObject, type, failureCode)
return handleOptional(templateBody, declType, handleDefaultNull("None"))
if type.isSpiderMonkeyInterface():
raise TypeError("Can't handle SpiderMonkey interface arguments yet")
if type.isDOMString():
nullBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(strval) => strval,\n"
" Err(_) => { %s },\n"
"}" % (nullBehavior, exceptionCode))
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() == IDLType.Tags.domstring
default = 'DOMString::from("%s")' % defaultValue.value
if type.nullable():
default = "Some(%s)" % default
declType = "DOMString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isUSVString():
assert not isEnforceRange and not isClamp
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(strval) => strval,\n"
" Err(_) => { %s },\n"
"}" % exceptionCode)
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
assert type.nullable()
default = "None"
else:
assert defaultValue.type.tag() in (IDLType.Tags.domstring, IDLType.Tags.usvstring)
default = 'USVString("%s".to_owned())' % defaultValue.value
if type.nullable():
default = "Some(%s)" % default
declType = "USVString"
if type.nullable():
declType = "Option<%s>" % declType
return handleOptional(conversionCode, CGGeneric(declType), default)
if type.isByteString():
assert not isEnforceRange and not isClamp
conversionCode = (
"match FromJSValConvertible::from_jsval(cx, ${val}, ()) {\n"
" Ok(strval) => strval,\n"
" Err(_) => { %s },\n"
"}" % exceptionCode)
declType = CGGeneric("ByteString")
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
return handleOptional(conversionCode, declType, handleDefaultNull("None"))
if type.isEnum():
assert not isEnforceRange and not isClamp
if type.nullable():
raise TypeError("We don't support nullable enumerated arguments "
"yet")
enum = type.inner.identifier.name
if invalidEnumValueFatal:
handleInvalidEnumValueCode = exceptionCode
else:
handleInvalidEnumValueCode = "return true;"
template = (
"match find_enum_string_index(cx, ${val}, %(values)s) {\n"
" Err(_) => { %(exceptionCode)s },\n"
" Ok(None) => { %(handleInvalidEnumValueCode)s },\n"
" Ok(Some(index)) => {\n"
" //XXXjdm need some range checks up in here.\n"
" mem::transmute(index)\n"
" },\n"
"}" % {"values": enum + "Values::strings",
"exceptionCode": exceptionCode,
"handleInvalidEnumValueCode": handleInvalidEnumValueCode})
if defaultValue is not None:
assert defaultValue.type.tag() == IDLType.Tags.domstring
default = "%s::%s" % (enum, getEnumValueName(defaultValue.value))
else:
default = None
return handleOptional(template, CGGeneric(enum), default)
if type.isCallback():
assert not isEnforceRange and not isClamp
assert not type.treatNonCallableAsNull()
assert not type.treatNonObjectAsNull() or type.nullable()
assert not type.treatNonObjectAsNull() or not type.treatNonCallableAsNull()
callback = type.unroll().callback
declType = CGGeneric('%s::%s' % (getModuleFromObject(callback), callback.identifier.name))
finalDeclType = CGTemplatedType("Rc", declType)
conversion = CGCallbackTempRoot(declType.define())
if type.nullable():
declType = CGTemplatedType("Option", declType)
finalDeclType = CGTemplatedType("Option", finalDeclType)
conversion = CGWrapper(conversion, pre="Some(", post=")")
if allowTreatNonObjectAsNull and type.treatNonObjectAsNull():
if not isDefinitelyObject:
haveObject = "${val}.get().is_object()"
template = CGIfElseWrapper(haveObject,
conversion,
CGGeneric("None")).define()
else:
template = conversion
else:
template = CGIfElseWrapper("IsCallable(${val}.get().to_object())",
conversion,
onFailureNotCallable(failureCode)).define()
template = wrapObjectTemplate(
template,
"None",
isDefinitelyObject,
type,
failureCode)
if defaultValue is not None:
assert allowTreatNonObjectAsNull
assert type.treatNonObjectAsNull()
assert type.nullable()
assert isinstance(defaultValue, IDLNullValue)
default = "None"
else:
default = None
return JSToNativeConversionInfo(template, default, finalDeclType, needsRooting=needsRooting)
if type.isAny():
assert not isEnforceRange and not isClamp
declType = ""
default = ""
if isMember == "Dictionary":
# TODO: Need to properly root dictionaries
# https://github.com/servo/servo/issues/6381
declType = CGGeneric("JSVal")
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
default = "NullValue()"
elif isinstance(defaultValue, IDLUndefinedValue):
default = "UndefinedValue()"
else:
raise TypeError("Can't handle non-null, non-undefined default value here")
else:
declType = CGGeneric("HandleValue")
if defaultValue is None:
default = None
elif isinstance(defaultValue, IDLNullValue):
default = "HandleValue::null()"
elif isinstance(defaultValue, IDLUndefinedValue):
default = "HandleValue::undefined()"
else:
raise TypeError("Can't handle non-null, non-undefined default value here")
return handleOptional("${val}", declType, default)
if type.isObject():
assert not isEnforceRange and not isClamp
# TODO: Need to root somehow
# https://github.com/servo/servo/issues/6382
declType = CGGeneric("*mut JSObject")
templateBody = wrapObjectTemplate("${val}.get().to_object()",
"ptr::null_mut()",
isDefinitelyObject, type, failureCode)
return handleOptional(templateBody, declType,
handleDefaultNull("ptr::null_mut()"))
if type.isDictionary():
if failureCode is not None:
raise TypeError("Can't handle dictionaries when failureCode is not None")
# There are no nullable dictionaries
assert not type.nullable()
typeName = "%s::%s" % (CGDictionary.makeModuleName(type.inner),
CGDictionary.makeDictionaryName(type.inner))
declType = CGGeneric(typeName)
template = ("match %s::new(cx, ${val}) {\n"
" Ok(dictionary) => dictionary,\n"
" Err(_) => { %s },\n"
"}" % (typeName, exceptionCode))
return handleOptional(template, declType, handleDefaultNull("%s::empty(cx)" % typeName))
if type.isVoid():
# This one only happens for return values, and it's easy: Just
# ignore the jsval.
return JSToNativeConversionInfo("", None, None, needsRooting=False)
if not type.isPrimitive():
raise TypeError("Need conversion for argument type '%s'" % str(type))
conversionBehavior = getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs)
if failureCode is None:
failureCode = 'return false'
declType = CGGeneric(builtinNames[type.tag()])
if type.nullable():
declType = CGWrapper(declType, pre="Option<", post=">")
template = (
"match FromJSValConvertible::from_jsval(cx, ${val}, %s) {\n"
" Ok(v) => v,\n"
" Err(_) => { %s }\n"
"}" % (conversionBehavior, exceptionCode))
if defaultValue is not None:
if isinstance(defaultValue, IDLNullValue):
assert type.nullable()
defaultStr = "None"
else:
tag = defaultValue.type.tag()
if tag in [IDLType.Tags.float, IDLType.Tags.double]:
defaultStr = "Finite::wrap(%s)" % defaultValue.value
elif tag in numericTags:
defaultStr = str(defaultValue.value)
else:
assert tag == IDLType.Tags.bool
defaultStr = toStringBool(defaultValue.value)
if type.nullable():
defaultStr = "Some(%s)" % defaultStr
else:
defaultStr = None
return handleOptional(template, declType, defaultStr)
def instantiateJSToNativeConversionTemplate(templateBody, replacements,
declType, declName):
"""
Take the templateBody and declType as returned by
getJSToNativeConversionInfo, a set of replacements as required by the
strings in such a templateBody, and a declName, and generate code to
convert the value into a stack-allocated Rust binding with that name.
"""
result = CGList([], "\n")
conversion = CGGeneric(string.Template(templateBody).substitute(replacements))
if declType is not None:
newDecl = [
CGGeneric("let "),
CGGeneric(declName),
CGGeneric(": "),
declType,
CGGeneric(" = "),
conversion,
CGGeneric(";"),
]
result.append(CGList(newDecl))
else:
result.append(conversion)
# Add an empty CGGeneric to get an extra newline after the argument
# conversion.
result.append(CGGeneric(""))
return result
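# Illustrative expansion (added; the exact conversion config is elided):
# with declName "arg0" and declType CGGeneric("DOMString"), this emits
# Rust along the lines of:
#   let arg0: DOMString = match FromJSValConvertible::from_jsval(cx, args.get(0), /* config */) {
#       Ok(strval) => strval,
#       Err(_) => { return false; },
#   };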
def convertConstIDLValueToJSVal(value):
if isinstance(value, IDLNullValue):
return "NullVal"
tag = value.type.tag()
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8, IDLType.Tags.int16,
IDLType.Tags.uint16, IDLType.Tags.int32]:
return "IntVal(%s)" % (value.value)
if tag == IDLType.Tags.uint32:
return "UintVal(%s)" % (value.value)
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64]:
return "DoubleVal(%s)" % (value.value)
if tag == IDLType.Tags.bool:
return "BoolVal(true)" if value.value else "BoolVal(false)"
if tag in [IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
return "DoubleVal(%s)" % (value.value)
raise TypeError("Const value of unhandled type: " + value.type)
class CGArgumentConverter(CGThing):
"""
A class that takes an IDL argument object, its index in the
argument list, and the args and argc strings, and generates code to
unwrap the argument to the right native type.
"""
def __init__(self, argument, index, args, argc, descriptorProvider,
invalidEnumValueFatal=True):
CGThing.__init__(self)
assert not argument.defaultValue or argument.optional
replacer = {
"index": index,
"argc": argc,
"args": args
}
replacementVariables = {
"val": string.Template("${args}.get(${index})").substitute(replacer),
}
info = getJSToNativeConversionInfo(
argument.type,
descriptorProvider,
invalidEnumValueFatal=invalidEnumValueFatal,
defaultValue=argument.defaultValue,
treatNullAs=argument.treatNullAs,
isEnforceRange=argument.enforceRange,
isClamp=argument.clamp,
isMember="Variadic" if argument.variadic else False,
allowTreatNonObjectAsNull=argument.allowTreatNonCallableAsNull())
template = info.template
default = info.default
declType = info.declType
if not argument.variadic:
if argument.optional:
condition = "{args}.get({index}).is_undefined()".format(**replacer)
if argument.defaultValue:
assert default
template = CGIfElseWrapper(condition,
CGGeneric(default),
CGGeneric(template)).define()
else:
assert not default
declType = CGWrapper(declType, pre="Option<", post=">")
template = CGIfElseWrapper(condition,
CGGeneric("None"),
CGGeneric("Some(%s)" % template)).define()
else:
assert not default
self.converter = instantiateJSToNativeConversionTemplate(
template, replacementVariables, declType, "arg%d" % index)
else:
assert argument.optional
variadicConversion = {
"val": string.Template("${args}.get(variadicArg)").substitute(replacer),
}
innerConverter = [instantiateJSToNativeConversionTemplate(
template, variadicConversion, declType, "slot")]
arg = "arg%d" % index
if argument.type.isGeckoInterface():
vec = "RootedVec::new()"
innerConverter.append(CGGeneric("%s.push(JS::from_ref(&*slot));" % arg))
else:
vec = "vec![]"
innerConverter.append(CGGeneric("%s.push(slot);" % arg))
inner = CGIndenter(CGList(innerConverter, "\n"), 8).define()
self.converter = CGGeneric("""\
let mut %(arg)s = %(vec)s;
if %(argc)s > %(index)s {
%(arg)s.reserve(%(argc)s as usize - %(index)s);
for variadicArg in %(index)s..%(argc)s {
%(inner)s
}
}""" % {'arg': arg, 'argc': argc, 'index': index, 'inner': inner, 'vec': vec})
def define(self):
return self.converter.define()
def wrapForType(jsvalRef, result='result', successCode='return true;', pre=''):
"""
Reflect a Rust value into JS.
* 'jsvalRef': a MutableHandleValue in which to store the result
of the conversion;
* 'result': the name of the variable in which the Rust value is stored;
* 'successCode': the code to run once we have done the conversion;
* 'pre': code to run before the conversion if rooting is necessary.
"""
wrap = "%s\n(%s).to_jsval(cx, %s);" % (pre, result, jsvalRef)
if successCode:
wrap += "\n%s" % successCode
return wrap
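# Illustrative sketch: with the default arguments, wrapForType('args.rval()')
# returns
#
#     \n(result).to_jsval(cx, args.rval());\nreturn true;
#
# i.e. the Rust value in `result` is reflected into the call's return slot
# and the binding returns success.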
def typeNeedsCx(type, retVal=False):
if type is None:
return False
if type.nullable():
type = type.inner
if type.isSequence() or type.isArray():
type = type.inner
if type.isUnion():
return any(typeNeedsCx(t) for t in type.unroll().flatMemberTypes)
if retVal and type.isSpiderMonkeyInterface():
return True
return type.isAny() or type.isObject()
# Returns a conversion behavior suitable for a type
def getConversionConfigForType(type, isEnforceRange, isClamp, treatNullAs):
if type.isSequence():
return getConversionConfigForType(type.unroll(), isEnforceRange, isClamp, treatNullAs)
if type.isDOMString():
assert not isEnforceRange and not isClamp
treatAs = {
"Default": "StringificationBehavior::Default",
"EmptyString": "StringificationBehavior::Empty",
}
if treatNullAs not in treatAs:
raise TypeError("We don't support [TreatNullAs=%s]" % treatNullAs)
if type.nullable():
# Note: the actual behavior passed here doesn't matter for nullable
# strings.
return "StringificationBehavior::Default"
else:
return treatAs[treatNullAs]
if type.isInteger():
if isEnforceRange:
return "ConversionBehavior::EnforceRange"
elif isClamp:
return "ConversionBehavior::Clamp"
else:
return "ConversionBehavior::Default"
assert not isEnforceRange and not isClamp
return "()"
# Returns a CGThing containing the type of the return value.
def getRetvalDeclarationForType(returnType, descriptorProvider):
if returnType is None or returnType.isVoid():
# Nothing to declare
return CGGeneric("()")
if returnType.isPrimitive() and returnType.tag() in builtinNames:
result = CGGeneric(builtinNames[returnType.tag()])
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isDOMString():
result = CGGeneric("DOMString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isUSVString():
result = CGGeneric("USVString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isByteString():
result = CGGeneric("ByteString")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isEnum():
result = CGGeneric(returnType.unroll().inner.identifier.name)
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isGeckoInterface():
descriptor = descriptorProvider.getDescriptor(
returnType.unroll().inner.identifier.name)
result = CGGeneric(descriptor.returnType)
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isCallback():
callback = returnType.unroll().callback
result = CGGeneric('Rc<%s::%s>' % (getModuleFromObject(callback), callback.identifier.name))
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isUnion():
result = CGGeneric(union_native_type(returnType))
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
# TODO: Return the value through a MutableHandleValue outparam
# https://github.com/servo/servo/issues/6307
if returnType.isAny():
return CGGeneric("JSVal")
if returnType.isObject() or returnType.isSpiderMonkeyInterface():
return CGGeneric("*mut JSObject")
if returnType.isSequence():
inner = returnType.unroll()
result = getRetvalDeclarationForType(inner, descriptorProvider)
result = CGWrapper(result, pre="Vec<", post=">")
if returnType.nullable():
result = CGWrapper(result, pre="Option<", post=">")
return result
if returnType.isDictionary():
nullable = returnType.nullable()
dictName = returnType.inner.name if nullable else returnType.name
result = CGGeneric(dictName)
if typeNeedsRooting(returnType, descriptorProvider):
raise TypeError("We don't support rootable dictionaries return values")
if nullable:
result = CGWrapper(result, pre="Option<", post=">")
return result
raise TypeError("Don't know how to declare return value for %s" %
returnType)
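# Illustrative sketch: a nullable DOMString return type declares
# Option<DOMString>, and a sequence return type wraps its element type in
# Vec<..>, so a nullable sequence<DOMString> becomes Option<Vec<DOMString>>.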
class PropertyDefiner:
"""
A common superclass for defining things on prototype objects.
Subclasses should implement generateArray to generate the actual arrays of
things we're defining. They should also set self.regular to the list of
things exposed to web pages.
"""
def __init__(self, descriptor, name):
self.descriptor = descriptor
self.name = name
def variableName(self):
return "s" + self.name
def length(self):
return len(self.regular)
def __str__(self):
# We only need to generate id arrays for things that will end
# up used via ResolveProperty or EnumerateProperties.
return self.generateArray(self.regular, self.variableName())
def generatePrefableArray(self, array, name, specTemplate, specTerminator,
specType, getDataTuple):
"""
This method generates our various arrays.
array is an array of interface members as passed to generateArray
name is the name as passed to generateArray
specTemplate is a template for each entry of the spec array
specTerminator is a terminator for the spec array (inserted at the end
of the array), or None
specType is the actual typename of our spec
getDataTuple is a callback function that takes an array entry and
returns a tuple suitable for substitution into specTemplate.
"""
assert len(array) != 0
specs = []
for member in array:
specs.append(specTemplate % getDataTuple(member))
if specTerminator:
specs.append(specTerminator)
return (("const %s: &'static [%s] = &[\n" +
",\n".join(specs) + "\n" +
"];\n") % (name, specType))
# The length of a method is the minimum of the lengths of the
# argument lists of all its overloads.
def methodLength(method):
signatures = method.signatures()
return min(
len([arg for arg in arguments if not arg.optional and not arg.variadic])
for (_, arguments) in signatures)
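# Illustrative sketch: for overloads f(long a) and f(long a, optional long b),
# each signature contributes one required argument, so methodLength(f) == 1.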
class MethodDefiner(PropertyDefiner):
"""
A class for defining methods on a prototype object.
"""
def __init__(self, descriptor, name, static, unforgeable):
assert not (static and unforgeable)
PropertyDefiner.__init__(self, descriptor, name)
# FIXME https://bugzilla.mozilla.org/show_bug.cgi?id=772822
# We should be able to check for special operations without an
# identifier. For now we check if the name starts with __
# Ignore non-static methods for callback interfaces
if not descriptor.interface.isCallback() or static:
methods = [m for m in descriptor.interface.members if
m.isMethod() and m.isStatic() == static and
not m.isIdentifierLess() and
MemberIsUnforgeable(m, descriptor) == unforgeable]
else:
methods = []
self.regular = [{"name": m.identifier.name,
"methodInfo": not m.isStatic(),
"length": methodLength(m)} for m in methods]
# FIXME Check for an existing iterator on the interface first.
if any(m.isGetter() and m.isIndexed() for m in methods):
self.regular.append({"name": '@@iterator',
"methodInfo": False,
"selfHostedName": "ArrayValues",
"length": 0})
isUnforgeableInterface = bool(descriptor.interface.getExtendedAttribute("Unforgeable"))
if not static and unforgeable == isUnforgeableInterface:
stringifier = descriptor.operations['Stringifier']
if stringifier:
self.regular.append({
"name": "toString",
"nativeName": stringifier.identifier.name,
"length": 0,
})
self.unforgeable = unforgeable
def generateArray(self, array, name):
if len(array) == 0:
return ""
flags = "JSPROP_ENUMERATE"
if self.unforgeable:
flags += " | JSPROP_PERMANENT | JSPROP_READONLY"
def specData(m):
# TODO: Use something like JS_FNSPEC
# https://github.com/servo/servo/issues/6391
if "selfHostedName" in m:
selfHostedName = '%s as *const u8 as *const libc::c_char' % str_to_const_array(m["selfHostedName"])
assert not m.get("methodInfo", True)
accessor = "None"
jitinfo = "0 as *const JSJitInfo"
else:
selfHostedName = "0 as *const libc::c_char"
if m.get("methodInfo", True):
identifier = m.get("nativeName", m["name"])
# Go through an intermediate type here, because it's not
# easy to tell whether the methodinfo is a JSJitInfo or
# a JSTypedMethodJitInfo here. The compiler knows, though,
# so let it do the work.
jitinfo = "&%s_methodinfo as *const _ as *const JSJitInfo" % identifier
accessor = "Some(generic_method)"
else:
jitinfo = "0 as *const JSJitInfo"
accessor = 'Some(%s)' % m.get("nativeName", m["name"])
if m["name"].startswith("@@"):
return ('(SymbolCode::%s as i32 + 1)'
% m["name"][2:], accessor, jitinfo, m["length"], flags, selfHostedName)
return (str_to_const_array(m["name"]), accessor, jitinfo, m["length"], flags, selfHostedName)
return self.generatePrefableArray(
array, name,
' JSFunctionSpec {\n'
' name: %s as *const u8 as *const libc::c_char,\n'
' call: JSNativeWrapper { op: %s, info: %s },\n'
' nargs: %s,\n'
' flags: (%s) as u16,\n'
' selfHostedName: %s\n'
' }',
' JSFunctionSpec {\n'
' name: 0 as *const libc::c_char,\n'
' call: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n'
' nargs: 0,\n'
' flags: 0,\n'
' selfHostedName: 0 as *const libc::c_char\n'
' }',
'JSFunctionSpec',
specData)
class AttrDefiner(PropertyDefiner):
def __init__(self, descriptor, name, static, unforgeable):
assert not (static and unforgeable)
PropertyDefiner.__init__(self, descriptor, name)
self.name = name
self.descriptor = descriptor
self.regular = [
m
for m in descriptor.interface.members if
m.isAttr() and m.isStatic() == static and
MemberIsUnforgeable(m, descriptor) == unforgeable
]
self.static = static
self.unforgeable = unforgeable
def generateArray(self, array, name):
if len(array) == 0:
return ""
flags = "JSPROP_ENUMERATE | JSPROP_SHARED"
if self.unforgeable:
flags += " | JSPROP_READONLY | JSPROP_PERMANENT"
def getter(attr):
if self.static:
accessor = 'get_' + self.descriptor.internalNameFor(attr.identifier.name)
jitinfo = "0 as *const JSJitInfo"
else:
if attr.hasLenientThis():
accessor = "generic_lenient_getter"
else:
accessor = "generic_getter"
jitinfo = "&%s_getterinfo" % self.descriptor.internalNameFor(attr.identifier.name)
return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }"
% {"info": jitinfo,
"native": accessor})
def setter(attr):
if attr.readonly and not attr.getExtendedAttribute("PutForwards"):
return "JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }"
if self.static:
accessor = 'set_' + self.descriptor.internalNameFor(attr.identifier.name)
jitinfo = "0 as *const JSJitInfo"
else:
if attr.hasLenientThis():
accessor = "generic_lenient_setter"
else:
accessor = "generic_setter"
jitinfo = "&%s_setterinfo" % self.descriptor.internalNameFor(attr.identifier.name)
return ("JSNativeWrapper { op: Some(%(native)s), info: %(info)s }"
% {"info": jitinfo,
"native": accessor})
def specData(attr):
return (str_to_const_array(attr.identifier.name), flags, getter(attr),
setter(attr))
return self.generatePrefableArray(
array, name,
' JSPropertySpec {\n'
' name: %s as *const u8 as *const libc::c_char,\n'
' flags: ((%s) & 0xFF) as u8,\n'
' getter: %s,\n'
' setter: %s\n'
' }',
' JSPropertySpec {\n'
' name: 0 as *const libc::c_char,\n'
' flags: 0,\n'
' getter: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo },\n'
' setter: JSNativeWrapper { op: None, info: 0 as *const JSJitInfo }\n'
' }',
'JSPropertySpec',
specData)
class ConstDefiner(PropertyDefiner):
"""
A class for defining constants on the interface object
"""
def __init__(self, descriptor, name):
PropertyDefiner.__init__(self, descriptor, name)
self.name = name
self.regular = [m for m in descriptor.interface.members if m.isConst()]
def generateArray(self, array, name):
if len(array) == 0:
return ""
def specData(const):
return (str_to_const_array(const.identifier.name),
convertConstIDLValueToJSVal(const.value))
return self.generatePrefableArray(
array, name,
' ConstantSpec { name: %s, value: %s }',
None,
'ConstantSpec',
specData)
# We'll want to insert the indent at the beginnings of lines, but we
# don't want to indent empty lines. So only indent lines that have a
# non-newline character on them.
lineStartDetector = re.compile("^(?=[^\n])", re.MULTILINE)
class CGIndenter(CGThing):
"""
A class that takes another CGThing and generates code that indents that
CGThing by some number of spaces. The default indent is four spaces.
"""
def __init__(self, child, indentLevel=4):
CGThing.__init__(self)
self.child = child
self.indent = " " * indentLevel
def define(self):
defn = self.child.define()
if defn != "":
return re.sub(lineStartDetector, self.indent, defn)
else:
return defn
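# Illustrative sketch: CGIndenter indents only lines that begin with a
# non-newline character, so
#
#     CGIndenter(CGGeneric("a\n\nb")).define() == "    a\n\n    b"
#
# and the blank middle line picks up no trailing whitespace.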
class CGWrapper(CGThing):
"""
Generic CGThing that wraps other CGThings with pre and post text.
"""
def __init__(self, child, pre="", post="", reindent=False):
CGThing.__init__(self)
self.child = child
self.pre = pre
self.post = post
self.reindent = reindent
def define(self):
defn = self.child.define()
if self.reindent:
# We don't use lineStartDetector because we don't want to
# insert whitespace at the beginning of our _first_ line.
defn = stripTrailingWhitespace(
defn.replace("\n", "\n" + (" " * len(self.pre))))
return self.pre + defn + self.post
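# Illustrative sketch:
#
#     CGWrapper(CGGeneric("foo"), pre="Some(", post=")").define() == "Some(foo)"
#
# With reindent=True, continuation lines of the child are shifted right by
# len(pre) so they line up under the first line.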
class CGImports(CGWrapper):
"""
Generates the appropriate import/use statements.
"""
def __init__(self, child, descriptors, callbacks, imports, ignored_warnings=None):
"""
Adds a set of imports.
"""
if ignored_warnings is None:
ignored_warnings = [
'non_camel_case_types',
'non_upper_case_globals',
'unused_imports',
'unused_variables',
'unused_assignments',
]
def componentTypes(type):
if type.nullable():
type = type.unroll()
if type.isUnion():
return type.flatMemberTypes
return [type]
def isImportable(type):
if not type.isType():
assert type.isInterface()
return not type.isCallback()
return type.isNonCallbackInterface() and not type.builtin
def relatedTypesForSignatures(method):
types = []
for (returnType, arguments) in method.signatures():
types += componentTypes(returnType)
for arg in arguments:
types += componentTypes(arg.type)
return types
def getIdentifier(t):
if t.isType():
return t.inner.identifier
assert t.isInterface()
return t.identifier
types = []
for d in descriptors:
types += [d.interface]
members = d.interface.members + d.interface.namedConstructors
constructor = d.interface.ctor()
if constructor:
members += [constructor]
if d.proxy:
members += [o for o in d.operations.values() if o]
for m in members:
if m.isMethod():
types += relatedTypesForSignatures(m)
elif m.isAttr():
types += componentTypes(m.type)
for c in callbacks:
types += relatedTypesForSignatures(c)
imports += ['dom::types::%s' % getIdentifier(t).name for t in types if isImportable(t)]
statements = []
if len(ignored_warnings) > 0:
statements.append('#![allow(%s)]' % ','.join(ignored_warnings))
statements.extend('use %s;' % i for i in sorted(set(imports)))
CGWrapper.__init__(self, child,
pre='\n'.join(statements) + '\n\n')
class CGIfWrapper(CGWrapper):
def __init__(self, condition, child):
pre = CGWrapper(CGGeneric(condition), pre="if ", post=" {\n",
reindent=True)
CGWrapper.__init__(self, CGIndenter(child), pre=pre.define(),
post="\n}")
class CGTemplatedType(CGWrapper):
def __init__(self, templateName, child):
CGWrapper.__init__(self, child, pre=templateName + "<", post=">")
class CGNamespace(CGWrapper):
def __init__(self, namespace, child, public=False):
pre = "%smod %s {\n" % ("pub " if public else "", namespace)
post = "} // mod %s" % namespace
CGWrapper.__init__(self, child, pre=pre, post=post)
@staticmethod
def build(namespaces, child, public=False):
"""
Static helper method to build multiple wrapped namespaces.
"""
if not namespaces:
return child
inner = CGNamespace.build(namespaces[1:], child, public=public)
return CGNamespace(namespaces[0], inner, public=public)
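# Illustrative sketch: CGNamespace.build(["a", "b"], child, public=True)
# recursively produces CGNamespace("a", CGNamespace("b", child)), i.e. the
# child's definition surrounded by "pub mod a {" ... "} // mod a" with the
# "b" module nested directly inside.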
def DOMClassTypeId(desc):
protochain = desc.prototypeChain
inner = ""
if desc.hasDescendants():
if desc.interface.getExtendedAttribute("Abstract"):
return "::dom::bindings::codegen::InheritTypes::TopTypeId::Abstract"
name = desc.interface.identifier.name
inner = "(::dom::bindings::codegen::InheritTypes::%sTypeId::%s)" % (name, name)
elif len(protochain) == 1:
return "::dom::bindings::codegen::InheritTypes::TopTypeId::Alone"
reversed_protochain = list(reversed(protochain))
for (child, parent) in zip(reversed_protochain, reversed_protochain[1:]):
inner = "(::dom::bindings::codegen::InheritTypes::%sTypeId::%s%s)" % (parent, child, inner)
return "::dom::bindings::codegen::InheritTypes::TopTypeId::%s%s" % (protochain[0], inner)
def DOMClass(descriptor):
protoList = ['PrototypeList::ID::' + proto for proto in descriptor.prototypeChain]
# Pad out the list to the right length with ID::Last so we
# guarantee that all the lists are the same length. ID::Last
# is never the ID of any prototype, so it's safe to use as
# padding.
protoList.extend(['PrototypeList::ID::Last'] * (descriptor.config.maxProtoChainLength - len(protoList)))
prototypeChainString = ', '.join(protoList)
heapSizeOf = 'heap_size_of_raw_self_and_children::<%s>' % descriptor.interface.identifier.name
return """\
DOMClass {
interface_chain: [ %s ],
type_id: %s,
heap_size_of: %s as unsafe fn(_) -> _,
}""" % (prototypeChainString, DOMClassTypeId(descriptor), heapSizeOf)
class CGDOMJSClass(CGThing):
"""
Generate a DOMJSClass for a given descriptor
"""
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
traceHook = 'Some(%s)' % TRACE_HOOK_NAME
if self.descriptor.isGlobal():
assert not self.descriptor.weakReferenceable
traceHook = "Some(js::jsapi::JS_GlobalObjectTraceHook)"
flags = "JSCLASS_IS_GLOBAL | JSCLASS_DOM_GLOBAL"
slots = "JSCLASS_GLOBAL_SLOT_COUNT + 1"
else:
flags = "0"
if self.descriptor.weakReferenceable:
slots = "2"
else:
slots = "1"
return """\
static Class: DOMJSClass = DOMJSClass {
base: js::jsapi::Class {
name: %s as *const u8 as *const libc::c_char,
flags: JSCLASS_IS_DOMJSCLASS | JSCLASS_IMPLEMENTS_BARRIERS | %s |
(((%s) & JSCLASS_RESERVED_SLOTS_MASK) <<
JSCLASS_RESERVED_SLOTS_SHIFT), //JSCLASS_HAS_RESERVED_SLOTS(%s),
addProperty: None,
delProperty: None,
getProperty: None,
setProperty: None,
enumerate: None,
resolve: None,
convert: None,
finalize: Some(%s),
call: None,
hasInstance: None,
construct: None,
trace: %s,
spec: js::jsapi::ClassSpec {
createConstructor: None,
createPrototype: None,
constructorFunctions: 0 as *const js::jsapi::JSFunctionSpec,
constructorProperties: 0 as *const js::jsapi::JSPropertySpec,
prototypeFunctions: 0 as *const js::jsapi::JSFunctionSpec,
prototypeProperties: 0 as *const js::jsapi::JSPropertySpec,
finishInit: None,
flags: 0,
},
ext: js::jsapi::ClassExtension {
outerObject: %s,
innerObject: None,
isWrappedNative: false,
weakmapKeyDelegateOp: None,
objectMovedOp: None,
},
ops: js::jsapi::ObjectOps {
lookupProperty: None,
defineProperty: None,
hasProperty: None,
getProperty: None,
setProperty: None,
getOwnPropertyDescriptor: None,
deleteProperty: None,
watch: None,
unwatch: None,
getElements: None,
enumerate: None,
thisObject: %s,
funToString: None,
},
},
dom_class: %s
};""" % (str_to_const_array(self.descriptor.interface.identifier.name),
flags, slots, slots,
FINALIZE_HOOK_NAME, traceHook,
self.descriptor.outerObjectHook,
self.descriptor.outerObjectHook,
CGGeneric(DOMClass(self.descriptor)).define())
def str_to_const_array(s):
return "b\"%s\\0\"" % s
class CGPrototypeJSClass(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
name = str_to_const_array(self.descriptor.interface.identifier.name + "Prototype")
slotCount = 0
if self.descriptor.hasUnforgeableMembers:
slotCount += 1
return """\
static PrototypeClass: JSClass = JSClass {
name: %(name)s as *const u8 as *const libc::c_char,
flags:
// JSCLASS_HAS_RESERVED_SLOTS(%(slotCount)s)
(%(slotCount)s & JSCLASS_RESERVED_SLOTS_MASK) << JSCLASS_RESERVED_SLOTS_SHIFT,
addProperty: None,
delProperty: None,
getProperty: None,
setProperty: None,
enumerate: None,
resolve: None,
convert: None,
finalize: None,
call: None,
hasInstance: None,
construct: None,
trace: None,
reserved: [0 as *mut libc::c_void; 26]
};
""" % {'name': name, 'slotCount': slotCount}
class CGInterfaceObjectJSClass(CGThing):
def __init__(self, descriptor):
assert descriptor.interface.hasInterfaceObject() and not descriptor.interface.isCallback()
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
if self.descriptor.interface.ctor():
constructor = CONSTRUCT_HOOK_NAME
else:
constructor = "throwing_constructor"
args = {
"constructor": constructor,
"hasInstance": HASINSTANCE_HOOK_NAME,
"name": self.descriptor.interface.identifier.name,
}
return """\
static InterfaceObjectClass: NonCallbackInterfaceObjectClass =
NonCallbackInterfaceObjectClass::new(%(constructor)s, %(hasInstance)s,
fun_to_string);
""" % args
class CGList(CGThing):
"""
Generate code for a list of CGThings. Just concatenates them together, with
an optional joiner string. "\n" is a common joiner.
"""
def __init__(self, children, joiner=""):
CGThing.__init__(self)
self.children = children
self.joiner = joiner
def append(self, child):
self.children.append(child)
def prepend(self, child):
self.children.insert(0, child)
def join(self, generator):
return self.joiner.join(filter(lambda s: len(s) > 0, (child for child in generator)))
def define(self):
return self.join(child.define() for child in self.children if child is not None)
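# Illustrative sketch: empty definitions are filtered out by join(), so
#
#     CGList([CGGeneric("a"), CGGeneric(""), CGGeneric("b")], "\n").define()
#
# yields "a\nb" rather than "a\n\nb".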
class CGIfElseWrapper(CGList):
def __init__(self, condition, ifTrue, ifFalse):
kids = [CGIfWrapper(condition, ifTrue),
CGWrapper(CGIndenter(ifFalse), pre=" else {\n", post="\n}")]
CGList.__init__(self, kids)
class CGGeneric(CGThing):
"""
A class that spits out a fixed string into the codegen.
"""
def __init__(self, text):
self.text = text
def define(self):
return self.text
class CGCallbackTempRoot(CGGeneric):
def __init__(self, name):
CGGeneric.__init__(self, "%s::new(${val}.get().to_object())" % name)
def getAllTypes(descriptors, dictionaries, callbacks):
"""
Generate all the types we're dealing with. For each type, a tuple
containing type, descriptor, dictionary is yielded. The
descriptor and dictionary can be None if the type does not come
from a descriptor or dictionary; they will never both be non-None.
"""
for d in descriptors:
for t in getTypesFromDescriptor(d):
yield (t, d, None)
for dictionary in dictionaries:
for t in getTypesFromDictionary(dictionary):
yield (t, None, dictionary)
for callback in callbacks:
for t in getTypesFromCallback(callback):
yield (t, None, None)
def UnionTypes(descriptors, dictionaries, callbacks, config):
"""
Returns a CGImports-wrapped CGList containing a CGUnionStruct and a
CGUnionConversionStruct for every union.
"""
imports = [
'dom::bindings::codegen::PrototypeList',
'dom::bindings::conversions::FromJSValConvertible',
'dom::bindings::conversions::ToJSValConvertible',
'dom::bindings::conversions::ConversionBehavior',
'dom::bindings::conversions::root_from_handlevalue',
'dom::bindings::conversions::StringificationBehavior',
'dom::bindings::error::throw_not_in_union',
'dom::bindings::js::Root',
'dom::bindings::str::USVString',
'dom::types::*',
'js::jsapi::JSContext',
'js::jsapi::{HandleValue, MutableHandleValue}',
'js::jsval::JSVal',
'util::str::DOMString',
]
# Now find all the things we'll need as arguments and return values because
# we need to wrap or unwrap them.
unionStructs = dict()
for (t, descriptor, dictionary) in getAllTypes(descriptors, dictionaries, callbacks):
assert not descriptor or not dictionary
t = t.unroll()
if not t.isUnion():
continue
name = str(t)
if name not in unionStructs:
provider = descriptor or config.getDescriptorProvider()
unionStructs[name] = CGList([
CGUnionStruct(t, provider),
CGUnionConversionStruct(t, provider)
])
# Sort unionStructs by key, retrieve value
unionStructs = (i[1] for i in sorted(unionStructs.items(), key=operator.itemgetter(0)))
return CGImports(CGList(unionStructs, "\n\n"), [], [], imports, ignored_warnings=[])
class Argument():
"""
A class for outputting the type and name of an argument
"""
def __init__(self, argType, name, default=None, mutable=False):
self.argType = argType
self.name = name
self.default = default
self.mutable = mutable
def declare(self):
decl = ('mut ' if self.mutable else '') + self.name + ((': ' + self.argType) if self.argType else '')
# XXXjdm Support default arguments somehow :/
# if self.default is not None:
#     decl += " = " + self.default
return decl
def define(self):
return self.argType + ' ' + self.name
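# Illustrative sketch:
#
#     Argument('*mut JSContext', 'cx').declare() == 'cx: *mut JSContext'
#     Argument('u32', 'n', mutable=True).declare() == 'mut n: u32'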
class CGAbstractMethod(CGThing):
"""
An abstract class for generating code for a method. Subclasses
should override definition_body to create the actual code.
descriptor is the descriptor for the interface the method is associated with
name is the name of the method as a string
returnType is the IDLType of the return value
args is a list of Argument objects
inline is accepted for compatibility but is currently unused.
alwaysInline should be True to annotate the generated method with
#[inline].
If templateArgs is not None it should be a list of strings containing
template arguments, and the function will be templatized using those
arguments.
docs is None or documentation for the method in a string.
"""
def __init__(self, descriptor, name, returnType, args, inline=False,
alwaysInline=False, extern=False, pub=False, templateArgs=None,
unsafe=False, docs=None):
CGThing.__init__(self)
self.descriptor = descriptor
self.name = name
self.returnType = returnType
self.args = args
self.alwaysInline = alwaysInline
self.extern = extern
self.templateArgs = templateArgs
self.pub = pub
self.unsafe = unsafe
self.docs = docs
def _argstring(self):
return ', '.join([a.declare() for a in self.args])
def _template(self):
if self.templateArgs is None:
return ''
return '<%s>\n' % ', '.join(self.templateArgs)
def _docs(self):
if self.docs is None:
return ''
lines = self.docs.splitlines()
return ''.join('/// %s\n' % line for line in lines)
def _decorators(self):
decorators = []
if self.alwaysInline:
decorators.append('#[inline]')
if self.extern:
decorators.append('unsafe')
decorators.append('extern')
if self.pub:
decorators.append('pub')
if not decorators:
return ''
return ' '.join(decorators) + ' '
def _returnType(self):
return (" -> %s" % self.returnType) if self.returnType != "void" else ""
def define(self):
body = self.definition_body()
# Method will already be marked `unsafe` if `self.extern == True`
if self.unsafe and not self.extern:
body = CGWrapper(CGIndenter(body), pre="unsafe {\n", post="\n}")
return CGWrapper(CGIndenter(body),
pre=self.definition_prologue(),
post=self.definition_epilogue()).define()
def definition_prologue(self):
return "%s%sfn %s%s(%s)%s {\n" % (self._docs(), self._decorators(),
self.name, self._template(),
self._argstring(), self._returnType())
def definition_epilogue(self):
return "\n}\n"
def definition_body(self):
raise NotImplementedError # Override me!
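# Illustrative sketch: a hypothetical subclass whose definition_body() returns
# CGGeneric("0"), constructed with name "Foo", returnType "u32" and
# args [Argument('u32', 'x')], defines:
#
#     fn Foo(x: u32) -> u32 {
#         0
#     }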
def CreateBindingJSObject(descriptor, parent=None):
create = "let raw = Box::into_raw(object);\nlet _rt = RootedTraceable::new(&*raw);\n"
if descriptor.proxy:
assert not descriptor.isGlobal()
create += """
let handler = RegisterBindings::proxy_handlers[PrototypeList::Proxies::%s as usize];
let private = RootedValue::new(cx, PrivateValue(raw as *const libc::c_void));
let obj = NewProxyObject(cx, handler,
private.handle(),
proto.ptr, %s.get(),
ptr::null_mut(), ptr::null_mut());
assert!(!obj.is_null());
let obj = RootedObject::new(cx, obj);\
""" % (descriptor.name, parent)
elif descriptor.isGlobal():
create += ("let obj = RootedObject::new(\n"
" cx,\n"
" create_dom_global(\n"
" cx,\n"
" &Class.base as *const js::jsapi::Class as *const JSClass,\n"
" raw as *const libc::c_void,\n"
" Some(%s))\n"
");\n"
"assert!(!obj.ptr.is_null());" % TRACE_HOOK_NAME)
else:
create += ("let obj = RootedObject::new(cx, JS_NewObjectWithGivenProto(\n"
" cx, &Class.base as *const js::jsapi::Class as *const JSClass, proto.handle()));\n"
"assert!(!obj.ptr.is_null());\n"
"\n"
"JS_SetReservedSlot(obj.ptr, DOM_OBJECT_SLOT,\n"
" PrivateValue(raw as *const libc::c_void));")
if descriptor.weakReferenceable:
create += """
JS_SetReservedSlot(obj.ptr, DOM_WEAK_SLOT, PrivateValue(ptr::null()));"""
return create
def InitUnforgeablePropertiesOnHolder(descriptor, properties):
"""
Define the unforgeable properties on the unforgeable holder for
the interface represented by descriptor.
properties is a PropertyArrays instance.
"""
unforgeables = []
defineUnforgeableAttrs = "define_properties(cx, unforgeable_holder.handle(), %s).unwrap();"
defineUnforgeableMethods = "define_methods(cx, unforgeable_holder.handle(), %s).unwrap();"
unforgeableMembers = [
(defineUnforgeableAttrs, properties.unforgeable_attrs),
(defineUnforgeableMethods, properties.unforgeable_methods),
]
for template, array in unforgeableMembers:
if array.length() > 0:
unforgeables.append(CGGeneric(template % array.variableName()))
return CGList(unforgeables, "\n")
def CopyUnforgeablePropertiesToInstance(descriptor):
"""
Copy the unforgeable properties from the unforgeable holder for
this interface to the instance object we have.
"""
if not descriptor.hasUnforgeableMembers:
return ""
copyCode = ""
# For proxies, we want to define on the expando object, not directly on the
# reflector, so we can make sure we don't get confused by named getters.
if descriptor.proxy:
copyCode += """\
let expando = RootedObject::new(cx, ensure_expando_object(cx, obj.handle()));
"""
obj = "expando"
else:
obj = "obj"
# We can't do the fast copy for globals, because we can't allocate the
# unforgeable holder for those with the right JSClass. Luckily, there
# aren't too many globals being created.
if descriptor.isGlobal():
copyFunc = "JS_CopyPropertiesFrom"
else:
copyFunc = "JS_InitializePropertiesFromCompatibleNativeObject"
copyCode += """\
let mut unforgeable_holder = RootedObject::new(cx, ptr::null_mut());
unforgeable_holder.handle_mut().set(
JS_GetReservedSlot(proto.ptr, DOM_PROTO_UNFORGEABLE_HOLDER_SLOT).to_object());
assert!(%(copyFunc)s(cx, %(obj)s.handle(), unforgeable_holder.handle()));
""" % {'copyFunc': copyFunc, 'obj': obj}
return copyCode
class CGWrapMethod(CGAbstractMethod):
"""
Class that generates the FooBinding::Wrap function for non-callback
interfaces.
"""
def __init__(self, descriptor):
assert not descriptor.interface.isCallback()
if not descriptor.isGlobal():
args = [Argument('*mut JSContext', 'cx'), Argument('GlobalRef', 'scope'),
Argument("Box<%s>" % descriptor.concreteType, 'object')]
else:
args = [Argument('*mut JSContext', 'cx'),
Argument("Box<%s>" % descriptor.concreteType, 'object')]
retval = 'Root<%s>' % descriptor.concreteType
CGAbstractMethod.__init__(self, descriptor, 'Wrap', retval, args,
pub=True, unsafe=True)
def definition_body(self):
unforgeable = CopyUnforgeablePropertiesToInstance(self.descriptor)
if not self.descriptor.isGlobal():
create = CreateBindingJSObject(self.descriptor, "scope")
return CGGeneric("""\
let _ar = JSAutoRequest::new(cx);
let scope = scope.reflector().get_jsobject();
assert!(!scope.get().is_null());
assert!(((*JS_GetClass(scope.get())).flags & JSCLASS_IS_GLOBAL) != 0);
let mut proto = RootedObject::new(cx, ptr::null_mut());
let _ac = JSAutoCompartment::new(cx, scope.get());
GetProtoObject(cx, scope, scope, proto.handle_mut());
assert!(!proto.ptr.is_null());
%(createObject)s
%(copyUnforgeable)s
(*raw).init_reflector(obj.ptr);
Root::from_ref(&*raw)""" % {'copyUnforgeable': unforgeable, 'createObject': create})
else:
create = CreateBindingJSObject(self.descriptor)
return CGGeneric("""\
let _ar = JSAutoRequest::new(cx);
%(createObject)s
let _ac = JSAutoCompartment::new(cx, obj.ptr);
let mut proto = RootedObject::new(cx, ptr::null_mut());
GetProtoObject(cx, obj.handle(), obj.handle(), proto.handle_mut());
JS_SetPrototype(cx, obj.handle(), proto.handle());
%(copyUnforgeable)s
(*raw).init_reflector(obj.ptr);
let ret = Root::from_ref(&*raw);
RegisterBindings::Register(cx, obj.handle());
ret""" % {'copyUnforgeable': unforgeable, 'createObject': create})
class CGIDLInterface(CGThing):
"""
Class for codegen of an implementation of the IDLInterface trait.
"""
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
interface = self.descriptor.interface
name = self.descriptor.name
if (interface.getUserData("hasConcreteDescendant", False) or
interface.getUserData("hasProxyDescendant", False)):
depth = self.descriptor.prototypeDepth
check = "class.interface_chain[%s] == PrototypeList::ID::%s" % (depth, name)
elif self.descriptor.proxy:
check = "class as *const _ == &Class as *const _"
else:
check = "class as *const _ == &Class.dom_class as *const _"
return """\
impl IDLInterface for %(name)s {
#[inline]
fn derives(class: &'static DOMClass) -> bool {
%(check)s
}
}
impl PartialEq for %(name)s {
fn eq(&self, other: &%(name)s) -> bool {
self as *const %(name)s == &*other
}
}
""" % {'check': check, 'name': name}
class CGAbstractExternMethod(CGAbstractMethod):
"""
Abstract base class for codegen of implementation-only (no
declaration) static methods.
"""
def __init__(self, descriptor, name, returnType, args):
CGAbstractMethod.__init__(self, descriptor, name, returnType, args,
inline=False, extern=True)
class PropertyArrays():
def __init__(self, descriptor):
self.static_methods = MethodDefiner(descriptor, "StaticMethods",
static=True, unforgeable=False)
self.static_attrs = AttrDefiner(descriptor, "StaticAttributes",
static=True, unforgeable=False)
self.methods = MethodDefiner(descriptor, "Methods", static=False, unforgeable=False)
self.unforgeable_methods = MethodDefiner(descriptor, "UnforgeableMethods",
static=False, unforgeable=True)
self.attrs = AttrDefiner(descriptor, "Attributes", static=False, unforgeable=False)
self.unforgeable_attrs = AttrDefiner(descriptor, "UnforgeableAttributes",
static=False, unforgeable=True)
self.consts = ConstDefiner(descriptor, "Constants")
@staticmethod
def arrayNames():
return [
"static_methods",
"static_attrs",
"methods",
"unforgeable_methods",
"attrs",
"unforgeable_attrs",
"consts",
]
def variableNames(self):
names = {}
for array in self.arrayNames():
names[array] = getattr(self, array).variableName()
return names
def __str__(self):
define = ""
for array in self.arrayNames():
define += str(getattr(self, array))
return define
class CGCreateInterfaceObjectsMethod(CGAbstractMethod):
"""
Generate the CreateInterfaceObjects method for an interface descriptor.
properties should be a PropertyArrays instance.
"""
def __init__(self, descriptor, properties):
args = [Argument('*mut JSContext', 'cx')]
if not descriptor.interface.isCallback():
args += [Argument('HandleObject', 'global'),
Argument('*mut ProtoOrIfaceArray', 'cache')]
args.append(Argument('HandleObject', 'receiver'))
CGAbstractMethod.__init__(self, descriptor, 'CreateInterfaceObjects', 'void', args,
unsafe=True)
self.properties = properties
def definition_body(self):
name = self.descriptor.interface.identifier.name
if self.descriptor.interface.isCallback():
assert not self.descriptor.interface.ctor() and self.descriptor.interface.hasConstants()
return CGGeneric("""\
create_callback_interface_object(cx, receiver, sConstants, %s);""" % str_to_const_array(name))
protoChain = self.descriptor.prototypeChain
if len(protoChain) == 1:
getPrototypeProto = "prototype_proto.ptr = JS_GetObjectPrototype(cx, global)"
else:
getPrototypeProto = ("%s::GetProtoObject(cx, global, receiver, prototype_proto.handle_mut())" %
toBindingNamespace(self.descriptor.prototypeChain[-2]))
code = [CGGeneric("""\
let mut prototype_proto = RootedObject::new(cx, ptr::null_mut());
%s;
assert!(!prototype_proto.ptr.is_null());""" % getPrototypeProto)]
properties = {"id": name}
for arrayName in self.properties.arrayNames():
array = getattr(self.properties, arrayName)
if arrayName == "consts":
if array.length():
properties[arrayName] = array.variableName()
else:
properties[arrayName] = "&[]"
elif array.length():
properties[arrayName] = "Some(%s)" % array.variableName()
else:
properties[arrayName] = "None"
code.append(CGGeneric("""
let mut prototype = RootedObject::new(cx, ptr::null_mut());
create_interface_prototype_object(cx,
prototype_proto.handle(),
&PrototypeClass,
%(methods)s,
%(attrs)s,
%(consts)s,
prototype.handle_mut());
assert!(!prototype.ptr.is_null());
(*cache)[PrototypeList::ID::%(id)s as usize] = prototype.ptr;
if <*mut JSObject>::needs_post_barrier(prototype.ptr) {
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::ID::%(id)s as isize));
}""" % properties))
if self.descriptor.interface.hasInterfaceObject():
properties["name"] = str_to_const_array(name)
if self.descriptor.interface.ctor():
properties["constructor"] = CONSTRUCT_HOOK_NAME
properties["length"] = methodLength(self.descriptor.interface.ctor())
else:
properties["constructor"] = "throwing_constructor"
properties["length"] = 0
if self.descriptor.interface.parent:
parentName = toBindingNamespace(self.descriptor.getParentName())
code.append(CGGeneric("""
let mut interface_proto = RootedObject::new(cx, ptr::null_mut());
%s::GetConstructorObject(cx, global, receiver, interface_proto.handle_mut());""" % parentName))
else:
code.append(CGGeneric("""
let interface_proto = RootedObject::new(cx, JS_GetFunctionPrototype(cx, global));"""))
code.append(CGGeneric("""\
assert!(!interface_proto.ptr.is_null());
let mut interface = RootedObject::new(cx, ptr::null_mut());
create_noncallback_interface_object(cx,
receiver,
interface_proto.handle(),
&InterfaceObjectClass,
%(static_methods)s,
%(static_attrs)s,
%(consts)s,
prototype.handle(),
%(name)s,
%(length)s,
interface.handle_mut());
assert!(!interface.ptr.is_null());""" % properties))
if self.descriptor.hasDescendants():
code.append(CGGeneric("""\
(*cache)[PrototypeList::Constructor::%(id)s as usize] = interface.ptr;
if <*mut JSObject>::needs_post_barrier(prototype.ptr) {
<*mut JSObject>::post_barrier((*cache).as_mut_ptr().offset(PrototypeList::Constructor::%(id)s as isize));
}""" % properties))
constructors = self.descriptor.interface.namedConstructors
if constructors:
decl = "let named_constructors: [(NonNullJSNative, &'static [u8], u32); %d]" % len(constructors)
specs = []
for constructor in constructors:
hook = CONSTRUCT_HOOK_NAME + "_" + constructor.identifier.name
name = str_to_const_array(constructor.identifier.name)
length = methodLength(constructor)
specs.append(CGGeneric("(%s as NonNullJSNative, %s, %d)" % (hook, name, length)))
values = CGIndenter(CGList(specs, "\n"), 4)
code.append(CGWrapper(values, pre="%s = [\n" % decl, post="\n];"))
code.append(CGGeneric("create_named_constructors(cx, receiver, &named_constructors, prototype.handle());"))
if self.descriptor.hasUnforgeableMembers:
# We want to use the same JSClass and prototype as the object we'll
# end up defining the unforgeable properties on in the end, so that
# we can use JS_InitializePropertiesFromCompatibleNativeObject to do
# a fast copy. In the case of proxies that's null, because the
# expando object is a vanilla object, but in the case of other DOM
# objects it's whatever our class is.
#
# Also, for a global we can't use the global's class; just use
# nullptr and when we do the copy off the holder we'll take a slower
# path. This also means that we don't need to worry about matching
# the prototype.
if self.descriptor.proxy or self.descriptor.isGlobal():
holderClass = "ptr::null()"
holderProto = "HandleObject::null()"
else:
holderClass = "&Class.base as *const js::jsapi::Class as *const JSClass"
holderProto = "prototype.handle()"
code.append(CGGeneric("""
let mut unforgeable_holder = RootedObject::new(cx, ptr::null_mut());
unforgeable_holder.handle_mut().set(
JS_NewObjectWithoutMetadata(cx, %(holderClass)s, %(holderProto)s));
assert!(!unforgeable_holder.ptr.is_null());
""" % {'holderClass': holderClass, 'holderProto': holderProto}))
code.append(InitUnforgeablePropertiesOnHolder(self.descriptor, self.properties))
code.append(CGGeneric("""\
JS_SetReservedSlot(prototype.ptr, DOM_PROTO_UNFORGEABLE_HOLDER_SLOT,
ObjectValue(&*unforgeable_holder.ptr))"""))
return CGList(code, "\n")
class CGGetPerInterfaceObject(CGAbstractMethod):
"""
A method for getting a per-interface object (a prototype object or interface
constructor object).
"""
def __init__(self, descriptor, name, idPrefix="", pub=False):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'global'),
Argument('HandleObject', 'receiver'),
Argument('MutableHandleObject', 'rval')]
CGAbstractMethod.__init__(self, descriptor, name,
'void', args, pub=pub, unsafe=True)
self.id = idPrefix + "::" + self.descriptor.name
def definition_body(self):
return CGGeneric("""
/* global and receiver are usually the same, but they can be different
too. For example a sandbox often has an xray wrapper for a window as the
prototype of the sandbox's global. In that case receiver is the xray
wrapper and global is the sandbox's global.
*/
assert!(((*JS_GetClass(global.get())).flags & JSCLASS_DOM_GLOBAL) != 0);
/* Check to see whether the interface objects are already installed */
let proto_or_iface_array = get_proto_or_iface_array(global.get());
rval.set((*proto_or_iface_array)[%(id)s as usize]);
if !rval.get().is_null() {
return;
}
CreateInterfaceObjects(cx, global, proto_or_iface_array, receiver);
rval.set((*proto_or_iface_array)[%(id)s as usize]);
assert!(!rval.get().is_null());
""" % {"id": self.id})
class CGGetProtoObjectMethod(CGGetPerInterfaceObject):
"""
A method for getting the interface prototype object.
"""
def __init__(self, descriptor):
CGGetPerInterfaceObject.__init__(self, descriptor, "GetProtoObject",
"PrototypeList::ID", pub=descriptor.hasDescendants())
def definition_body(self):
return CGList([
CGGeneric("""\
/* Get the interface prototype object for this class. This will create the
object as needed. */"""),
CGGetPerInterfaceObject.definition_body(self),
])
class CGGetConstructorObjectMethod(CGGetPerInterfaceObject):
"""
A method for getting the interface constructor object.
"""
def __init__(self, descriptor):
CGGetPerInterfaceObject.__init__(self, descriptor, "GetConstructorObject",
"PrototypeList::Constructor",
pub=descriptor.hasDescendants())
def definition_body(self):
return CGList([
CGGeneric("""\
/* Get the interface object for this class. This will create the object as
needed. */"""),
CGGetPerInterfaceObject.definition_body(self),
])
class CGDefineProxyHandler(CGAbstractMethod):
"""
A method to create and cache the proxy trap for a given interface.
"""
def __init__(self, descriptor):
assert descriptor.proxy
CGAbstractMethod.__init__(self, descriptor, 'DefineProxyHandler',
'*const libc::c_void', [],
pub=True, unsafe=True)
def define(self):
return CGAbstractMethod.define(self)
def definition_body(self):
customDefineProperty = 'proxyhandler::define_property'
if self.descriptor.operations['IndexedSetter'] or self.descriptor.operations['NamedSetter']:
customDefineProperty = 'defineProperty'
customDelete = 'proxyhandler::delete'
if self.descriptor.operations['NamedDeleter']:
customDelete = 'delete'
body = """\
let traps = ProxyTraps {
enter: None,
getOwnPropertyDescriptor: Some(getOwnPropertyDescriptor),
defineProperty: Some(%s),
ownPropertyKeys: Some(own_property_keys),
delete_: Some(%s),
enumerate: None,
preventExtensions: Some(proxyhandler::prevent_extensions),
isExtensible: Some(proxyhandler::is_extensible),
has: None,
get: Some(get),
set: None,
call: None,
construct: None,
getPropertyDescriptor: Some(get_property_descriptor),
hasOwn: Some(hasOwn),
getOwnEnumerablePropertyKeys: None,
nativeCall: None,
hasInstance: None,
objectClassIs: None,
className: Some(className),
fun_toString: None,
boxedValue_unbox: None,
defaultValue: None,
trace: Some(%s),
finalize: Some(%s),
objectMoved: None,
isCallable: None,
isConstructor: None,
};
CreateProxyHandler(&traps, &Class as *const _ as *const _)\
""" % (customDefineProperty, customDelete, TRACE_HOOK_NAME, FINALIZE_HOOK_NAME)
return CGGeneric(body)
class CGDefineDOMInterfaceMethod(CGAbstractMethod):
"""
A method for resolve hooks to try to lazily define the interface object for
a given interface.
"""
def __init__(self, descriptor):
assert descriptor.interface.hasInterfaceObject()
args = [
Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'global'),
]
CGAbstractMethod.__init__(self, descriptor, 'DefineDOMInterface', 'void', args, pub=True)
def define(self):
return CGAbstractMethod.define(self)
def definition_body(self):
if self.descriptor.interface.isCallback():
code = "CreateInterfaceObjects(cx, global);"
else:
code = """\
let mut proto = RootedObject::new(cx, ptr::null_mut());
GetProtoObject(cx, global, global, proto.handle_mut());
assert!(!proto.ptr.is_null());
"""
return CGGeneric("assert!(!global.get().is_null());\n" + code)
def needCx(returnType, arguments, considerTypes):
return (considerTypes and
(typeNeedsCx(returnType, True) or
any(typeNeedsCx(a.type) for a in arguments)))
class CGCallGenerator(CGThing):
"""
A class to generate an actual call to a native Rust object. Assumes that
the native object is stored in a variable whose name is given by the
|object| argument.
errorResult should be a string for the value to return in case of an
exception from the native code, or None if no error reporting is needed.
"""
def __init__(self, errorResult, arguments, argsPre, returnType,
extendedAttributes, descriptorProvider, nativeMethodName,
static, object="this"):
CGThing.__init__(self)
assert errorResult is None or isinstance(errorResult, str)
isFallible = errorResult is not None
result = getRetvalDeclarationForType(returnType, descriptorProvider)
if isFallible:
result = CGWrapper(result, pre="Result<", post=", Error>")
args = CGList([CGGeneric(arg) for arg in argsPre], ", ")
for (a, name) in arguments:
# XXXjdm Perhaps we should pass all nontrivial types by borrowed pointer
if a.type.isDictionary():
name = "&" + name
args.append(CGGeneric(name))
needsCx = needCx(returnType, (a for (a, _) in arguments), True)
if "cx" not in argsPre and needsCx:
args.prepend(CGGeneric("cx"))
# Build up our actual call
self.cgRoot = CGList([], "\n")
call = CGGeneric(nativeMethodName)
if static:
call = CGWrapper(call, pre="%s::" % descriptorProvider.interface.identifier.name)
else:
call = CGWrapper(call, pre="%s." % object)
call = CGList([call, CGWrapper(args, pre="(", post=")")])
self.cgRoot.append(CGList([
CGGeneric("let result: "),
result,
CGGeneric(" = "),
call,
CGGeneric(";"),
]))
if isFallible:
if static:
glob = ""
else:
glob = " let global = global_root_from_reflector(this);\n"
self.cgRoot.append(CGGeneric(
"let result = match result {\n"
" Ok(result) => result,\n"
" Err(e) => {\n"
"%s"
" throw_dom_exception(cx, global.r(), e);\n"
" return%s;\n"
" },\n"
"};" % (glob, errorResult)))
def define(self):
return self.cgRoot.define()
class CGPerSignatureCall(CGThing):
"""
This class handles the guts of generating code for a particular
call signature. A call signature consists of four things:
1) A return type, which can be None to indicate that there is no
actual return value (e.g. this is an attribute setter) or an
IDLType if there's an IDL type involved (including |void|).
2) An argument list, which is allowed to be empty.
3) A name of a native method to call.
4) Whether or not this method is static.
We also need to know whether this is a method or a getter/setter
to do error reporting correctly.
The idlNode parameter can be either a method or an attr. We can query
|idlNode.identifier| in both cases, so we can be agnostic between the two.
"""
# XXXbz For now each entry in the argument list is either an
# IDLArgument or a FakeArgument, but longer-term we may want to
# have ways of flagging things like JSContext* or optional_argc in
# there.
def __init__(self, returnType, argsPre, arguments, nativeMethodName, static,
descriptor, idlNode, argConversionStartsAt=0,
getter=False, setter=False):
CGThing.__init__(self)
self.returnType = returnType
self.descriptor = descriptor
self.idlNode = idlNode
self.extendedAttributes = descriptor.getExtendedAttributes(idlNode,
getter=getter,
setter=setter)
self.argsPre = argsPre
self.arguments = arguments
self.argCount = len(arguments)
cgThings = []
cgThings.extend([CGArgumentConverter(arguments[i], i, self.getArgs(),
self.getArgc(), self.descriptor,
invalidEnumValueFatal=not setter) for
i in range(argConversionStartsAt, self.argCount)])
errorResult = None
if self.isFallible():
errorResult = " false"
cgThings.append(CGCallGenerator(
errorResult,
self.getArguments(), self.argsPre, returnType,
self.extendedAttributes, descriptor, nativeMethodName,
static))
self.cgRoot = CGList(cgThings, "\n")
def getArgs(self):
return "args" if self.argCount > 0 else ""
def getArgc(self):
return "argc"
def getArguments(self):
def process(arg, i):
argVal = "arg" + str(i)
if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
argVal += ".r()"
return argVal
return [(a, process(a, i)) for (i, a) in enumerate(self.arguments)]
def isFallible(self):
return 'infallible' not in self.extendedAttributes
def wrap_return_value(self):
return wrapForType('args.rval()')
def define(self):
return (self.cgRoot.define() + "\n" + self.wrap_return_value())
class CGSwitch(CGList):
"""
A class to generate code for a switch statement.
Takes three constructor arguments: an expression, a list of cases,
and an optional default.
Each case is a CGCase. The default is a CGThing for the body of
the default case, if any.
"""
def __init__(self, expression, cases, default=None):
CGList.__init__(self, [CGIndenter(c) for c in cases], "\n")
self.prepend(CGWrapper(CGGeneric(expression),
pre="match ", post=" {"))
if default is not None:
self.append(
CGIndenter(
CGWrapper(
CGIndenter(default),
pre="_ => {\n",
post="\n}"
)
)
)
self.append(CGGeneric("}"))
class CGCase(CGList):
"""
A class to generate code for a case statement.
Takes three constructor arguments: an expression, a CGThing for
the body (allowed to be None if there is no body), and an optional
argument (defaulting to False) for whether to fall through.
"""
def __init__(self, expression, body, fallThrough=False):
CGList.__init__(self, [], "\n")
self.append(CGWrapper(CGGeneric(expression), post=" => {"))
bodyList = CGList([body], "\n")
if fallThrough:
raise TypeError("fall through required but unsupported")
# bodyList.append(CGGeneric('panic!("fall through unsupported"); /* Fall through */'))
self.append(CGIndenter(bodyList))
self.append(CGGeneric("}"))
class CGGetterCall(CGPerSignatureCall):
"""
A class to generate a native object getter call for a particular IDL
getter.
"""
def __init__(self, argsPre, returnType, nativeMethodName, descriptor, attr):
CGPerSignatureCall.__init__(self, returnType, argsPre, [],
nativeMethodName, attr.isStatic(), descriptor,
attr, getter=True)
class FakeArgument():
"""
A class that quacks like an IDLArgument. This is used to make
setters look like method calls or for special operations.
"""
def __init__(self, type, interfaceMember, allowTreatNonObjectAsNull=False):
self.type = type
self.optional = False
self.variadic = False
self.defaultValue = None
self._allowTreatNonObjectAsNull = allowTreatNonObjectAsNull
self.treatNullAs = interfaceMember.treatNullAs
self.enforceRange = False
self.clamp = False
def allowTreatNonCallableAsNull(self):
return self._allowTreatNonObjectAsNull
class CGSetterCall(CGPerSignatureCall):
"""
A class to generate a native object setter call for a particular IDL
setter.
"""
def __init__(self, argsPre, argType, nativeMethodName, descriptor, attr):
CGPerSignatureCall.__init__(self, None, argsPre,
[FakeArgument(argType, attr, allowTreatNonObjectAsNull=True)],
nativeMethodName, attr.isStatic(), descriptor, attr,
setter=True)
def wrap_return_value(self):
# We have no return value
return "\nreturn true;"
def getArgc(self):
return "1"
class CGAbstractStaticBindingMethod(CGAbstractMethod):
"""
Common class to generate the JSNatives for all our static methods, getters
and setters. This will generate the function declaration and unwrap the
global object. Subclasses are expected to override the generate_code
function to do the rest of the work. This function should return a
CGThing which is already properly indented.
"""
def __init__(self, descriptor, name):
args = [
Argument('*mut JSContext', 'cx'),
Argument('libc::c_uint', 'argc'),
Argument('*mut JSVal', 'vp'),
]
CGAbstractMethod.__init__(self, descriptor, name, "bool", args, extern=True)
def definition_body(self):
preamble = CGGeneric("""\
let global = global_root_from_object(JS_CALLEE(cx, vp).to_object());
""")
return CGList([preamble, self.generate_code()])
def generate_code(self):
raise NotImplementedError # Override me!
class CGSpecializedMethod(CGAbstractExternMethod):
"""
A class for generating the Rust code for a specialized method that the JIT
can call with lower overhead.
"""
def __init__(self, descriptor, method):
self.method = method
name = method.identifier.name
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('*const JSJitMethodCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args)
def definition_body(self):
nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
self.method)
return CGWrapper(CGMethodCall([], nativeName, self.method.isStatic(),
self.descriptor, self.method),
pre="let this = &*this;\n"
"let args = &*args;\n"
"let argc = args._base.argc_;\n")
@staticmethod
def makeNativeName(descriptor, method):
name = method.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
return MakeNativeName(nativeName)
class CGStaticMethod(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static method.
"""
def __init__(self, descriptor, method):
self.method = method
name = method.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedMethod.makeNativeName(self.descriptor,
self.method)
setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n")
call = CGMethodCall(["global.r()"], nativeName, True, self.descriptor, self.method)
return CGList([setupArgs, call])
class CGSpecializedGetter(CGAbstractExternMethod):
"""
A class for generating the code for a specialized attribute getter
that the JIT can call with lower overhead.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'get_' + descriptor.internalNameFor(attr.identifier.name)
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', '_obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('JSJitGetterCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args)
def definition_body(self):
nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
self.attr)
return CGWrapper(CGGetterCall([], self.attr.type, nativeName,
self.descriptor, self.attr),
pre="let this = &*this;\n")
@staticmethod
def makeNativeName(descriptor, attr):
name = attr.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
nativeName = MakeNativeName(nativeName)
infallible = ('infallible' in
descriptor.getExtendedAttributes(attr, getter=True))
if attr.type.nullable() or not infallible:
return "Get" + nativeName
return nativeName
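# For example (hypothetical attributes): an infallible, non-nullable
# `attribute boolean checked` yields "Checked", while a nullable or fallible
# `attribute Node? parentNode` yields "GetParentNode".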
class CGStaticGetter(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static attribute getter.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'get_' + attr.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedGetter.makeNativeName(self.descriptor,
self.attr)
setupArgs = CGGeneric("let args = CallArgs::from_vp(vp, argc);\n")
call = CGGetterCall(["global.r()"], self.attr.type, nativeName, self.descriptor,
self.attr)
return CGList([setupArgs, call])
class CGSpecializedSetter(CGAbstractExternMethod):
"""
A class for generating the code for a specialized attribute setter
that the JIT can call with lower overhead.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'set_' + descriptor.internalNameFor(attr.identifier.name)
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'obj'),
Argument('*const %s' % descriptor.concreteType, 'this'),
Argument('JSJitSetterCallArgs', 'args')]
CGAbstractExternMethod.__init__(self, descriptor, name, "bool", args)
def definition_body(self):
nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
self.attr)
return CGWrapper(CGSetterCall([], self.attr.type, nativeName,
self.descriptor, self.attr),
pre="let this = &*this;\n")
@staticmethod
def makeNativeName(descriptor, attr):
name = attr.identifier.name
nativeName = descriptor.binaryNameFor(name)
if nativeName == name:
nativeName = descriptor.internalNameFor(name)
return "Set" + MakeNativeName(nativeName)
class CGStaticSetter(CGAbstractStaticBindingMethod):
"""
A class for generating the Rust code for an IDL static attribute setter.
"""
def __init__(self, descriptor, attr):
self.attr = attr
name = 'set_' + attr.identifier.name
CGAbstractStaticBindingMethod.__init__(self, descriptor, name)
def generate_code(self):
nativeName = CGSpecializedSetter.makeNativeName(self.descriptor,
self.attr)
checkForArg = CGGeneric(
"let args = CallArgs::from_vp(vp, argc);\n"
"if argc == 0 {\n"
" throw_type_error(cx, \"Not enough arguments to %s setter.\");\n"
" return false;\n"
"}" % self.attr.identifier.name)
call = CGSetterCall(["global.r()"], self.attr.type, nativeName, self.descriptor,
self.attr)
return CGList([checkForArg, call])
class CGSpecializedForwardingSetter(CGSpecializedSetter):
"""
A class for generating the code for an IDL attribute forwarding setter.
"""
def __init__(self, descriptor, attr):
CGSpecializedSetter.__init__(self, descriptor, attr)
def definition_body(self):
attrName = self.attr.identifier.name
forwardToAttrName = self.attr.getExtendedAttribute("PutForwards")[0]
# JS_GetProperty and JS_SetProperty can only deal with ASCII
assert all(ord(c) < 128 for c in attrName)
assert all(ord(c) < 128 for c in forwardToAttrName)
return CGGeneric("""\
let mut v = RootedValue::new(cx, UndefinedValue());
if !JS_GetProperty(cx, obj, %s as *const u8 as *const libc::c_char, v.handle_mut()) {
return false;
}
if !v.ptr.is_object() {
throw_type_error(cx, "Value.%s is not an object.");
return false;
}
let target_obj = RootedObject::new(cx, v.ptr.to_object());
JS_SetProperty(cx, target_obj.handle(), %s as *const u8 as *const libc::c_char, args.get(0))
""" % (str_to_const_array(attrName), attrName, str_to_const_array(forwardToAttrName)))
class CGMemberJITInfo(CGThing):
"""
A class for generating the JITInfo for a property that points to
our specialized getter and setter.
"""
def __init__(self, descriptor, member):
self.member = member
self.descriptor = descriptor
def defineJitInfo(self, infoName, opName, opType, infallible, movable,
aliasSet, alwaysInSlot, lazilyInSlot, slotIndex,
returnTypes, args):
"""
aliasSet is a JSJitInfo::AliasSet value, without the "JSJitInfo::" bit.
args is None if we don't want to output argTypes for some
reason (e.g. we have overloads or we're not a method) and
otherwise an iterable of the arguments for this method.
"""
assert not movable or aliasSet != "AliasEverything" # Can't move write-aliasing things
assert not alwaysInSlot or movable # Things always in slots had better be movable
def jitInfoInitializer(isTypedMethod):
initializer = fill(
"""
JSJitInfo {
call: ${opName} as *const ::libc::c_void,
protoID: PrototypeList::ID::${name} as u16,
depth: ${depth},
_bitfield_1:
JSJitInfo::new_bitfield_1(
OpType::${opType} as u8,
AliasSet::${aliasSet} as u8,
JSValueType::${returnType} as u8,
${isInfallible},
${isMovable},
${isAlwaysInSlot},
${isLazilyCachedInSlot},
${isTypedMethod},
${slotIndex} as u16,
)
}
""",
opName=opName,
name=self.descriptor.name,
depth=self.descriptor.interface.inheritanceDepth(),
opType=opType,
aliasSet=aliasSet,
returnType=reduce(CGMemberJITInfo.getSingleReturnType, returnTypes,
""),
isInfallible=toStringBool(infallible),
isMovable=toStringBool(movable),
isAlwaysInSlot=toStringBool(alwaysInSlot),
isLazilyCachedInSlot=toStringBool(lazilyInSlot),
isTypedMethod=toStringBool(isTypedMethod),
slotIndex=slotIndex)
return initializer.rstrip()
if args is not None:
argTypes = "%s_argTypes" % infoName
args = [CGMemberJITInfo.getJSArgType(arg.type) for arg in args]
args.append("ArgType::ArgTypeListEnd as i32")
argTypesDecl = (
"const %s: [i32; %d] = [ %s ];\n" %
(argTypes, len(args), ", ".join(args)))
return fill(
"""
$*{argTypesDecl}
const ${infoName}: JSTypedMethodJitInfo = JSTypedMethodJitInfo {
base: ${jitInfo},
argTypes: &${argTypes} as *const _ as *const ArgType,
};
""",
argTypesDecl=argTypesDecl,
infoName=infoName,
jitInfo=indent(jitInfoInitializer(True)),
argTypes=argTypes)
return ("\n"
"const %s: JSJitInfo = %s;\n"
% (infoName, jitInfoInitializer(False)))
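# A sketch of the Rust emitted by the simple (non-typed-method) branch above,
# assuming a hypothetical infallible, movable getter `get_width` returning
# int32 on an interface `Foo` at inheritance depth 0:
#
#     const width_getterinfo: JSJitInfo = JSJitInfo {
#         call: get_width as *const ::libc::c_void,
#         protoID: PrototypeList::ID::Foo as u16,
#         depth: 0,
#         _bitfield_1:
#             JSJitInfo::new_bitfield_1(
#                 OpType::Getter as u8,
#                 AliasSet::AliasDOMSets as u8,
#                 JSValueType::JSVAL_TYPE_INT32 as u8,
#                 true, true, false, false, false,
#                 0 as u16,
#             )
#     };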
def define(self):
if self.member.isAttr():
internalMemberName = self.descriptor.internalNameFor(self.member.identifier.name)
getterinfo = ("%s_getterinfo" % internalMemberName)
getter = ("get_%s" % internalMemberName)
getterinfal = "infallible" in self.descriptor.getExtendedAttributes(self.member, getter=True)
movable = self.mayBeMovable() and getterinfal
aliasSet = self.aliasSet()
isAlwaysInSlot = self.member.getExtendedAttribute("StoreInSlot")
if self.member.slotIndex is not None:
assert isAlwaysInSlot or self.member.getExtendedAttribute("Cached")
isLazilyCachedInSlot = not isAlwaysInSlot
slotIndex = memberReservedSlot(self.member)  # noqa -- FIXME: memberReservedSlot is not defined
# We'll statically assert that this is not too big in
# CGUpdateMemberSlotsMethod, in the case when
# isAlwaysInSlot is true.
else:
isLazilyCachedInSlot = False
slotIndex = "0"
result = self.defineJitInfo(getterinfo, getter, "Getter",
getterinfal, movable, aliasSet,
isAlwaysInSlot, isLazilyCachedInSlot,
slotIndex,
[self.member.type], None)
if (not self.member.readonly or self.member.getExtendedAttribute("PutForwards")):
setterinfo = ("%s_setterinfo" % internalMemberName)
setter = ("set_%s" % internalMemberName)
# Setters are always fallible, since they have to do a typed unwrap.
result += self.defineJitInfo(setterinfo, setter, "Setter",
False, False, "AliasEverything",
False, False, "0",
[BuiltinTypes[IDLBuiltinType.Types.void]],
None)
return result
if self.member.isMethod():
methodinfo = ("%s_methodinfo" % self.member.identifier.name)
method = ("%s" % self.member.identifier.name)
# Methods are infallible if they are infallible, have no arguments
# to unwrap, and have a return type that's infallible to wrap up for
# return.
sigs = self.member.signatures()
if len(sigs) != 1:
# Don't handle overloading. If there's more than one signature,
# one of them must take arguments.
methodInfal = False
args = None
movable = False
else:
sig = sigs[0]
# For methods that affect nothing, it's OK to set movable to our
# notion of infallible on the C++ side, without considering
# argument conversions, since argument conversions that can
# reliably throw would be effectful anyway and the jit doesn't
# move effectful things.
hasInfallibleImpl = "infallible" in self.descriptor.getExtendedAttributes(self.member)
movable = self.mayBeMovable() and hasInfallibleImpl
# XXXbz can we move the smarts about fallibility due to arg
# conversions into the JIT, using our new args stuff?
if len(sig[1]) != 0:
# We have arguments to unwrap, so the call can fail
methodInfal = False
else:
methodInfal = hasInfallibleImpl
# For now, only bother to output args if we're side-effect-free.
if self.member.affects == "Nothing":
args = sig[1]
else:
args = None
aliasSet = self.aliasSet()
result = self.defineJitInfo(methodinfo, method, "Method",
methodInfal, movable, aliasSet,
False, False, "0",
[s[0] for s in sigs], args)
return result
raise TypeError("Illegal member type to CGPropertyJITInfo")
def mayBeMovable(self):
"""
Returns whether this attribute or method may be movable, just
based on Affects/DependsOn annotations.
"""
affects = self.member.affects
dependsOn = self.member.dependsOn
assert affects in IDLInterfaceMember.AffectsValues
assert dependsOn in IDLInterfaceMember.DependsOnValues
# Things that are DependsOn=DeviceState are not movable, because we
# don't want them coalesced with each other or loop-hoisted, since
# their return value can change even if nothing is going on from our
# point of view.
return (affects == "Nothing" and
(dependsOn != "Everything" and dependsOn != "DeviceState"))
def aliasSet(self):
"""Returns the alias set to store in the jitinfo. This may not be the
effective alias set the JIT uses, depending on whether we have enough
information about our args to allow the JIT to prove that effectful
argument conversions won't happen.
"""
dependsOn = self.member.dependsOn
assert dependsOn in IDLInterfaceMember.DependsOnValues
if dependsOn == "Nothing" or dependsOn == "DeviceState":
assert self.member.affects == "Nothing"
return "AliasNone"
if dependsOn == "DOMState":
assert self.member.affects == "Nothing"
return "AliasDOMSets"
return "AliasEverything"
@staticmethod
def getJSReturnTypeTag(t):
if t.nullable():
# Sometimes it might return null, sometimes not
return "JSVAL_TYPE_UNKNOWN"
if t.isVoid():
# No return, every time
return "JSVAL_TYPE_UNDEFINED"
if t.isArray():
# No idea yet
assert False
if t.isSequence():
return "JSVAL_TYPE_OBJECT"
if t.isMozMap():
return "JSVAL_TYPE_OBJECT"
if t.isGeckoInterface():
return "JSVAL_TYPE_OBJECT"
if t.isString():
return "JSVAL_TYPE_STRING"
if t.isEnum():
return "JSVAL_TYPE_STRING"
if t.isCallback():
return "JSVAL_TYPE_OBJECT"
if t.isAny():
# The whole point is to return various stuff
return "JSVAL_TYPE_UNKNOWN"
if t.isObject():
return "JSVAL_TYPE_OBJECT"
if t.isSpiderMonkeyInterface():
return "JSVAL_TYPE_OBJECT"
if t.isUnion():
u = t.unroll()
if u.hasNullableType:
# Might be null or not
return "JSVAL_TYPE_UNKNOWN"
return reduce(CGMemberJITInfo.getSingleReturnType,
u.flatMemberTypes, "")
if t.isDictionary():
return "JSVAL_TYPE_OBJECT"
if t.isDate():
return "JSVAL_TYPE_OBJECT"
if not t.isPrimitive():
raise TypeError("No idea what type " + str(t) + " is.")
tag = t.tag()
if tag == IDLType.Tags.bool:
return "JSVAL_TYPE_BOOLEAN"
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32]:
return "JSVAL_TYPE_INT32"
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
# These all use JS_NumberValue, which can return int or double.
# But TI treats "double" as meaning "int or double", so we're
# good to return JSVAL_TYPE_DOUBLE here.
return "JSVAL_TYPE_DOUBLE"
if tag != IDLType.Tags.uint32:
raise TypeError("No idea what type " + str(t) + " is.")
# uint32 is sometimes int and sometimes double.
return "JSVAL_TYPE_DOUBLE"
@staticmethod
def getSingleReturnType(existingType, t):
type = CGMemberJITInfo.getJSReturnTypeTag(t)
if existingType == "":
# First element of the list; just return its type
return type
if type == existingType:
return existingType
if ((type == "JSVAL_TYPE_DOUBLE" and
existingType == "JSVAL_TYPE_INT32") or
(existingType == "JSVAL_TYPE_DOUBLE" and
type == "JSVAL_TYPE_INT32")):
# Promote INT32 to DOUBLE as needed
return "JSVAL_TYPE_DOUBLE"
# Different types
return "JSVAL_TYPE_UNKNOWN"
@staticmethod
def getJSArgType(t):
assert not t.isVoid()
if t.nullable():
# The argument may be null sometimes, and non-null other times
return "ArgType::Null as i32 | %s" % CGMemberJITInfo.getJSArgType(t.inner)
if t.isArray():
# No idea yet
assert False
if t.isSequence():
return "ArgType::Object as i32"
if t.isGeckoInterface():
return "ArgType::Object as i32"
if t.isString():
return "ArgType::String as i32"
if t.isEnum():
return "ArgType::String as i32"
if t.isCallback():
return "ArgType::Object as i32"
if t.isAny():
# The whole point is to accept various stuff
return "ArgType::Any as i32"
if t.isObject():
return "ArgType::Object as i32"
if t.isSpiderMonkeyInterface():
return "ArgType::Object as i32"
if t.isUnion():
u = t.unroll()
type = "JSJitInfo::Null as i32" if u.hasNullableType else ""
return reduce(CGMemberJITInfo.getSingleArgType,
u.flatMemberTypes, type)
if t.isDictionary():
return "ArgType::Object as i32"
if t.isDate():
return "ArgType::Object as i32"
if not t.isPrimitive():
raise TypeError("No idea what type " + str(t) + " is.")
tag = t.tag()
if tag == IDLType.Tags.bool:
return "ArgType::Boolean as i32"
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32]:
return "ArgType::Integer as i32"
if tag in [IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
# These all use JS_NumberValue, which can produce int or double.
# But TI treats "double" as meaning "int or double", so
# ArgType::Double is the right answer here.
return "ArgType::Double as i32"
if tag != IDLType.Tags.uint32:
raise TypeError("No idea what type " + str(t) + " is.")
# uint32 is sometimes int and sometimes double.
return "ArgType::Double as i32"
@staticmethod
def getSingleArgType(existingType, t):
type = CGMemberJITInfo.getJSArgType(t)
if existingType == "":
# First element of the list; just return its type
return type
if type == existingType:
return existingType
return "%s | %s" % (existingType, type)
def getEnumValueName(value):
# Some enum values can be empty strings. Others might have weird
# characters in them. Deal with the former by returning "_empty",
# deal with possible name collisions from that by throwing if the
# enum value is actually "_empty", and throw on any value
# containing non-ASCII chars for now. Replace all chars other than
# [0-9A-Za-z_] with '_'.
if re.match("[^\x20-\x7E]", value):
raise SyntaxError('Enum value "' + value + '" contains non-ASCII characters')
if re.match("^[0-9]", value):
raise SyntaxError('Enum value "' + value + '" starts with a digit')
value = re.sub(r'[^0-9A-Za-z_]', '_', value)
if re.match("^_[A-Z]|__", value):
raise SyntaxError('Enum value "' + value + '" is reserved by the C++ spec')
if value == "_empty":
raise SyntaxError('"_empty" is not an IDL enum value we support yet')
if value == "":
return "_empty"
return MakeNativeName(value)
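# For example:
#     getEnumValueName("")          ->  "_empty"
#     getEnumValueName("text/html") ->  "Text_html"  ('/' mapped to '_', then capitalized)
#     getEnumValueName("2d")        ->  SyntaxError (starts with a digit)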
class CGEnum(CGThing):
def __init__(self, enum):
CGThing.__init__(self)
decl = """\
#[repr(usize)]
#[derive(JSTraceable, PartialEq, Copy, Clone, HeapSizeOf)]
pub enum %s {
%s
}
""" % (enum.identifier.name, ",\n ".join(map(getEnumValueName, enum.values())))
inner = """\
use dom::bindings::conversions::ToJSValConvertible;
use js::jsapi::{JSContext, MutableHandleValue};
use js::jsval::JSVal;
pub const strings: &'static [&'static str] = &[
%s,
];
impl ToJSValConvertible for super::%s {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
strings[*self as usize].to_jsval(cx, rval);
}
}
""" % (",\n ".join(['"%s"' % val for val in enum.values()]), enum.identifier.name)
self.cgRoot = CGList([
CGGeneric(decl),
CGNamespace.build([enum.identifier.name + "Values"],
CGIndenter(CGGeneric(inner)), public=True),
])
def define(self):
return self.cgRoot.define()
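# A sketch of the Rust emitted for a hypothetical `enum MealType { "", "lunch" }`:
#
#     #[repr(usize)]
#     #[derive(JSTraceable, PartialEq, Copy, Clone, HeapSizeOf)]
#     pub enum MealType {
#         _empty,
#         Lunch
#     }
#
# together with a `MealTypeValues` module containing the `strings` slice and a
# ToJSValConvertible impl that indexes into it.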
def convertConstIDLValueToRust(value):
tag = value.type.tag()
if tag in [IDLType.Tags.int8, IDLType.Tags.uint8,
IDLType.Tags.int16, IDLType.Tags.uint16,
IDLType.Tags.int32, IDLType.Tags.uint32,
IDLType.Tags.int64, IDLType.Tags.uint64,
IDLType.Tags.unrestricted_float, IDLType.Tags.float,
IDLType.Tags.unrestricted_double, IDLType.Tags.double]:
return str(value.value)
if tag == IDLType.Tags.bool:
return toStringBool(value.value)
raise TypeError("Const value of unhandled type: " + value.type)
class CGConstant(CGThing):
def __init__(self, constants):
CGThing.__init__(self)
self.constants = constants
def define(self):
def stringDecl(const):
name = const.identifier.name
value = convertConstIDLValueToRust(const.value)
return CGGeneric("pub const %s: %s = %s;\n" % (name, builtinNames[const.value.type.tag()], value))
return CGIndenter(CGList(stringDecl(m) for m in self.constants)).define()
def getUnionTypeTemplateVars(type, descriptorProvider):
# For dictionaries and sequences we need to pass None as the failureCode
# for getJSToNativeConversionInfo.
# Also, for dictionaries we would need to handle conversion of
# null/undefined to the dictionary correctly.
if type.isDictionary() or type.isSequence():
raise TypeError("Can't handle dictionaries or sequences in unions")
if type.isGeckoInterface():
name = type.inner.identifier.name
typeName = descriptorProvider.getDescriptor(name).returnType
elif type.isEnum():
name = type.inner.identifier.name
typeName = name
elif type.isArray() or type.isSequence():
name = str(type)
# XXXjdm dunno about typeName here
typeName = "/*" + type.name + "*/"
elif type.isDOMString():
name = type.name
typeName = "DOMString"
elif type.isUSVString():
name = type.name
typeName = "USVString"
elif type.isPrimitive():
name = type.name
typeName = builtinNames[type.tag()]
else:
name = type.name
typeName = "/*" + type.name + "*/"
info = getJSToNativeConversionInfo(
type, descriptorProvider, failureCode="return Ok(None);",
exceptionCode='return Err(());',
isDefinitelyObject=True)
template = info.template
assert not type.isObject()
jsConversion = string.Template(template).substitute({
"val": "value",
})
jsConversion = CGWrapper(CGGeneric(jsConversion), pre="Ok(Some(", post="))")
return {
"name": name,
"typeName": typeName,
"jsConversion": jsConversion,
}
class CGUnionStruct(CGThing):
def __init__(self, type, descriptorProvider):
assert not type.nullable()
assert not type.hasNullableType
CGThing.__init__(self)
self.type = type
self.descriptorProvider = descriptorProvider
def define(self):
templateVars = map(lambda t: getUnionTypeTemplateVars(t, self.descriptorProvider),
self.type.flatMemberTypes)
enumValues = [
" e%s(%s)," % (v["name"], v["typeName"]) for v in templateVars
]
enumConversions = [
" %s::e%s(ref inner) => inner.to_jsval(cx, rval),"
% (self.type, v["name"]) for v in templateVars
]
return ("""\
pub enum %s {
%s
}
impl ToJSValConvertible for %s {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
match *self {
%s
}
}
}
""") % (self.type, "\n".join(enumValues), self.type, "\n".join(enumConversions))
class CGUnionConversionStruct(CGThing):
def __init__(self, type, descriptorProvider):
assert not type.nullable()
assert not type.hasNullableType
CGThing.__init__(self)
self.type = type
self.descriptorProvider = descriptorProvider
def from_jsval(self):
memberTypes = self.type.flatMemberTypes
names = []
conversions = []
interfaceMemberTypes = filter(lambda t: t.isNonCallbackInterface(), memberTypes)
if len(interfaceMemberTypes) > 0:
def get_name(memberType):
if self.type.isGeckoInterface():
return memberType.inner.identifier.name
return memberType.name
def get_match(name):
return (
"match %s::TryConvertTo%s(cx, value) {\n"
" Err(_) => return Err(()),\n"
" Ok(Some(value)) => return Ok(%s::e%s(value)),\n"
" Ok(None) => (),\n"
"}\n") % (self.type, name, self.type, name)
typeNames = [get_name(memberType) for memberType in interfaceMemberTypes]
interfaceObject = CGList(CGGeneric(get_match(typeName)) for typeName in typeNames)
names.extend(typeNames)
else:
interfaceObject = None
arrayObjectMemberTypes = filter(lambda t: t.isArray() or t.isSequence(), memberTypes)
if len(arrayObjectMemberTypes) > 0:
assert len(arrayObjectMemberTypes) == 1
raise TypeError("Can't handle arrays or sequences in unions.")
else:
arrayObject = None
dateObjectMemberTypes = filter(lambda t: t.isDate(), memberTypes)
if len(dateObjectMemberTypes) > 0:
assert len(dateObjectMemberTypes) == 1
raise TypeError("Can't handle dates in unions.")
else:
dateObject = None
callbackMemberTypes = filter(lambda t: t.isCallback() or t.isCallbackInterface(), memberTypes)
if len(callbackMemberTypes) > 0:
assert len(callbackMemberTypes) == 1
raise TypeError("Can't handle callbacks in unions.")
else:
callbackObject = None
dictionaryMemberTypes = filter(lambda t: t.isDictionary(), memberTypes)
if len(dictionaryMemberTypes) > 0:
raise TypeError("No support for unwrapping dictionaries as member "
"of a union")
else:
dictionaryObject = None
if callbackObject or dictionaryObject:
assert False, "Not currently supported"
else:
nonPlatformObject = None
objectMemberTypes = filter(lambda t: t.isObject(), memberTypes)
if len(objectMemberTypes) > 0:
raise TypeError("Can't handle objects in unions.")
else:
object = None
hasObjectTypes = interfaceObject or arrayObject or dateObject or nonPlatformObject or object
if hasObjectTypes:
assert interfaceObject
templateBody = CGList([interfaceObject], "\n")
conversions.append(CGIfWrapper("value.get().is_object()", templateBody))
otherMemberTypes = [
t for t in memberTypes if t.isPrimitive() or t.isString() or t.isEnum()
]
if len(otherMemberTypes) > 0:
assert len(otherMemberTypes) == 1
memberType = otherMemberTypes[0]
if memberType.isEnum():
name = memberType.inner.identifier.name
else:
name = memberType.name
match = (
"match %s::TryConvertTo%s(cx, value) {\n"
" Err(_) => return Err(()),\n"
" Ok(Some(value)) => return Ok(%s::e%s(value)),\n"
" Ok(None) => (),\n"
"}\n") % (self.type, name, self.type, name)
conversions.append(CGGeneric(match))
names.append(name)
conversions.append(CGGeneric(
"throw_not_in_union(cx, \"%s\");\n"
"Err(())" % ", ".join(names)))
method = CGWrapper(
CGIndenter(CGList(conversions, "\n\n")),
pre="unsafe fn from_jsval(cx: *mut JSContext,\n"
" value: HandleValue, _option: ()) -> Result<%s, ()> {\n" % self.type,
post="\n}")
return CGWrapper(
CGIndenter(CGList([
CGGeneric("type Config = ();"),
method,
], "\n")),
pre="impl FromJSValConvertible for %s {\n" % self.type,
post="\n}")
def try_method(self, t):
templateVars = getUnionTypeTemplateVars(t, self.descriptorProvider)
returnType = "Result<Option<%s>, ()>" % templateVars["typeName"]
jsConversion = templateVars["jsConversion"]
return CGWrapper(
CGIndenter(jsConversion, 4),
pre="unsafe fn TryConvertTo%s(cx: *mut JSContext, value: HandleValue) -> %s {\n" % (t.name, returnType),
post="\n}")
def define(self):
from_jsval = self.from_jsval()
methods = CGIndenter(CGList([
self.try_method(t) for t in self.type.flatMemberTypes
], "\n\n"))
return """
%s
impl %s {
%s
}
""" % (from_jsval.define(), self.type, methods.define())
class ClassItem:
""" Use with CGClass """
def __init__(self, name, visibility):
self.name = name
self.visibility = visibility
def declare(self, cgClass):
assert False
def define(self, cgClass):
assert False
class ClassBase(ClassItem):
def __init__(self, name, visibility='pub'):
ClassItem.__init__(self, name, visibility)
def declare(self, cgClass):
return '%s %s' % (self.visibility, self.name)
def define(self, cgClass):
# Only in the header
return ''
class ClassMethod(ClassItem):
def __init__(self, name, returnType, args, inline=False, static=False,
virtual=False, const=False, bodyInHeader=False,
templateArgs=None, visibility='public', body=None,
breakAfterReturnDecl="\n",
breakAfterSelf="\n", override=False):
"""
override indicates whether to flag the method as MOZ_OVERRIDE
"""
assert not override or virtual
assert not (override and static)
self.returnType = returnType
self.args = args
self.inline = False  # 'inline' is accepted for compatibility but ignored: the Rust output has no separate declaration to inline
self.static = static
self.virtual = virtual
self.const = const
self.bodyInHeader = True  # bodies are always emitted with the declaration; there is no separate definition
self.templateArgs = templateArgs
self.body = body
self.breakAfterReturnDecl = breakAfterReturnDecl
self.breakAfterSelf = breakAfterSelf
self.override = override
ClassItem.__init__(self, name, visibility)
def getDecorators(self, declaring):
decorators = []
if self.inline:
decorators.append('inline')
if declaring:
if self.static:
decorators.append('static')
if self.virtual:
decorators.append('virtual')
if decorators:
return ' '.join(decorators) + ' '
return ''
def getBody(self):
# Override me or pass a string to constructor
assert self.body is not None
return self.body
def declare(self, cgClass):
templateClause = '<%s>' % ', '.join(self.templateArgs) \
if self.bodyInHeader and self.templateArgs else ''
args = ', '.join([a.declare() for a in self.args])
if self.bodyInHeader:
body = CGIndenter(CGGeneric(self.getBody())).define()
body = ' {\n' + body + '\n}'
else:
body = ';'
return string.Template(
"${decorators}%s"
"${visibility}fn ${name}${templateClause}(${args})${returnType}${const}${override}${body}%s" %
(self.breakAfterReturnDecl, self.breakAfterSelf)
).substitute({
'templateClause': templateClause,
'decorators': self.getDecorators(True),
'returnType': (" -> %s" % self.returnType) if self.returnType else "",
'name': self.name,
'const': ' const' if self.const else '',
'override': ' MOZ_OVERRIDE' if self.override else '',
'args': args,
'body': body,
'visibility': self.visibility + ' ' if self.visibility != 'priv' else ''
})
def define(self, cgClass):
pass
class ClassConstructor(ClassItem):
"""
Used for adding a constructor to a CGClass.
args is a list of Argument objects that are the arguments taken by the
constructor.
inline should be True if the constructor should be marked inline.
bodyInHeader should be True if the body should be placed in the class
declaration in the header.
visibility determines the visibility of the constructor (public,
protected, private), defaults to private.
explicit should be True if the constructor should be marked explicit.
baseConstructors is a list of strings containing calls to base constructors,
defaults to None.
body contains a string with the code for the constructor, defaults to empty.
"""
def __init__(self, args, inline=False, bodyInHeader=False,
visibility="priv", explicit=False, baseConstructors=None,
body=""):
self.args = args
self.inline = False  # 'inline' is accepted for signature compatibility but ignored
self.bodyInHeader = bodyInHeader
self.explicit = explicit
self.baseConstructors = baseConstructors or []
self.body = body
ClassItem.__init__(self, None, visibility)
def getDecorators(self, declaring):
decorators = []
if self.explicit:
decorators.append('explicit')
if self.inline and declaring:
decorators.append('inline')
if decorators:
return ' '.join(decorators) + ' '
return ''
def getInitializationList(self, cgClass):
items = [str(c) for c in self.baseConstructors]
for m in cgClass.members:
if not m.static:
initialize = m.body
if initialize:
items.append(m.name + "(" + initialize + ")")
if len(items) > 0:
return '\n : ' + ',\n '.join(items)
return ''
def getBody(self, cgClass):
initializers = [" parent: %s" % str(self.baseConstructors[0])]
return (self.body + (
"let mut ret = Rc::new(%s {\n"
"%s\n"
"});\n"
"// Note: callback cannot be moved after calling init.\n"
"match Rc::get_mut(&mut ret) {\n"
" Some(ref mut callback) => callback.parent.init(%s),\n"
" None => unreachable!(),\n"
"};\n"
"ret") % (cgClass.name, '\n'.join(initializers), self.args[0].name))
def declare(self, cgClass):
args = ', '.join([a.declare() for a in self.args])
body = ' ' + self.getBody(cgClass)
body = stripTrailingWhitespace(body.replace('\n', '\n '))
if len(body) > 0:
body += '\n'
body = ' {\n' + body + '}'
return string.Template("""\
pub fn ${decorators}new(${args}) -> Rc<${className}>${body}
""").substitute({'decorators': self.getDecorators(True),
'className': cgClass.getNameString(),
'args': args,
'body': body})
def define(self, cgClass):
if self.bodyInHeader:
return ''
args = ', '.join([a.define() for a in self.args])
body = ' ' + self.getBody(cgClass)
body = '\n' + stripTrailingWhitespace(body.replace('\n', '\n '))
if len(body) > 0:
body += '\n'
return string.Template("""\
${decorators}
${className}::${className}(${args})${initializationList}
{${body}}
""").substitute({'decorators': self.getDecorators(False),
'className': cgClass.getNameString(),
'args': args,
'initializationList': self.getInitializationList(cgClass),
'body': body})
class ClassMember(ClassItem):
def __init__(self, name, type, visibility="priv", static=False,
body=None):
self.type = type
self.static = static
self.body = body
ClassItem.__init__(self, name, visibility)
def declare(self, cgClass):
return '%s %s: %s,\n' % (self.visibility, self.name, self.type)
def define(self, cgClass):
if not self.static:
return ''
if self.body:
body = " = " + self.body
else:
body = ""
return '%s %s::%s%s;\n' % (self.type, cgClass.getNameString(),
self.name, body)
class CGClass(CGThing):
def __init__(self, name, bases=[], members=[], constructors=[],
destructor=None, methods=[],
typedefs=[], enums=[], unions=[], templateArgs=[],
templateSpecialization=[],
disallowCopyConstruction=False, indent='',
decorators='',
extradeclarations=''):
CGThing.__init__(self)
self.name = name
self.bases = bases
self.members = members
self.constructors = constructors
# We store our single destructor in a list, since all of our
# code wants lists of members.
self.destructors = [destructor] if destructor else []
self.methods = methods
self.typedefs = typedefs
self.enums = enums
self.unions = unions
self.templateArgs = templateArgs
self.templateSpecialization = templateSpecialization
self.disallowCopyConstruction = disallowCopyConstruction
self.indent = indent
self.decorators = decorators
self.extradeclarations = extradeclarations
def getNameString(self):
className = self.name
if self.templateSpecialization:
className = className + \
'<%s>' % ', '.join([str(a) for a
in self.templateSpecialization])
return className
def define(self):
result = ''
if self.templateArgs:
templateArgs = [a.declare() for a in self.templateArgs]
templateArgs = templateArgs[len(self.templateSpecialization):]
result = result + self.indent + 'template <%s>\n' % ','.join([str(a) for a in templateArgs])
if self.templateSpecialization:
specialization = \
'<%s>' % ', '.join([str(a) for a in self.templateSpecialization])
else:
specialization = ''
myself = ''
if self.decorators != '':
myself += self.decorators + '\n'
myself += '%spub struct %s%s' % (self.indent, self.name, specialization)
result += myself
assert len(self.bases) == 1  # XXXjdm Can we support multiple inheritance?
result += ' {\n'
if self.bases:
self.members = [ClassMember("parent", self.bases[0].name, "pub")] + self.members
result += CGIndenter(CGGeneric(self.extradeclarations),
len(self.indent)).define()
def declareMembers(cgClass, memberList):
result = ''
for member in memberList:
declaration = member.declare(cgClass)
declaration = CGIndenter(CGGeneric(declaration)).define()
result = result + declaration
return result
if self.disallowCopyConstruction:
class DisallowedCopyConstructor(object):
def __init__(self):
self.visibility = "private"
def declare(self, cgClass):
name = cgClass.getNameString()
return ("%s(const %s&) MOZ_DELETE;\n"
"void operator=(const %s) MOZ_DELETE;\n" % (name, name, name))
disallowedCopyConstructors = [DisallowedCopyConstructor()]
else:
disallowedCopyConstructors = []
order = [(self.enums, ''), (self.unions, ''),
(self.typedefs, ''), (self.members, '')]
for (memberList, separator) in order:
memberString = declareMembers(self, memberList)
if self.indent:
memberString = CGIndenter(CGGeneric(memberString),
len(self.indent)).define()
result = result + memberString
result += self.indent + '}\n\n'
result += 'impl %s {\n' % self.name
order = [(self.constructors + disallowedCopyConstructors, '\n'),
(self.destructors, '\n'), (self.methods, '\n')]
for (memberList, separator) in order:
memberString = declareMembers(self, memberList)
if self.indent:
memberString = CGIndenter(CGGeneric(memberString),
len(self.indent)).define()
result = result + memberString
result += "}"
return result
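# A sketch of the overall shape CGClass emits, for a hypothetical callback
# class with a single base:
#
#     pub struct EventHandlerNonNull {
#         pub parent: CallbackFunction,
#     }
#     impl EventHandlerNonNull {
#         pub fn new(callback: *mut JSObject) -> Rc<EventHandlerNonNull> { ... }
#     }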
class CGProxySpecialOperation(CGPerSignatureCall):
"""
Base class for generating calls to an indexed or named special operation
(don't use this directly; use the derived classes below).
"""
def __init__(self, descriptor, operation):
nativeName = MakeNativeName(descriptor.binaryNameFor(operation))
operation = descriptor.operations[operation]
assert len(operation.signatures()) == 1
signature = operation.signatures()[0]
(returnType, arguments) = signature
# We pass len(arguments) as the final argument so that the
# CGPerSignatureCall won't do any argument conversion of its own.
CGPerSignatureCall.__init__(self, returnType, "", arguments, nativeName,
False, descriptor, operation,
len(arguments))
if operation.isSetter() or operation.isCreator():
# arguments[0] is the index or name of the item that we're setting.
argument = arguments[1]
info = getJSToNativeConversionInfo(
argument.type, descriptor, treatNullAs=argument.treatNullAs,
exceptionCode="return false;")
template = info.template
declType = info.declType
templateValues = {
"val": "value.handle()",
}
self.cgRoot.prepend(instantiateJSToNativeConversionTemplate(
template, templateValues, declType, argument.identifier.name))
self.cgRoot.prepend(CGGeneric("let value = RootedValue::new(cx, desc.get().value);"))
elif operation.isGetter():
self.cgRoot.prepend(CGGeneric("let mut found = false;"))
def getArguments(self):
def process(arg):
argVal = arg.identifier.name
if arg.type.isGeckoInterface() and not arg.type.unroll().inner.isCallback():
argVal += ".r()"
return argVal
args = [(a, process(a)) for a in self.arguments]
if self.idlNode.isGetter():
args.append((FakeArgument(BuiltinTypes[IDLBuiltinType.Types.boolean],
self.idlNode),
"&mut found"))
return args
def wrap_return_value(self):
if not self.idlNode.isGetter() or self.templateValues is None:
return ""
wrap = CGGeneric(wrapForType(**self.templateValues))
wrap = CGIfWrapper("found", wrap)
return "\n" + wrap.define()
class CGProxyIndexedGetter(CGProxySpecialOperation):
"""
Class to generate a call to an indexed getter. If templateValues is not None
the returned value will be wrapped with wrapForType using templateValues.
"""
def __init__(self, descriptor, templateValues=None):
self.templateValues = templateValues
CGProxySpecialOperation.__init__(self, descriptor, 'IndexedGetter')
class CGProxyIndexedSetter(CGProxySpecialOperation):
"""
Class to generate a call to an indexed setter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'IndexedSetter')
class CGProxyNamedOperation(CGProxySpecialOperation):
"""
Class to generate a call to a named operation.
"""
def __init__(self, descriptor, name):
CGProxySpecialOperation.__init__(self, descriptor, name)
def define(self):
# Our first argument is the id we're getting.
argName = self.arguments[0].identifier.name
return ("let %s = jsid_to_str(cx, id);\n"
"let this = UnwrapProxy(proxy);\n"
"let this = &*this;\n" % argName +
CGProxySpecialOperation.define(self))
class CGProxyNamedGetter(CGProxyNamedOperation):
"""
Class to generate a call to a named getter. If templateValues is not None
the returned value will be wrapped with wrapForType using templateValues.
"""
def __init__(self, descriptor, templateValues=None):
self.templateValues = templateValues
CGProxySpecialOperation.__init__(self, descriptor, 'NamedGetter')
class CGProxyNamedPresenceChecker(CGProxyNamedGetter):
"""
Class to generate a call that checks whether a named property exists.
For now, we just delegate to CGProxyNamedGetter.
"""
def __init__(self, descriptor):
CGProxyNamedGetter.__init__(self, descriptor)
class CGProxyNamedSetter(CGProxyNamedOperation):
"""
Class to generate a call to a named setter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'NamedSetter')
class CGProxyNamedDeleter(CGProxyNamedOperation):
"""
Class to generate a call to a named deleter.
"""
def __init__(self, descriptor):
CGProxySpecialOperation.__init__(self, descriptor, 'NamedDeleter')
class CGProxyUnwrap(CGAbstractMethod):
def __init__(self, descriptor):
args = [Argument('HandleObject', 'obj')]
CGAbstractMethod.__init__(self, descriptor, "UnwrapProxy",
'*const ' + descriptor.concreteType, args,
alwaysInline=True, unsafe=True)
def definition_body(self):
return CGGeneric("""\
/*if (xpc::WrapperFactory::IsXrayWrapper(obj)) {
obj = js::UnwrapObject(obj);
}*/
//MOZ_ASSERT(IsProxy(obj));
let box_ = GetProxyPrivate(*obj.ptr).to_private() as *const %s;
return box_;""" % self.descriptor.concreteType)
class CGDOMJSProxyHandler_getOwnPropertyDescriptor(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('MutableHandle<JSPropertyDescriptor>', 'desc')]
CGAbstractExternMethod.__init__(self, descriptor, "getOwnPropertyDescriptor",
"bool", args)
self.descriptor = descriptor
def getBody(self):
indexedGetter = self.descriptor.operations['IndexedGetter']
indexedSetter = self.descriptor.operations['IndexedSetter']
get = ""
if indexedGetter or indexedSetter:
get = "let index = get_array_index_from_id(cx, id);\n"
if indexedGetter:
readonly = toStringBool(self.descriptor.operations['IndexedSetter'] is None)
fillDescriptor = ("desc.get().value = result_root.ptr;\n"
"fill_property_descriptor(&mut *desc.ptr, *proxy.ptr, %s);\n"
"return true;" % readonly)
templateValues = {
'jsvalRef': 'result_root.handle_mut()',
'successCode': fillDescriptor,
'pre': 'let mut result_root = RootedValue::new(cx, UndefinedValue());'
}
get += ("if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define() + "\n" +
"}\n")
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
readonly = toStringBool(self.descriptor.operations['NamedSetter'] is None)
fillDescriptor = ("desc.get().value = result_root.ptr;\n"
"fill_property_descriptor(&mut *desc.ptr, *proxy.ptr, %s);\n"
"return true;" % readonly)
templateValues = {
'jsvalRef': 'result_root.handle_mut()',
'successCode': fillDescriptor,
'pre': 'let mut result_root = RootedValue::new(cx, UndefinedValue());'
}
# Once we start supporting OverrideBuiltins we need to make
# ResolveOwnProperty or EnumerateOwnProperties filter out named
# properties that shadow prototype properties.
namedGet = ("\n" +
"if RUST_JSID_IS_STRING(id) && !has_property_on_prototype(cx, proxy, id) {\n" +
CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() + "\n" +
"}\n")
else:
namedGet = ""
return get + """\
let expando = RootedObject::new(cx, get_expando_object(proxy));
//if (!xpc::WrapperFactory::IsXrayWrapper(proxy) && (expando = GetExpandoObject(proxy))) {
if !expando.ptr.is_null() {
if !JS_GetPropertyDescriptorById(cx, expando.handle(), id, desc) {
return false;
}
if !desc.get().obj.is_null() {
// Pretend the property lives on the wrapper.
desc.get().obj = *proxy.ptr;
return true;
}
}
""" + namedGet + """\
desc.get().obj = ptr::null_mut();
return true;"""
def definition_body(self):
return CGGeneric(self.getBody())
# TODO(Issue 5876)
class CGDOMJSProxyHandler_defineProperty(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('Handle<JSPropertyDescriptor>', 'desc'),
Argument('*mut ObjectOpResult', 'opresult')]
CGAbstractExternMethod.__init__(self, descriptor, "defineProperty", "bool", args)
self.descriptor = descriptor
def getBody(self):
set = ""
indexedSetter = self.descriptor.operations['IndexedSetter']
if indexedSetter:
set += ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedSetter(self.descriptor)).define() +
" return true;\n" +
"}\n")
elif self.descriptor.operations['IndexedGetter']:
set += ("if get_array_index_from_id(cx, id).is_some() {\n" +
" return false;\n" +
" //return ThrowErrorMessage(cx, MSG_NO_PROPERTY_SETTER, \"%s\");\n" +
"}\n") % self.descriptor.name
namedSetter = self.descriptor.operations['NamedSetter']
if namedSetter:
if self.descriptor.hasUnforgeableMembers:
raise TypeError("Can't handle a named setter on an interface that has "
"unforgeables. Figure out how that should work!")
set += ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedSetter(self.descriptor)).define() +
" (*opresult).code_ = 0; /* SpecialCodes::OkCode */\n" +
" return true;\n" +
"} else {\n" +
" return false;\n" +
"}\n")
else:
set += ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedGetter(self.descriptor)).define() +
" if (found) {\n"
# TODO(Issue 5876)
" //return js::IsInNonStrictPropertySet(cx)\n" +
" // ? opresult.succeed()\n" +
" // : ThrowErrorMessage(cx, MSG_NO_NAMED_SETTER, \"${name}\");\n" +
" (*opresult).code_ = 0; /* SpecialCodes::OkCode */\n" +
" return true;\n" +
" }\n" +
" (*opresult).code_ = 0; /* SpecialCodes::OkCode */\n" +
" return true;\n"
"}\n") % (self.descriptor.name, self.descriptor.name)
set += "return proxyhandler::define_property(%s);" % ", ".join(a.name for a in self.args)
return set
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_delete(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'),
Argument('*mut ObjectOpResult', 'res')]
CGAbstractExternMethod.__init__(self, descriptor, "delete", "bool", args)
self.descriptor = descriptor
def getBody(self):
set = ""
if self.descriptor.operations['NamedDeleter']:
if self.descriptor.hasUnforgeableMembers:
raise TypeError("Can't handle a deleter on an interface that has "
"unforgeables. Figure out how that should work!")
set += CGProxyNamedDeleter(self.descriptor).define()
set += "return proxyhandler::delete(%s);" % ", ".join(a.name for a in self.args)
return set
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_ownPropertyKeys(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'proxy'),
Argument('*mut AutoIdVector', 'props')]
CGAbstractExternMethod.__init__(self, descriptor, "own_property_keys", "bool", args)
self.descriptor = descriptor
def getBody(self):
body = dedent(
"""
let unwrapped_proxy = UnwrapProxy(proxy);
""")
if self.descriptor.operations['IndexedGetter']:
body += dedent(
"""
for i in 0..(*unwrapped_proxy).Length() {
let rooted_jsid = RootedId::new(cx, int_to_jsid(i as i32));
AppendToAutoIdVector(props, rooted_jsid.handle().get());
}
""")
if self.descriptor.operations['NamedGetter']:
body += dedent(
"""
for name in (*unwrapped_proxy).SupportedPropertyNames() {
let cstring = CString::new(name).unwrap();
let jsstring = JS_InternString(cx, cstring.as_ptr());
let rooted = RootedString::new(cx, jsstring);
let jsid = INTERNED_STRING_TO_JSID(cx, rooted.handle().get());
let rooted_jsid = RootedId::new(cx, jsid);
AppendToAutoIdVector(props, rooted_jsid.handle().get());
}
""")
body += dedent(
"""
let expando = get_expando_object(proxy);
if !expando.is_null() {
let rooted_expando = RootedObject::new(cx, expando);
GetPropertyKeys(cx, rooted_expando.handle(), JSITER_OWNONLY | JSITER_HIDDEN | JSITER_SYMBOLS, props);
}
return true;
""")
return body
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_hasOwn(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleId', 'id'), Argument('*mut bool', 'bp')]
CGAbstractExternMethod.__init__(self, descriptor, "hasOwn", "bool", args)
self.descriptor = descriptor
def getBody(self):
indexedGetter = self.descriptor.operations['IndexedGetter']
if indexedGetter:
indexed = ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor)).define() + "\n" +
" *bp = found;\n" +
" return true;\n" +
"}\n\n")
else:
indexed = ""
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
named = ("if RUST_JSID_IS_STRING(id) && !has_property_on_prototype(cx, proxy, id) {\n" +
CGIndenter(CGProxyNamedGetter(self.descriptor)).define() + "\n" +
" *bp = found;\n"
" return true;\n"
"}\n" +
"\n")
else:
named = ""
return indexed + """\
let expando = RootedObject::new(cx, get_expando_object(proxy));
if !expando.ptr.is_null() {
let mut b = true;
let ok = JS_HasPropertyById(cx, expando.handle(), id, &mut b);
*bp = b;
if !ok || *bp {
return ok;
}
}
""" + named + """\
*bp = false;
return true;"""
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_get(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', 'proxy'),
Argument('HandleObject', 'receiver'), Argument('HandleId', 'id'),
Argument('MutableHandleValue', 'vp')]
CGAbstractExternMethod.__init__(self, descriptor, "get", "bool", args)
self.descriptor = descriptor
def getBody(self):
getFromExpando = """\
let expando = RootedObject::new(cx, get_expando_object(proxy));
if !expando.ptr.is_null() {
let mut hasProp = false;
if !JS_HasPropertyById(cx, expando.handle(), id, &mut hasProp) {
return false;
}
if hasProp {
return JS_ForwardGetPropertyTo(cx, expando.handle(), id, receiver, vp);
}
}"""
templateValues = {
'jsvalRef': 'vp',
'successCode': 'return true;',
}
indexedGetter = self.descriptor.operations['IndexedGetter']
if indexedGetter:
getIndexedOrExpando = ("let index = get_array_index_from_id(cx, id);\n" +
"if let Some(index) = index {\n" +
" let this = UnwrapProxy(proxy);\n" +
" let this = &*this;\n" +
CGIndenter(CGProxyIndexedGetter(self.descriptor, templateValues)).define())
getIndexedOrExpando += """\
// Even if we don't have this index, we don't forward the
// get on to our expando object.
} else {
%s
}
""" % (stripTrailingWhitespace(getFromExpando.replace('\n', '\n ')))
else:
getIndexedOrExpando = getFromExpando + "\n"
namedGetter = self.descriptor.operations['NamedGetter']
if namedGetter:
getNamed = ("if RUST_JSID_IS_STRING(id) {\n" +
CGIndenter(CGProxyNamedGetter(self.descriptor, templateValues)).define() +
"}\n")
else:
getNamed = ""
return """\
//MOZ_ASSERT(!xpc::WrapperFactory::IsXrayWrapper(proxy),
//"Should not have a XrayWrapper here");
%s
let mut found = false;
if !get_property_on_prototype(cx, proxy, id, &mut found, vp) {
return false;
}
if found {
return true;
}
%s
*vp.ptr = UndefinedValue();
return true;""" % (getIndexedOrExpando, getNamed)
def definition_body(self):
return CGGeneric(self.getBody())
class CGDOMJSProxyHandler_className(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_proxy')]
CGAbstractExternMethod.__init__(self, descriptor, "className", "*const i8", args)
self.descriptor = descriptor
def getBody(self):
return '%s as *const u8 as *const i8' % str_to_const_array(self.descriptor.name)
def definition_body(self):
return CGGeneric(self.getBody())
class CGAbstractClassHook(CGAbstractExternMethod):
"""
Meant for implementing JSClass hooks, like Finalize or Trace. Does very raw
'this' unwrapping as it assumes that the unwrapped type is always known.
"""
def __init__(self, descriptor, name, returnType, args):
CGAbstractExternMethod.__init__(self, descriptor, name, returnType,
args)
def definition_body_prologue(self):
return CGGeneric("""
let this = native_from_object::<%s>(obj).unwrap();
""" % self.descriptor.concreteType)
def definition_body(self):
return CGList([
self.definition_body_prologue(),
self.generate_code(),
])
def generate_code(self):
raise NotImplementedError # Override me!
def finalizeHook(descriptor, hookName, context):
release = ""
if descriptor.isGlobal():
release += """\
finalize_global(obj);
"""
elif descriptor.weakReferenceable:
release += """\
let weak_box_ptr = JS_GetReservedSlot(obj, DOM_WEAK_SLOT).to_private() as *mut WeakBox<%s>;
if !weak_box_ptr.is_null() {
let count = {
let weak_box = &*weak_box_ptr;
assert!(weak_box.value.get().is_some());
assert!(weak_box.count.get() > 0);
weak_box.value.set(None);
let count = weak_box.count.get() - 1;
weak_box.count.set(count);
count
};
if count == 0 {
mem::drop(Box::from_raw(weak_box_ptr));
}
}
""" % descriptor.concreteType
release += """\
if !this.is_null() {
// The pointer can be null if the object is the unforgeable holder of that interface.
let _ = Box::from_raw(this as *mut %s);
}
debug!("%s finalize: {:p}", this);\
""" % (descriptor.concreteType, descriptor.concreteType)
return release
class CGClassTraceHook(CGAbstractClassHook):
"""
A hook to trace through our native object; used for GC and CC
"""
def __init__(self, descriptor):
args = [Argument('*mut JSTracer', 'trc'), Argument('*mut JSObject', 'obj')]
CGAbstractClassHook.__init__(self, descriptor, TRACE_HOOK_NAME, 'void',
args)
self.traceGlobal = descriptor.isGlobal()
def generate_code(self):
body = [CGGeneric("if this.is_null() { return; } // GC during obj creation\n"
"(*this).trace(%s);" % self.args[0].name)]
if self.traceGlobal:
body += [CGGeneric("trace_global(trc, obj);")]
return CGList(body, "\n")
class CGClassConstructHook(CGAbstractExternMethod):
"""
JS-visible constructor for our objects
"""
def __init__(self, descriptor, constructor=None):
args = [Argument('*mut JSContext', 'cx'), Argument('u32', 'argc'), Argument('*mut JSVal', 'vp')]
name = CONSTRUCT_HOOK_NAME
if constructor:
name += "_" + constructor.identifier.name
else:
constructor = descriptor.interface.ctor()
assert constructor
CGAbstractExternMethod.__init__(self, descriptor, name, 'bool', args)
self.constructor = constructor
def definition_body(self):
preamble = CGGeneric("""\
let global = global_root_from_object(JS_CALLEE(cx, vp).to_object());
let args = CallArgs::from_vp(vp, argc);
""")
name = self.constructor.identifier.name
nativeName = MakeNativeName(self.descriptor.binaryNameFor(name))
callGenerator = CGMethodCall(["global.r()"], nativeName, True,
self.descriptor, self.constructor)
return CGList([preamble, callGenerator])
class CGClassHasInstanceHook(CGAbstractExternMethod):
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'obj'),
Argument('MutableHandleValue', 'value'),
Argument('*mut bool', 'rval')]
assert descriptor.interface.hasInterfaceObject() and not descriptor.interface.isCallback()
CGAbstractExternMethod.__init__(self, descriptor, HASINSTANCE_HOOK_NAME,
'bool', args)
def definition_body(self):
id = "PrototypeList::ID::%s" % self.descriptor.interface.identifier.name
return CGGeneric("""\
match has_instance(cx, obj, value.handle(), %(id)s, %(index)s) {
Ok(result) => {
*rval = result;
true
}
Err(()) => false,
}
""" % {"id": id, "index": self.descriptor.prototypeDepth})
class CGClassFunToStringHook(CGAbstractExternMethod):
"""
A hook to convert functions to strings.
"""
def __init__(self, descriptor):
args = [Argument('*mut JSContext', 'cx'), Argument('HandleObject', '_obj'),
Argument('u32', '_indent')]
CGAbstractExternMethod.__init__(self, descriptor, "fun_to_string", '*mut JSString', args)
def definition_body(self):
name = self.descriptor.interface.identifier.name
string = str_to_const_array("function %s() {\\n [native code]\\n}" % name)
return CGGeneric("JS_NewStringCopyZ(cx, %s as *const _ as *const libc::c_char)" % string)
class CGClassFinalizeHook(CGAbstractClassHook):
"""
A hook for finalize, used to release our native object.
"""
def __init__(self, descriptor):
args = [Argument('*mut FreeOp', '_fop'), Argument('*mut JSObject', 'obj')]
CGAbstractClassHook.__init__(self, descriptor, FINALIZE_HOOK_NAME,
'void', args)
def generate_code(self):
return CGGeneric(finalizeHook(self.descriptor, self.name, self.args[0].name))
class CGDOMJSProxyHandlerDOMClass(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
self.descriptor = descriptor
def define(self):
return "static Class: DOMClass = " + DOMClass(self.descriptor) + ";\n"
class CGInterfaceTrait(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
def attribute_arguments(needCx, argument=None):
if needCx:
yield "cx", "*mut JSContext"
if argument:
yield "value", argument_type(descriptor, argument)
def members():
for m in descriptor.interface.members:
if (m.isMethod() and not m.isStatic() and
(not m.isIdentifierLess() or m.isStringifier())):
name = CGSpecializedMethod.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m)
for idx, (rettype, arguments) in enumerate(m.signatures()):
arguments = method_arguments(descriptor, rettype, arguments)
rettype = return_type(descriptor, rettype, infallible)
yield name + ('_' * idx), arguments, rettype
elif m.isAttr() and not m.isStatic():
name = CGSpecializedGetter.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m, getter=True)
yield (name,
attribute_arguments(typeNeedsCx(m.type, True)),
return_type(descriptor, m.type, infallible))
if not m.readonly:
name = CGSpecializedSetter.makeNativeName(descriptor, m)
infallible = 'infallible' in descriptor.getExtendedAttributes(m, setter=True)
if infallible:
rettype = "()"
else:
rettype = "ErrorResult"
yield name, attribute_arguments(typeNeedsCx(m.type, False), m.type), rettype
if descriptor.proxy:
for name, operation in descriptor.operations.iteritems():
if not operation or operation.isStringifier():
continue
assert len(operation.signatures()) == 1
rettype, arguments = operation.signatures()[0]
infallible = 'infallible' in descriptor.getExtendedAttributes(operation)
if operation.isGetter():
arguments = method_arguments(descriptor, rettype, arguments, trailing=("found", "&mut bool"))
# If this interface 'supports named properties', then we
# should be able to access 'supported property names'
#
# WebIDL, Second Draft, section 3.2.4.5
# https://heycam.github.io/webidl/#idl-named-properties
if operation.isNamed():
yield "SupportedPropertyNames", [], "Vec<DOMString>"
else:
arguments = method_arguments(descriptor, rettype, arguments)
rettype = return_type(descriptor, rettype, infallible)
yield name, arguments, rettype
def fmt(arguments):
return "".join(", %s: %s" % argument for argument in arguments)
methods = [
CGGeneric("fn %s(&self%s) -> %s;\n" % (name, fmt(arguments), rettype))
for name, arguments, rettype in members()
]
if methods:
self.cgRoot = CGWrapper(CGIndenter(CGList(methods, "")),
pre="pub trait %sMethods {\n" % descriptor.interface.identifier.name,
post="}")
else:
self.cgRoot = CGGeneric("")
def define(self):
return self.cgRoot.define()
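# A sketch of the trait emitted for a hypothetical interface `Counter` with a
# read-write `attribute unsigned long value` and an infallible method:
#
#     pub trait CounterMethods {
#         fn Value(&self) -> u32;
#         fn SetValue(&self, value: u32) -> ();
#         fn Increment(&self) -> u32;
#     }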
class CGWeakReferenceableTrait(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
assert descriptor.weakReferenceable
self.code = "impl WeakReferenceable for %s {}" % descriptor.interface.identifier.name
def define(self):
return self.code
class CGDescriptor(CGThing):
def __init__(self, descriptor):
CGThing.__init__(self)
assert not descriptor.concrete or not descriptor.interface.isCallback()
cgThings = []
if not descriptor.interface.isCallback():
cgThings.append(CGGetProtoObjectMethod(descriptor))
if descriptor.interface.hasInterfaceObject() and descriptor.hasDescendants():
cgThings.append(CGGetConstructorObjectMethod(descriptor))
for m in descriptor.interface.members:
if (m.isMethod() and
(not m.isIdentifierLess() or m == descriptor.operations["Stringifier"])):
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticMethod(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedMethod(descriptor, m))
cgThings.append(CGMemberJITInfo(descriptor, m))
elif m.isAttr():
if m.stringifier:
raise TypeError("Stringifier attributes not supported yet. "
"See https://github.com/servo/servo/issues/7590\n"
"%s" % m.location)
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticGetter(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedGetter(descriptor, m))
if not m.readonly:
if m.isStatic():
assert descriptor.interface.hasInterfaceObject()
cgThings.append(CGStaticSetter(descriptor, m))
elif not descriptor.interface.isCallback():
cgThings.append(CGSpecializedSetter(descriptor, m))
elif m.getExtendedAttribute("PutForwards"):
cgThings.append(CGSpecializedForwardingSetter(descriptor, m))
if (not m.isStatic() and not descriptor.interface.isCallback()):
cgThings.append(CGMemberJITInfo(descriptor, m))
if descriptor.concrete:
cgThings.append(CGClassFinalizeHook(descriptor))
cgThings.append(CGClassTraceHook(descriptor))
if descriptor.interface.hasInterfaceObject():
if descriptor.interface.ctor():
cgThings.append(CGClassConstructHook(descriptor))
for ctor in descriptor.interface.namedConstructors:
cgThings.append(CGClassConstructHook(descriptor, ctor))
if not descriptor.interface.isCallback():
cgThings.append(CGInterfaceObjectJSClass(descriptor))
cgThings.append(CGClassHasInstanceHook(descriptor))
cgThings.append(CGClassFunToStringHook(descriptor))
if not descriptor.interface.isCallback():
cgThings.append(CGPrototypeJSClass(descriptor))
properties = PropertyArrays(descriptor)
cgThings.append(CGGeneric(str(properties)))
cgThings.append(CGCreateInterfaceObjectsMethod(descriptor, properties))
cgThings.append(CGNamespace.build([descriptor.name + "Constants"],
CGConstant(m for m in descriptor.interface.members if m.isConst()),
public=True))
if descriptor.interface.hasInterfaceObject():
cgThings.append(CGDefineDOMInterfaceMethod(descriptor))
if descriptor.proxy:
cgThings.append(CGDefineProxyHandler(descriptor))
if descriptor.concrete:
if descriptor.proxy:
# cgThings.append(CGProxyIsProxy(descriptor))
cgThings.append(CGProxyUnwrap(descriptor))
cgThings.append(CGDOMJSProxyHandlerDOMClass(descriptor))
cgThings.append(CGDOMJSProxyHandler_ownPropertyKeys(descriptor))
cgThings.append(CGDOMJSProxyHandler_getOwnPropertyDescriptor(descriptor))
cgThings.append(CGDOMJSProxyHandler_className(descriptor))
cgThings.append(CGDOMJSProxyHandler_get(descriptor))
cgThings.append(CGDOMJSProxyHandler_hasOwn(descriptor))
if descriptor.operations['IndexedSetter'] or descriptor.operations['NamedSetter']:
cgThings.append(CGDOMJSProxyHandler_defineProperty(descriptor))
# We want to prevent indexed deleters from compiling at all.
assert not descriptor.operations['IndexedDeleter']
if descriptor.operations['NamedDeleter']:
cgThings.append(CGDOMJSProxyHandler_delete(descriptor))
# cgThings.append(CGDOMJSProxyHandler(descriptor))
# cgThings.append(CGIsMethod(descriptor))
pass
else:
cgThings.append(CGDOMJSClass(descriptor))
pass
cgThings.append(CGWrapMethod(descriptor))
if not descriptor.interface.isCallback():
if descriptor.concrete or descriptor.hasDescendants():
cgThings.append(CGIDLInterface(descriptor))
cgThings.append(CGInterfaceTrait(descriptor))
if descriptor.weakReferenceable:
cgThings.append(CGWeakReferenceableTrait(descriptor))
cgThings = CGList(cgThings, "\n")
# self.cgRoot = CGWrapper(CGNamespace(toBindingNamespace(descriptor.name),
# cgThings),
# post='\n')
self.cgRoot = cgThings
def define(self):
return self.cgRoot.define()
class CGNonNamespacedEnum(CGThing):
def __init__(self, enumName, names, first, comment="", deriving="", repr=""):
# Account for first value
entries = ["%s = %s" % (names[0], first)] + names[1:]
# Append a Last.
entries.append('Last = ' + str(first + len(entries)))
# Indent.
entries = [' ' + e for e in entries]
# Build the enum body.
enumstr = comment + 'pub enum %s {\n%s\n}\n' % (enumName, ',\n'.join(entries))
if repr:
enumstr = ('#[repr(%s)]\n' % repr) + enumstr
if deriving:
enumstr = ('#[derive(%s)]\n' % deriving) + enumstr
curr = CGGeneric(enumstr)
# Add some whitespace padding.
curr = CGWrapper(curr, pre='\n', post='\n')
# Add the typedef
# typedef = '\ntypedef %s::%s %s;\n\n' % (namespace, enumName, enumName)
# curr = CGList([curr, CGGeneric(typedef)])
# Save the result.
self.node = curr
def define(self):
return self.node.define()
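# Illustration (hypothetical enum/entry names): CGNonNamespacedEnum('ID',
# ['Blob', 'Event'], 0, deriving="PartialEq, Copy, Clone", repr="u16")
# renders roughly the following Rust:
#
#     #[derive(PartialEq, Copy, Clone)]
#     #[repr(u16)]
#     pub enum ID {
#         Blob = 0,
#         Event,
#         Last = 2
#     }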
class CGDictionary(CGThing):
def __init__(self, dictionary, descriptorProvider):
self.dictionary = dictionary
if all(CGDictionary(d, descriptorProvider).generatable for
d in CGDictionary.getDictionaryDependencies(dictionary)):
self.generatable = True
else:
self.generatable = False
# Nothing else to do here
return
self.memberInfo = [
(member,
getJSToNativeConversionInfo(member.type,
descriptorProvider,
isMember="Dictionary",
defaultValue=member.defaultValue,
exceptionCode="return Err(());"))
for member in dictionary.members]
def define(self):
if not self.generatable:
return ""
return self.struct() + "\n" + self.impl()
def struct(self):
d = self.dictionary
if d.parent:
inheritance = " pub parent: %s::%s,\n" % (self.makeModuleName(d.parent),
self.makeClassName(d.parent))
else:
inheritance = ""
memberDecls = [" pub %s: %s," %
(self.makeMemberName(m[0].identifier.name), self.getMemberType(m))
for m in self.memberInfo]
return (string.Template(
"pub struct ${selfName} {\n" +
"${inheritance}" +
"\n".join(memberDecls) + "\n" +
"}").substitute({"selfName": self.makeClassName(d),
"inheritance": inheritance}))
def impl(self):
d = self.dictionary
if d.parent:
initParent = "parent: try!(%s::%s::new(cx, val)),\n" % (
self.makeModuleName(d.parent),
self.makeClassName(d.parent))
else:
initParent = ""
def memberInit(memberInfo):
member, _ = memberInfo
name = self.makeMemberName(member.identifier.name)
conversion = self.getMemberConversion(memberInfo, member.type)
return CGGeneric("%s: %s,\n" % (name, conversion.define()))
def memberInsert(memberInfo):
member, _ = memberInfo
name = self.makeMemberName(member.identifier.name)
insertion = ("let mut %s = RootedValue::new(cx, UndefinedValue());\n"
"self.%s.to_jsval(cx, %s.handle_mut());\n"
"set_dictionary_property(cx, obj.handle(), \"%s\", %s.handle()).unwrap();"
% (name, name, name, name, name))
return CGGeneric("%s\n" % insertion)
memberInits = CGList([memberInit(m) for m in self.memberInfo])
memberInserts = CGList([memberInsert(m) for m in self.memberInfo])
return string.Template(
"impl ${selfName} {\n"
" pub unsafe fn empty(cx: *mut JSContext) -> ${selfName} {\n"
" ${selfName}::new(cx, HandleValue::null()).unwrap()\n"
" }\n"
" pub unsafe fn new(cx: *mut JSContext, val: HandleValue) -> Result<${selfName}, ()> {\n"
" let object = if val.get().is_null_or_undefined() {\n"
" RootedObject::new(cx, ptr::null_mut())\n"
" } else if val.get().is_object() {\n"
" RootedObject::new(cx, val.get().to_object())\n"
" } else {\n"
" throw_type_error(cx, \"Value not an object.\");\n"
" return Err(());\n"
" };\n"
" Ok(${selfName} {\n"
"${initParent}"
"${initMembers}"
" })\n"
" }\n"
"}\n"
"\n"
"impl ToJSValConvertible for ${selfName} {\n"
" unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {\n"
" let obj = RootedObject::new(cx, JS_NewObject(cx, ptr::null()));\n"
"${insertMembers}"
" rval.set(ObjectOrNullValue(obj.ptr))\n"
" }\n"
"}\n").substitute({
"selfName": self.makeClassName(d),
"initParent": CGIndenter(CGGeneric(initParent), indentLevel=12).define(),
"initMembers": CGIndenter(memberInits, indentLevel=12).define(),
"insertMembers": CGIndenter(memberInserts, indentLevel=8).define(),
})
@staticmethod
def makeDictionaryName(dictionary):
return dictionary.identifier.name
def makeClassName(self, dictionary):
return self.makeDictionaryName(dictionary)
@staticmethod
def makeModuleName(dictionary):
return getModuleFromObject(dictionary)
def getMemberType(self, memberInfo):
member, info = memberInfo
declType = info.declType
if member.optional and not member.defaultValue:
declType = CGWrapper(info.declType, pre="Option<", post=">")
return declType.define()
def getMemberConversion(self, memberInfo, memberType):
def indent(s):
return CGIndenter(CGGeneric(s), 8).define()
member, info = memberInfo
templateBody = info.template
default = info.default
replacements = {"val": "rval.handle()"}
conversion = string.Template(templateBody).substitute(replacements)
if memberType.isAny():
conversion = "%s.get()" % conversion
assert (member.defaultValue is None) == (default is None)
if not member.optional:
assert default is None
default = ("throw_type_error(cx, \"Missing required member \\\"%s\\\".\");\n"
"return Err(());") % member.identifier.name
elif not default:
default = "None"
conversion = "Some(%s)" % conversion
conversion = (
"{\n"
"let mut rval = RootedValue::new(cx, UndefinedValue());\n"
"match try!(get_dictionary_property(cx, object.handle(), \"%s\", rval.handle_mut())) {\n"
" true => {\n"
"%s\n"
" },\n"
" false => {\n"
"%s\n"
" },\n"
"}\n}") % (member.identifier.name, indent(conversion), indent(default))
return CGGeneric(conversion)
@staticmethod
def makeMemberName(name):
# Can't use Rust keywords as member names.
if name == "type":
return name + "_"
return name
@staticmethod
def getDictionaryDependencies(dictionary):
deps = set()
if dictionary.parent:
deps.add(dictionary.parent)
for member in dictionary.members:
if member.type.isDictionary():
deps.add(member.type.unroll().inner)
return deps
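# Illustration: for a hypothetical WebIDL `dictionary Options { boolean flag = false; };`,
# struct() would emit Rust along these lines (member types come from
# getJSToNativeConversionInfo, so the exact type mapping shown here is schematic):
#
#     pub struct Options {
#         pub flag: bool,
#     }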
class CGRegisterProtos(CGAbstractMethod):
def __init__(self, config):
arguments = [
Argument('*mut JSContext', 'cx'),
Argument('HandleObject', 'global'),
]
CGAbstractMethod.__init__(self, None, 'Register', 'void', arguments,
unsafe=False, pub=True)
self.config = config
def definition_body(self):
return CGList([
CGGeneric("codegen::Bindings::%sBinding::DefineDOMInterface(cx, global);" % desc.name)
for desc in self.config.getDescriptors(hasInterfaceObject=True, register=True)
], "\n")
class CGRegisterProxyHandlersMethod(CGAbstractMethod):
def __init__(self, descriptors):
docs = "Create the global vtables used by the generated DOM bindings to implement JS proxies."
CGAbstractMethod.__init__(self, None, 'RegisterProxyHandlers', 'void', [],
unsafe=True, pub=True, docs=docs)
self.descriptors = descriptors
def definition_body(self):
return CGList([
CGGeneric("proxy_handlers[Proxies::%s as usize] = codegen::Bindings::%sBinding::DefineProxyHandler();"
% (desc.name, desc.name))
for desc in self.descriptors
], "\n")
class CGRegisterProxyHandlers(CGThing):
def __init__(self, config):
descriptors = config.getDescriptors(proxy=True)
length = len(descriptors)
self.root = CGList([
CGGeneric("pub static mut proxy_handlers: [*const libc::c_void; %d] = [0 as *const libc::c_void; %d];"
% (length, length)),
CGRegisterProxyHandlersMethod(descriptors),
], "\n")
def define(self):
return self.root.define()
class CGBindingRoot(CGThing):
"""
Root codegen class for binding generation. Instantiate the class, and call
declare or define to generate header or cpp code (respectively).
"""
def __init__(self, config, prefix, webIDLFile):
descriptors = config.getDescriptors(webIDLFile=webIDLFile,
hasInterfaceObject=True)
# We also want descriptors that have an interface prototype object
# (isCallback=False), but we don't want to include a second copy
# of descriptors that we also matched in the previous line
# (hence hasInterfaceObject=False).
descriptors.extend(config.getDescriptors(webIDLFile=webIDLFile,
hasInterfaceObject=False,
isCallback=False))
dictionaries = config.getDictionaries(webIDLFile=webIDLFile)
mainCallbacks = config.getCallbacks(webIDLFile=webIDLFile)
callbackDescriptors = config.getDescriptors(webIDLFile=webIDLFile,
isCallback=True)
enums = config.getEnums(webIDLFile)
if not (descriptors or dictionaries or mainCallbacks or callbackDescriptors or enums):
self.root = None
return
# Do codegen for all the enums.
cgthings = [CGEnum(e) for e in enums]
# Do codegen for all the dictionaries.
cgthings.extend([CGDictionary(d, config.getDescriptorProvider())
for d in dictionaries])
# Do codegen for all the callbacks.
cgthings.extend(CGList([CGCallbackFunction(c, config.getDescriptorProvider()),
CGCallbackFunctionImpl(c)], "\n")
for c in mainCallbacks)
# Do codegen for all the descriptors
cgthings.extend([CGDescriptor(x) for x in descriptors])
# Do codegen for all the callback interfaces.
cgthings.extend(CGList([CGCallbackInterface(x),
CGCallbackFunctionImpl(x.interface)], "\n")
for x in callbackDescriptors)
# And make sure we have the right number of newlines at the end
curr = CGWrapper(CGList(cgthings, "\n\n"), post="\n\n")
# Add imports
curr = CGImports(curr, descriptors + callbackDescriptors, mainCallbacks, [
'js',
'js::{JSCLASS_GLOBAL_SLOT_COUNT, JSCLASS_IMPLEMENTS_BARRIERS}',
'js::{JSCLASS_IS_DOMJSCLASS, JSCLASS_IS_GLOBAL, JSCLASS_RESERVED_SLOTS_MASK}',
'js::{JSCLASS_RESERVED_SLOTS_SHIFT, JSITER_HIDDEN, JSITER_OWNONLY}',
'js::{JSITER_SYMBOLS, JSPROP_ENUMERATE, JSPROP_PERMANENT, JSPROP_READONLY}',
'js::{JSPROP_SHARED, JS_CALLEE}',
'js::error::throw_type_error',
'js::jsapi::{AliasSet, ArgType, AutoIdVector, CallArgs, FreeOp}',
'js::jsapi::{GetGlobalForObjectCrossCompartment , GetPropertyKeys, Handle}',
'js::jsapi::{HandleId, HandleObject, HandleValue, HandleValueArray}',
'js::jsapi::{INTERNED_STRING_TO_JSID, IsCallable, JS_CallFunctionValue}',
'js::jsapi::{JS_ComputeThis, JS_CopyPropertiesFrom, JS_ForwardGetPropertyTo}',
'js::jsapi::{JS_GetClass, JS_GetFunctionPrototype, JS_GetGlobalForObject}',
'js::jsapi::{JS_GetObjectPrototype, JS_GetProperty, JS_GetPropertyById}',
'js::jsapi::{JS_GetPropertyDescriptorById, JS_GetReservedSlot, JS_HasProperty}',
'js::jsapi::{JS_HasPropertyById, JS_InitializePropertiesFromCompatibleNativeObject}',
'js::jsapi::{JS_InternString, JS_IsExceptionPending, JS_NewObject, JS_NewObjectWithGivenProto}',
'js::jsapi::{JS_NewObjectWithoutMetadata, JS_NewStringCopyZ, JS_SetProperty}',
'js::jsapi::{JS_SetPrototype, JS_SetReservedSlot, JS_WrapValue, JSAutoCompartment}',
'js::jsapi::{JSAutoRequest, JSContext, JSClass, JSFreeOp, JSFunctionSpec}',
'js::jsapi::{JSJitGetterCallArgs, JSJitInfo, JSJitMethodCallArgs, JSJitSetterCallArgs}',
'js::jsapi::{JSNative, JSObject, JSNativeWrapper, JSPropertyDescriptor, JSPropertySpec}',
'js::jsapi::{JSString, JSTracer, JSType, JSTypedMethodJitInfo, JSValueType}',
'js::jsapi::{ObjectOpResult, OpType, MutableHandle, MutableHandleObject}',
'js::jsapi::{MutableHandleValue, RootedId, RootedObject, RootedString}',
'js::jsapi::{RootedValue, SymbolCode, jsid}',
'js::jsval::JSVal',
'js::jsval::{ObjectValue, ObjectOrNullValue, PrivateValue}',
'js::jsval::{NullValue, UndefinedValue}',
'js::glue::{CallJitMethodOp, CallJitGetterOp, CallJitSetterOp, CreateProxyHandler}',
'js::glue::{GetProxyPrivate, NewProxyObject, ProxyTraps}',
'js::glue::{RUST_FUNCTION_VALUE_TO_JITINFO}',
'js::glue::{RUST_JS_NumberValue, RUST_JSID_IS_STRING, int_to_jsid}',
'js::glue::AppendToAutoIdVector',
'js::rust::{GCMethods, define_methods, define_properties}',
'dom::bindings',
'dom::bindings::global::{GlobalRef, global_root_from_object, global_root_from_reflector}',
'dom::bindings::interface::{NonCallbackInterfaceObjectClass, create_callback_interface_object}',
'dom::bindings::interface::{create_interface_prototype_object, create_named_constructors}',
'dom::bindings::interface::{create_noncallback_interface_object, has_instance}',
'dom::bindings::js::{JS, Root, RootedReference}',
'dom::bindings::js::{OptionalRootedReference}',
'dom::bindings::reflector::{Reflectable}',
'dom::bindings::utils::{ConstantSpec, DOMClass, DOMJSClass}',
'dom::bindings::utils::{DOM_PROTO_UNFORGEABLE_HOLDER_SLOT, JSCLASS_DOM_GLOBAL}',
'dom::bindings::utils::{NonNullJSNative, ProtoOrIfaceArray, create_dom_global}',
'dom::bindings::utils::{finalize_global, find_enum_string_index, generic_getter}',
'dom::bindings::utils::{generic_lenient_getter, generic_lenient_setter}',
'dom::bindings::utils::{generic_method, generic_setter, get_array_index_from_id}',
'dom::bindings::utils::{get_dictionary_property, get_property_on_prototype}',
'dom::bindings::utils::{get_proto_or_iface_array, has_property_on_prototype}',
'dom::bindings::utils::{is_platform_object, set_dictionary_property}',
'dom::bindings::utils::{throwing_constructor, trace_global}',
'dom::bindings::utils::ConstantVal::{IntVal, UintVal}',
'dom::bindings::trace::{JSTraceable, RootedTraceable}',
'dom::bindings::callback::{CallbackContainer,CallbackInterface,CallbackFunction}',
'dom::bindings::callback::{CallSetup,ExceptionHandling}',
'dom::bindings::callback::wrap_call_this_object',
'dom::bindings::conversions::{ConversionBehavior, DOM_OBJECT_SLOT, IDLInterface}',
'dom::bindings::conversions::{FromJSValConvertible, StringificationBehavior}',
'dom::bindings::conversions::{ToJSValConvertible, jsid_to_str, native_from_handlevalue}',
'dom::bindings::conversions::{native_from_object, private_from_object, root_from_object}',
'dom::bindings::conversions::{root_from_handleobject, root_from_handlevalue}',
'dom::bindings::codegen::{PrototypeList, RegisterBindings, UnionTypes}',
'dom::bindings::codegen::Bindings::*',
'dom::bindings::error::{Fallible, Error, ErrorResult}',
'dom::bindings::error::Error::JSFailed',
'dom::bindings::error::throw_dom_exception',
'dom::bindings::proxyhandler',
'dom::bindings::proxyhandler::{ensure_expando_object, fill_property_descriptor}',
'dom::bindings::proxyhandler::{get_expando_object, get_property_descriptor}',
'dom::bindings::num::Finite',
'dom::bindings::str::ByteString',
'dom::bindings::str::USVString',
'dom::bindings::trace::RootedVec',
'dom::bindings::weakref::{DOM_WEAK_SLOT, WeakBox, WeakReferenceable}',
'mem::heap_size_of_raw_self_and_children',
'libc',
'util::str::DOMString',
'std::borrow::ToOwned',
'std::cmp',
'std::mem',
'std::num',
'std::ptr',
'std::str',
'std::rc',
'std::rc::Rc',
'std::default::Default',
'std::ffi::CString',
])
# Add the auto-generated comment.
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
# Store the final result.
self.root = curr
def define(self):
if not self.root:
return None
return stripTrailingWhitespace(self.root.define())
def argument_type(descriptorProvider, ty, optional=False, defaultValue=None, variadic=False):
info = getJSToNativeConversionInfo(
ty, descriptorProvider, isArgument=True)
declType = info.declType
if variadic:
if ty.isGeckoInterface():
declType = CGWrapper(declType, pre="&[", post="]")
else:
declType = CGWrapper(declType, pre="Vec<", post=">")
elif optional and not defaultValue:
declType = CGWrapper(declType, pre="Option<", post=">")
if ty.isDictionary():
declType = CGWrapper(declType, pre="&")
return declType.define()
def method_arguments(descriptorProvider, returnType, arguments, passJSBits=True, trailing=None):
if needCx(returnType, arguments, passJSBits):
yield "cx", "*mut JSContext"
for argument in arguments:
ty = argument_type(descriptorProvider, argument.type, argument.optional,
argument.defaultValue, argument.variadic)
yield CGDictionary.makeMemberName(argument.identifier.name), ty
if trailing:
yield trailing
def return_type(descriptorProvider, rettype, infallible):
result = getRetvalDeclarationForType(rettype, descriptorProvider)
if not infallible:
result = CGWrapper(result, pre="Fallible<", post=">")
return result.define()
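# Taken together, argument_type/method_arguments/return_type translate a WebIDL
# signature into the Rust trait methods produced by CGInterfaceTrait. As a
# schematic example (hypothetical interface; naming per makeNativeName), a
# fallible getter for `readonly attribute DOMString name` would surface as:
#
#     fn Name(&self) -> Fallible<DOMString>;
#
# while an infallible one would return DOMString directly.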
class CGNativeMember(ClassMethod):
def __init__(self, descriptorProvider, member, name, signature, extendedAttrs,
breakAfter=True, passJSBitsAsNeeded=True, visibility="public"):
"""
If passJSBitsAsNeeded is false, we don't automatically pass in a
JSContext* or a JSObject* based on the return and argument types.
"""
self.descriptorProvider = descriptorProvider
self.member = member
self.extendedAttrs = extendedAttrs
self.passJSBitsAsNeeded = passJSBitsAsNeeded
breakAfterSelf = "\n" if breakAfter else ""
ClassMethod.__init__(self, name,
self.getReturnType(signature[0]),
self.getArgs(signature[0], signature[1]),
static=member.isStatic(),
# Mark our getters, which are attrs that
# have a non-void return type, as const.
const=(not member.isStatic() and member.isAttr() and
not signature[0].isVoid()),
breakAfterSelf=breakAfterSelf,
visibility=visibility)
def getReturnType(self, type):
infallible = 'infallible' in self.extendedAttrs
typeDecl = return_type(self.descriptorProvider, type, infallible)
return typeDecl
def getArgs(self, returnType, argList):
return [Argument(arg[1], arg[0]) for arg in method_arguments(self.descriptorProvider,
returnType,
argList,
self.passJSBitsAsNeeded)]
class CGCallback(CGClass):
def __init__(self, idlObject, descriptorProvider, baseName, methods,
getters=[], setters=[]):
self.baseName = baseName
self._deps = idlObject.getDeps()
name = idlObject.identifier.name
# For our public methods that needThisHandling we want most of the
# same args and the same return type as what CallbackMember
# generates. So we want to take advantage of all its
# CGNativeMember infrastructure, but that infrastructure can't deal
# with templates and most especially template arguments. So just
# cheat and have CallbackMember compute all those things for us.
realMethods = []
for method in methods:
if not method.needThisHandling:
realMethods.append(method)
else:
realMethods.extend(self.getMethodImpls(method))
CGClass.__init__(self, name,
bases=[ClassBase(baseName)],
constructors=self.getConstructors(),
methods=realMethods + getters + setters,
decorators="#[derive(JSTraceable, PartialEq)]")
def getConstructors(self):
return [ClassConstructor(
[Argument("*mut JSObject", "aCallback")],
bodyInHeader=True,
visibility="pub",
explicit=False,
baseConstructors=[
"%s::new()" % self.baseName
])]
def getMethodImpls(self, method):
assert method.needThisHandling
args = list(method.args)
# Strip out the JSContext*/JSObject* args
# that got added.
assert args[0].name == "cx" and args[0].argType == "*mut JSContext"
assert args[1].name == "aThisObj" and args[1].argType == "HandleObject"
args = args[2:]
# Record the names of all the arguments, so we can use them when we call
# the private method.
argnames = [arg.name for arg in args]
argnamesWithThis = ["s.get_context()", "thisObjJS.handle()"] + argnames
argnamesWithoutThis = ["s.get_context()", "thisObjJS.handle()"] + argnames
# Now that we've recorded the argnames for our call to our private
# method, insert our optional argument for deciding whether the
# CallSetup should re-throw exceptions on aRv.
args.append(Argument("ExceptionHandling", "aExceptionHandling",
"ReportExceptions"))
# And now insert our template argument.
argsWithoutThis = list(args)
args.insert(0, Argument("&T", "thisObj"))
# And the self argument
method.args.insert(0, Argument(None, "&self"))
args.insert(0, Argument(None, "&self"))
argsWithoutThis.insert(0, Argument(None, "&self"))
setupCall = ("let s = CallSetup::new(self, aExceptionHandling);\n"
"if s.get_context().is_null() {\n"
" return Err(JSFailed);\n"
"}\n")
bodyWithThis = string.Template(
setupCall +
"let mut thisObjJS = RootedObject::new(s.get_context(), ptr::null_mut());\n"
"wrap_call_this_object(s.get_context(), thisObj, thisObjJS.handle_mut());\n"
"if thisObjJS.ptr.is_null() {\n"
" return Err(JSFailed);\n"
"}\n"
"return ${methodName}(${callArgs});").substitute({
"callArgs": ", ".join(argnamesWithThis),
"methodName": 'self.' + method.name,
})
bodyWithoutThis = string.Template(
setupCall +
"let thisObjJS = RootedObject::new(s.get_context(), ptr::null_mut());"
"return ${methodName}(${callArgs});").substitute({
"callArgs": ", ".join(argnamesWithoutThis),
"methodName": 'self.' + method.name,
})
return [ClassMethod(method.name + '_', method.returnType, args,
bodyInHeader=True,
templateArgs=["T: Reflectable"],
body=bodyWithThis,
visibility='pub'),
ClassMethod(method.name + '__', method.returnType, argsWithoutThis,
bodyInHeader=True,
body=bodyWithoutThis,
visibility='pub'),
method]
def deps(self):
return self._deps
# We're always fallible
def callbackGetterName(attr, descriptor):
return "Get" + MakeNativeName(
descriptor.binaryNameFor(attr.identifier.name))
def callbackSetterName(attr, descriptor):
return "Set" + MakeNativeName(
descriptor.binaryNameFor(attr.identifier.name))
class CGCallbackFunction(CGCallback):
def __init__(self, callback, descriptorProvider):
CGCallback.__init__(self, callback, descriptorProvider,
"CallbackFunction",
methods=[CallCallback(callback, descriptorProvider)])
def getConstructors(self):
return CGCallback.getConstructors(self)
class CGCallbackFunctionImpl(CGGeneric):
def __init__(self, callback):
impl = string.Template("""\
impl CallbackContainer for ${type} {
fn new(callback: *mut JSObject) -> Rc<${type}> {
${type}::new(callback)
}
fn callback(&self) -> *mut JSObject {
self.parent.callback()
}
}
impl ToJSValConvertible for ${type} {
unsafe fn to_jsval(&self, cx: *mut JSContext, rval: MutableHandleValue) {
self.callback().to_jsval(cx, rval);
}
}\
""").substitute({"type": callback.identifier.name})
CGGeneric.__init__(self, impl)
class CGCallbackInterface(CGCallback):
def __init__(self, descriptor):
iface = descriptor.interface
attrs = [m for m in iface.members if m.isAttr() and not m.isStatic()]
getters = [CallbackGetter(a, descriptor) for a in attrs]
setters = [CallbackSetter(a, descriptor) for a in attrs
if not a.readonly]
methods = [m for m in iface.members
if m.isMethod() and not m.isStatic() and not m.isIdentifierLess()]
methods = [CallbackOperation(m, sig, descriptor) for m in methods
for sig in m.signatures()]
assert not iface.isJSImplemented() or not iface.ctor()
CGCallback.__init__(self, iface, descriptor, "CallbackInterface",
methods, getters=getters, setters=setters)
class FakeMember():
def __init__(self):
self.treatNullAs = "Default"
def isStatic(self):
return False
def isAttr(self):
return False
def isMethod(self):
return False
def getExtendedAttribute(self, name):
return None
class CallbackMember(CGNativeMember):
def __init__(self, sig, name, descriptorProvider, needThisHandling):
"""
needThisHandling is True if we need to be able to accept a specified
thisObj, False otherwise.
"""
self.retvalType = sig[0]
self.originalSig = sig
args = sig[1]
self.argCount = len(args)
if self.argCount > 0:
# Check for variadic arguments
lastArg = args[self.argCount - 1]
if lastArg.variadic:
self.argCountStr = (
"(%d - 1) + %s.len()" % (self.argCount,
lastArg.identifier.name))
else:
self.argCountStr = "%d" % self.argCount
self.needThisHandling = needThisHandling
# If needThisHandling, we generate ourselves as private and the caller
# will handle generating public versions that handle the "this" stuff.
visibility = "priv" if needThisHandling else "pub"
# We don't care, for callback codegen, whether our original member was
# a method or attribute or whatnot. Just always pass FakeMember()
# here.
CGNativeMember.__init__(self, descriptorProvider, FakeMember(),
name, (self.retvalType, args),
extendedAttrs={},
passJSBitsAsNeeded=False,
visibility=visibility)
# We have to do all the generation of our body now, because
# the caller relies on us throwing if we can't manage it.
self.exceptionCode = "return Err(JSFailed);"
self.body = self.getImpl()
def getImpl(self):
replacements = {
"declRval": self.getRvalDecl(),
"returnResult": self.getResultConversion(),
"convertArgs": self.getArgConversions(),
"doCall": self.getCall(),
"setupCall": self.getCallSetup(),
}
if self.argCount > 0:
replacements["argCount"] = self.argCountStr
replacements["argvDecl"] = string.Template(
"let mut argv = vec![UndefinedValue(); ${argCount}];\n"
).substitute(replacements)
else:
# Avoid weird 0-sized arrays
replacements["argvDecl"] = ""
# Newlines and semicolons are in the values
pre = string.Template(
"${setupCall}"
"${declRval}"
"${argvDecl}").substitute(replacements)
body = string.Template(
"${convertArgs}"
"${doCall}"
"${returnResult}").substitute(replacements)
return CGWrapper(CGIndenter(CGList([
CGGeneric(pre),
CGGeneric(body),
], "\n"), 4), pre="unsafe {\n", post="\n}").define()
def getResultConversion(self):
replacements = {
"val": "rval.handle()",
}
info = getJSToNativeConversionInfo(
self.retvalType,
self.descriptorProvider,
exceptionCode=self.exceptionCode,
isCallbackReturnValue="Callback",
# XXXbz we should try to do better here
sourceDescription="return value")
template = info.template
declType = info.declType
convertType = instantiateJSToNativeConversionTemplate(
template, replacements, declType, "rvalDecl")
if self.retvalType is None or self.retvalType.isVoid():
retval = "()"
elif self.retvalType.isAny():
retval = "rvalDecl.get()"
else:
retval = "rvalDecl"
return "%s\nOk(%s)\n" % (convertType.define(), retval)
def getArgConversions(self):
# Just re-fetch the arglist from self.originalSig, because our superclasses
# have way too many members they like to clobber, so I can't find a
# safe member name to store it in.
argConversions = [self.getArgConversion(i, arg) for (i, arg)
in enumerate(self.originalSig[1])]
# Do them back to front, so our argc modifications will work
# correctly, because we examine trailing arguments first.
argConversions.reverse()
argConversions = [CGGeneric(c) for c in argConversions]
if self.argCount > 0:
argConversions.insert(0, self.getArgcDecl())
# And slap them together.
return CGList(argConversions, "\n\n").define() + "\n\n"
def getArgConversion(self, i, arg):
argval = arg.identifier.name
if arg.variadic:
argval = argval + "[idx].get()"
jsvalIndex = "%d + idx" % i
else:
jsvalIndex = "%d" % i
if arg.optional and not arg.defaultValue:
argval += ".clone().unwrap()"
conversion = wrapForType(
"argv_root.handle_mut()", result=argval,
successCode="argv[%s] = argv_root.ptr;" % jsvalIndex,
pre="let mut argv_root = RootedValue::new(cx, UndefinedValue());")
if arg.variadic:
conversion = string.Template(
"for idx in 0..${arg}.len() {\n" +
CGIndenter(CGGeneric(conversion)).define() + "\n"
"}"
).substitute({"arg": arg.identifier.name})
elif arg.optional and not arg.defaultValue:
conversion = (
CGIfWrapper("%s.is_some()" % arg.identifier.name,
CGGeneric(conversion)).define() +
" else if argc == %d {\n"
" // This is our current trailing argument; reduce argc\n"
" argc -= 1;\n"
"} else {\n"
" argv[%d] = UndefinedValue();\n"
"}" % (i + 1, i))
return conversion
def getArgs(self, returnType, argList):
args = CGNativeMember.getArgs(self, returnType, argList)
if not self.needThisHandling:
# Since we don't need this handling, we're the actual method that
# will be called, so we need an aRethrowExceptions argument.
args.append(Argument("ExceptionHandling", "aExceptionHandling",
"ReportExceptions"))
return args
# We want to allow the caller to pass in a "this" object, as
# well as a JSContext.
return [Argument("*mut JSContext", "cx"),
Argument("HandleObject", "aThisObj")] + args
def getCallSetup(self):
if self.needThisHandling:
# It's been done for us already
return ""
# FIXME: the emitted code below is still C++-flavoured (CallSetup s(...),
# JSContext*), unlike the Rust generated elsewhere in this file.
return (
"CallSetup s(CallbackPreserveColor(), aRv, aExceptionHandling);\n"
"JSContext* cx = s.get_context();\n"
"if (!cx) {\n"
" return Err(JSFailed);\n"
"}\n")
def getArgcDecl(self):
if self.argCount <= 1:
return CGGeneric("let argc = %s;" % self.argCountStr)
return CGGeneric("let mut argc = %s;" % self.argCountStr)
@staticmethod
def ensureASCIIName(idlObject):
type = "attribute" if idlObject.isAttr() else "operation"
if re.match("[^\x20-\x7E]", idlObject.identifier.name):
raise SyntaxError('Callback %s name "%s" contains non-ASCII '
"characters. We can't handle that. %s" %
(type, idlObject.identifier.name,
idlObject.location))
if re.match('"', idlObject.identifier.name):
raise SyntaxError("Callback %s name '%s' contains "
"double-quote character. We can't handle "
"that. %s" %
(type, idlObject.identifier.name,
idlObject.location))
class CallbackMethod(CallbackMember):
def __init__(self, sig, name, descriptorProvider, needThisHandling):
CallbackMember.__init__(self, sig, name, descriptorProvider,
needThisHandling)
def getRvalDecl(self):
return "let mut rval = RootedValue::new(cx, UndefinedValue());\n"
def getCall(self):
replacements = {
"thisObj": self.getThisObj(),
"getCallable": self.getCallableDecl()
}
if self.argCount > 0:
replacements["argv"] = "argv.as_ptr()"
replacements["argc"] = "argc"
else:
replacements["argv"] = "ptr::null_mut()"
replacements["argc"] = "0"
return string.Template(
"${getCallable}"
"let rootedThis = RootedObject::new(cx, ${thisObj});\n"
"let ok = JS_CallFunctionValue(\n"
" cx, rootedThis.handle(), callable.handle(),\n"
" &HandleValueArray {\n"
" length_: ${argc} as ::libc::size_t,\n"
" elements_: ${argv}\n"
" }, rval.handle_mut());\n"
"if !ok {\n"
" return Err(JSFailed);\n"
"}\n").substitute(replacements)
class CallCallback(CallbackMethod):
def __init__(self, callback, descriptorProvider):
CallbackMethod.__init__(self, callback.signatures()[0], "Call",
descriptorProvider, needThisHandling=True)
def getThisObj(self):
return "aThisObj.get()"
def getCallableDecl(self):
return "let callable = RootedValue::new(cx, ObjectValue(&*self.parent.callback()));\n"
class CallbackOperationBase(CallbackMethod):
"""
Common class for implementing various callback operations.
"""
def __init__(self, signature, jsName, nativeName, descriptor, singleOperation):
self.singleOperation = singleOperation
self.methodName = jsName
CallbackMethod.__init__(self, signature, nativeName, descriptor, singleOperation)
def getThisObj(self):
if not self.singleOperation:
return "self.parent.callback()"
# This relies on getCallableDecl declaring a boolean
# isCallable in the case when we're a single-operation
# interface.
return "if isCallable { aThisObj.get() } else { self.parent.callback() }"
def getCallableDecl(self):
replacements = {
"methodName": self.methodName
}
getCallableFromProp = string.Template(
'RootedValue::new(cx, try!(self.parent.get_callable_property(cx, "${methodName}")))'
).substitute(replacements)
if not self.singleOperation:
return 'JS::Rooted<JS::Value> callable(cx);\n' + getCallableFromProp
return (
'let isCallable = IsCallable(self.parent.callback());\n'
'let callable =\n' +
CGIndenter(
CGIfElseWrapper('isCallable',
CGGeneric('RootedValue::new(cx, ObjectValue(&*self.parent.callback()))'),
CGGeneric(getCallableFromProp))).define() + ';\n')
class CallbackOperation(CallbackOperationBase):
"""
Codegen actual WebIDL operations on callback interfaces.
"""
def __init__(self, method, signature, descriptor):
self.ensureASCIIName(method)
jsName = method.identifier.name
CallbackOperationBase.__init__(self, signature,
jsName,
MakeNativeName(descriptor.binaryNameFor(jsName)),
descriptor, descriptor.interface.isSingleOperationInterface())
class CallbackGetter(CallbackMember):
def __init__(self, attr, descriptor):
self.ensureASCIIName(attr)
self.attrName = attr.identifier.name
CallbackMember.__init__(self,
(attr.type, []),
callbackGetterName(attr),
descriptor,
needThisHandling=False)
def getRvalDecl(self):
return "JS::Rooted<JS::Value> rval(cx, JS::UndefinedValue());\n"
def getCall(self):
replacements = {
"attrName": self.attrName
}
return string.Template(
'if (!JS_GetProperty(cx, mCallback, "${attrName}", &rval)) {\n'
' return Err(JSFailed);\n'
'}\n').substitute(replacements)
class CallbackSetter(CallbackMember):
def __init__(self, attr, descriptor):
self.ensureASCIIName(attr)
self.attrName = attr.identifier.name
CallbackMember.__init__(self,
(BuiltinTypes[IDLBuiltinType.Types.void],
[FakeArgument(attr.type, attr)]),
callbackSetterName(attr),
descriptor,
needThisHandling=False)
def getRvalDecl(self):
# We don't need an rval
return ""
def getCall(self):
replacements = {
"attrName": self.attrName,
"argv": "argv.handleAt(0)",
}
# FIXME: MOZ_ASSERT/mCallback below are unported C++ remnants.
return string.Template(
'MOZ_ASSERT(argv.length() == 1);\n'
'if (!JS_SetProperty(cx, mCallback, "${attrName}", ${argv})) {\n'
' return Err(JSFailed);\n'
'}\n').substitute(replacements)
def getArgcDecl(self):
return None
class GlobalGenRoots():
"""
Roots for global codegen.
To generate code, call the method associated with the target, and then
call the appropriate define/declare method.
"""
@staticmethod
def PrototypeList(config):
# Prototype ID enum.
interfaces = config.getDescriptors(isCallback=False)
protos = [d.name for d in interfaces]
constructors = [d.name for d in interfaces if d.hasDescendants()]
proxies = [d.name for d in config.getDescriptors(proxy=True)]
return CGList([
CGGeneric(AUTOGENERATED_WARNING_COMMENT),
CGGeneric("pub const PROTO_OR_IFACE_LENGTH: usize = %d;\n" % (len(protos) + len(constructors))),
CGGeneric("pub const MAX_PROTO_CHAIN_LENGTH: usize = %d;\n\n" % config.maxProtoChainLength),
CGNonNamespacedEnum('ID', protos, 0, deriving="PartialEq, Copy, Clone", repr="u16"),
CGNonNamespacedEnum('Constructor', constructors, len(protos),
deriving="PartialEq, Copy, Clone", repr="u16"),
CGWrapper(CGIndenter(CGList([CGGeneric('"' + name + '"') for name in protos],
",\n"),
indentLevel=4),
pre="static INTERFACES: [&'static str; %d] = [\n" % len(protos),
post="\n];\n\n"),
CGGeneric("pub fn proto_id_to_name(proto_id: u16) -> &'static str {\n"
" debug_assert!(proto_id < ID::Last as u16);\n"
" INTERFACES[proto_id as usize]\n"
"}\n\n"),
CGNonNamespacedEnum('Proxies', proxies, 0, deriving="PartialEq, Copy, Clone"),
])
@staticmethod
def RegisterBindings(config):
# TODO - Generate the methods we want
code = CGList([
CGRegisterProtos(config),
CGRegisterProxyHandlers(config),
], "\n")
return CGImports(code, [], [], [
'dom::bindings::codegen',
'dom::bindings::codegen::PrototypeList::Proxies',
'js::jsapi::JSContext',
'js::jsapi::HandleObject',
'libc',
], ignored_warnings=[])
@staticmethod
def InterfaceTypes(config):
descriptors = [d.name for d in config.getDescriptors(register=True, isCallback=False)]
curr = CGList([CGGeneric("pub use dom::%s::%s;\n" % (name.lower(), name)) for name in descriptors])
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def Bindings(config):
descriptors = (set(d.name + "Binding" for d in config.getDescriptors(register=True)) |
set(getModuleFromObject(d) for d in config.callbacks) |
set(getModuleFromObject(d) for d in config.getDictionaries()))
curr = CGList([CGGeneric("pub mod %s;\n" % name) for name in sorted(descriptors)])
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def InheritTypes(config):
descriptors = config.getDescriptors(register=True, isCallback=False)
imports = [CGGeneric("use dom::types::*;\n"),
CGGeneric("use dom::bindings::conversions::{DerivedFrom, get_dom_class};\n"),
CGGeneric("use dom::bindings::inheritance::Castable;\n"),
CGGeneric("use dom::bindings::js::{JS, LayoutJS, Root};\n"),
CGGeneric("use dom::bindings::trace::JSTraceable;\n"),
CGGeneric("use dom::bindings::reflector::Reflectable;\n"),
CGGeneric("use js::jsapi::JSTracer;\n\n"),
CGGeneric("use std::mem;\n\n")]
allprotos = []
topTypes = []
hierarchy = defaultdict(list)
for descriptor in descriptors:
name = descriptor.name
chain = descriptor.prototypeChain
upcast = descriptor.hasDescendants()
downcast = len(chain) != 1
if upcast and not downcast:
topTypes.append(name)
if not upcast:
# No other interface will implement DerivedFrom<Foo> for this Foo, so avoid
# implementing it for itself.
chain = chain[:-1]
# Implement `DerivedFrom<Bar>` for `Foo`, for all `Bar` that `Foo` inherits from.
if chain:
allprotos.append(CGGeneric("impl Castable for %s {}\n" % name))
for baseName in chain:
allprotos.append(CGGeneric("impl DerivedFrom<%s> for %s {}\n" % (baseName, name)))
if chain:
allprotos.append(CGGeneric("\n"))
if downcast:
hierarchy[descriptor.getParentName()].append(name)
typeIdCode = []
topTypeVariants = [
("ID used by abstract interfaces.", "Abstract"),
("ID used by interfaces that are not castable.", "Alone"),
]
topTypeVariants += [
("ID used by interfaces that derive from %s." % typeName, "%s(%sTypeId)" % (typeName, typeName))
for typeName in topTypes
]
topTypeVariantsAsStrings = [CGGeneric("/// %s\n%s," % variant) for variant in topTypeVariants]
typeIdCode.append(CGWrapper(CGIndenter(CGList(topTypeVariantsAsStrings, "\n"), 4),
pre="#[derive(Clone, Copy, Debug)]\npub enum TopTypeId {\n",
post="\n}\n\n"))
def type_id_variant(name):
# If `name` is present in the hierarchy's keys, that means some other interfaces
# derive from it and this enum variant should have an argument with its own
# TypeId enum.
return "%s(%sTypeId)" % (name, name) if name in hierarchy else name
for base, derived in hierarchy.iteritems():
variants = []
if not config.getInterface(base).getExtendedAttribute("Abstract"):
variants.append(CGGeneric(base))
variants += [CGGeneric(type_id_variant(derivedName)) for derivedName in derived]
derives = "Clone, Copy, Debug"
if base != 'EventTarget' and base != 'HTMLElement':
derives += ", PartialEq"
typeIdCode.append(CGWrapper(CGIndenter(CGList(variants, ",\n"), 4),
pre="#[derive(%s)]\npub enum %sTypeId {\n" % (derives, base),
post="\n}\n\n"))
if base in topTypes:
typeIdCode.append(CGGeneric("""\
impl %(base)s {
pub fn type_id(&self) -> &'static %(base)sTypeId {
let domclass = unsafe {
get_dom_class(self.reflector().get_jsobject().get()).unwrap()
};
match domclass.type_id {
TopTypeId::%(base)s(ref type_id) => type_id,
_ => unreachable!(),
}
}
}
""" % {'base': base}))
curr = CGList(imports + typeIdCode + allprotos)
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
return curr
@staticmethod
def UnionTypes(config):
curr = UnionTypes(config.getDescriptors(),
config.getDictionaries(),
config.getCallbacks(),
config)
# Add the auto-generated comment.
curr = CGWrapper(curr, pre=AUTOGENERATED_WARNING_COMMENT)
# Done.
return curr
| mpl-2.0 |
City-of-Bloomington/green-rental | geopy/units.py | 24 | 2985 | """
Convert between angle units (degrees, radians, arcminutes, arcseconds)
and length units (kilometers, meters, miles, feet, nautical miles).
"""
import math
# Angles
def degrees(radians=0, arcminutes=0, arcseconds=0): # pylint: disable=W0621
"""
TODO docs.
"""
deg = 0.
if radians:
deg = math.degrees(radians)
if arcminutes:
deg += arcminutes / arcmin(degrees=1.)
if arcseconds:
deg += arcseconds / arcsec(degrees=1.)
return deg
def radians(degrees=0, arcminutes=0, arcseconds=0): # pylint: disable=W0621
"""
TODO docs.
"""
if arcminutes:
degrees += arcminutes / arcmin(degrees=1.)
if arcseconds:
degrees += arcseconds / arcsec(degrees=1.)
return math.radians(degrees)
def arcminutes(degrees=0, radians=0, arcseconds=0): # pylint: disable=W0621
"""
TODO docs.
"""
if radians:
degrees += math.degrees(radians)
if arcseconds:
degrees += arcseconds / arcsec(degrees=1.)
return degrees * 60.
def arcseconds(degrees=0, radians=0, arcminutes=0): # pylint: disable=W0621
"""
TODO docs.
"""
if radians:
degrees += math.degrees(radians)
if arcminutes:
degrees += arcminutes / arcmin(degrees=1.)
return degrees * 3600.
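# Quick sanity checks for the angle helpers:
#
#     >>> degrees(radians=math.pi)
#     180.0
#     >>> arcminutes(degrees=1)
#     60.0
#     >>> arcseconds(degrees=1)
#     3600.0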
# Lengths
def kilometers(meters=0, miles=0, feet=0, nautical=0): # pylint: disable=W0621
"""
TODO docs.
"""
ret = 0.
if meters:
ret += meters / 1000.
if feet:
miles += feet / ft(1.)
if nautical:
ret += nautical / nm(1.)
ret += miles * 1.609344
return ret
def meters(kilometers=0, miles=0, feet=0, nautical=0): # pylint: disable=W0621
"""
TODO docs.
"""
return (kilometers + km(nautical=nautical, miles=miles, feet=feet)) * 1000
def miles(kilometers=0, meters=0, feet=0, nautical=0): # pylint: disable=W0621
"""
TODO docs.
"""
ret = 0.
if nautical:
kilometers += nautical / nm(1.)
if feet:
ret += feet / ft(1.)
if meters:
kilometers += meters / 1000.
ret += kilometers * 0.621371192
return ret
def feet(kilometers=0, meters=0, miles=0, nautical=0): # pylint: disable=W0621
"""
TODO docs.
"""
ret = 0.
if nautical:
kilometers += nautical / nm(1.)
if meters:
kilometers += meters / 1000.
if kilometers:
miles += mi(kilometers=kilometers)
ret += miles * 5280
return ret
def nautical(kilometers=0, meters=0, miles=0, feet=0): # pylint: disable=W0621
"""
TODO docs.
"""
ret = 0.
if feet:
miles += feet / ft(1.)
if miles:
kilometers += km(miles=miles)
if meters:
kilometers += meters / 1000.
ret += kilometers / 1.852
return ret
# Compatible names
rad = radians # pylint: disable=C0103
arcmin = arcminutes # pylint: disable=C0103
arcsec = arcseconds # pylint: disable=C0103
km = kilometers # pylint: disable=C0103
m = meters # pylint: disable=C0103
mi = miles # pylint: disable=C0103
ft = feet # pylint: disable=C0103
nm = nautical # pylint: disable=C0103
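# Example conversions via the aliases:
#
#     >>> km(miles=1)
#     1.609344
#     >>> mi(kilometers=1)
#     0.621371192
#     >>> ft(miles=1)
#     5280.0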
| agpl-3.0 |
devs1991/test_edx_docmode | venv/lib/python2.7/site-packages/Crypto/SelfTest/Cipher/__init__.py | 117 | 2401 | # -*- coding: utf-8 -*-
#
# SelfTest/Cipher/__init__.py: Self-test for cipher modules
#
# Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test for cipher modules"""
__revision__ = "$Id$"
def get_tests(config={}):
tests = []
from Crypto.SelfTest.Cipher import test_AES; tests += test_AES.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_ARC2; tests += test_ARC2.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_ARC4; tests += test_ARC4.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_Blowfish; tests += test_Blowfish.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_CAST; tests += test_CAST.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_DES3; tests += test_DES3.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_DES; tests += test_DES.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_XOR; tests += test_XOR.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_pkcs1_15; tests += test_pkcs1_15.get_tests(config=config)
from Crypto.SelfTest.Cipher import test_pkcs1_oaep; tests += test_pkcs1_oaep.get_tests(config=config)
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| agpl-3.0 |
rwl/puddle | puddle/resource/action/properties_action.py | 1 | 3749 | #------------------------------------------------------------------------------
# Copyright (C) 2009 Richard W. Lincoln
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#------------------------------------------------------------------------------
""" Defines an action for viewing resource properties.
"""
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from enthought.io.api import File
from enthought.traits.api import Bool, Instance
from enthought.traits.ui.api import View, Item, Group
from enthought.pyface.action.api import Action
#------------------------------------------------------------------------------
# "PropertiesAction" class:
#------------------------------------------------------------------------------
class PropertiesAction(Action):
""" Defines an action for viewing resource properties.
"""
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
# The action"s name (displayed on menus/tool bar tools etc):
name = "P&roperties"
# Keyboard accelerator:
accelerator = "Alt+Enter"
#--------------------------------------------------------------------------
# "Action" interface:
#--------------------------------------------------------------------------
def perform(self, event):
""" Perform the action.
"""
selections = self.window.selection
if selections:
selection = selections[0]
if isinstance(selection, File):
selection.edit_traits( parent=self.window.control,
view=self._create_resource_view(selection),
kind="livemodal" )
def _create_resource_view(self, selection):
""" Creates a resource view.
"""
resource_view = View(
Item(name="absolute_path", style="readonly"),
# FIXME: Readonly boolean editor is just blank
# Item(name="exists", style="readonly"),
# Item(name="is_file", style="readonly"),
# Item(name="is_folder", style="readonly"),
# Item(name="is_package", style="readonly"),
# Item(name="is_readonly", style="readonly"),
Item(name="mime_type", style="readonly"),
Item(name="url", style="readonly"),
title="Properties for %s" % selection.name+selection.ext,
icon=self.window.application.icon)
return resource_view
# EOF -------------------------------------------------------------------------
| mit |
zenmanenergy/Arduino | arduino-core/src/processing/app/i18n/python/requests/packages/charade/sbcsgroupprober.py | 2936 | 3291 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetgroupprober import CharSetGroupProber
from .sbcharsetprober import SingleByteCharSetProber
from .langcyrillicmodel import (Win1251CyrillicModel, Koi8rModel,
Latin5CyrillicModel, MacCyrillicModel,
Ibm866Model, Ibm855Model)
from .langgreekmodel import Latin7GreekModel, Win1253GreekModel
from .langbulgarianmodel import Latin5BulgarianModel, Win1251BulgarianModel
from .langhungarianmodel import Latin2HungarianModel, Win1250HungarianModel
from .langthaimodel import TIS620ThaiModel
from .langhebrewmodel import Win1255HebrewModel
from .hebrewprober import HebrewProber
class SBCSGroupProber(CharSetGroupProber):
def __init__(self):
CharSetGroupProber.__init__(self)
self._mProbers = [
SingleByteCharSetProber(Win1251CyrillicModel),
SingleByteCharSetProber(Koi8rModel),
SingleByteCharSetProber(Latin5CyrillicModel),
SingleByteCharSetProber(MacCyrillicModel),
SingleByteCharSetProber(Ibm866Model),
SingleByteCharSetProber(Ibm855Model),
SingleByteCharSetProber(Latin7GreekModel),
SingleByteCharSetProber(Win1253GreekModel),
SingleByteCharSetProber(Latin5BulgarianModel),
SingleByteCharSetProber(Win1251BulgarianModel),
SingleByteCharSetProber(Latin2HungarianModel),
SingleByteCharSetProber(Win1250HungarianModel),
SingleByteCharSetProber(TIS620ThaiModel),
]
hebrewProber = HebrewProber()
logicalHebrewProber = SingleByteCharSetProber(Win1255HebrewModel,
False, hebrewProber)
visualHebrewProber = SingleByteCharSetProber(Win1255HebrewModel, True,
hebrewProber)
hebrewProber.set_model_probers(logicalHebrewProber, visualHebrewProber)
self._mProbers.extend([hebrewProber, logicalHebrewProber,
visualHebrewProber])
self.reset()
| lgpl-2.1 |
yanheven/nova | nova/api/openstack/compute/schemas/v3/create_backup.py | 59 | 1321 | # Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.validation import parameter_types
create_backup = {
'type': 'object',
'properties': {
'createBackup': {
'type': 'object',
'properties': {
'name': parameter_types.name,
'backup_type': {
'type': 'string',
},
'rotation': parameter_types.non_negative_integer,
'metadata': {
'type': 'object',
}
},
'required': ['name', 'backup_type', 'rotation'],
'additionalProperties': False,
},
},
'required': ['createBackup'],
'additionalProperties': False,
}
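# A request body satisfying this schema looks roughly like the following
# (the string values are illustrative; only the keys and value types are
# constrained by the schema):
#
#     {
#         "createBackup": {
#             "name": "nightly",
#             "backup_type": "daily",
#             "rotation": 7,
#             "metadata": {}
#         }
#     }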
| apache-2.0 |
nsteinme/phy | phy/detect/store.py | 2 | 7064 | # -*- coding: utf-8 -*-
"""Spike detection store."""
#------------------------------------------------------------------------------
# Imports
#------------------------------------------------------------------------------
import os
import os.path as op
from collections import defaultdict
import numpy as np
from six import string_types
from ..utils.array import (_save_arrays,
_load_arrays,
_concatenate,
)
from ..utils.logging import debug
from ..utils.settings import _ensure_dir_exists
#------------------------------------------------------------------------------
# Spike counts
#------------------------------------------------------------------------------
class SpikeCounts(object):
"""Count spikes in chunks and channel groups."""
def __init__(self, counts=None, groups=None, chunk_keys=None):
self._groups = groups
self._chunk_keys = chunk_keys
self._counts = counts or defaultdict(lambda: defaultdict(int))
def append(self, group=None, chunk_key=None, count=None):
self._counts[group][chunk_key] += count
@property
def counts(self):
return self._counts
def per_group(self, group):
return sum(self._counts.get(group, {}).values())
def per_chunk(self, chunk_key):
return sum(self._counts[group].get(chunk_key, 0)
for group in self._groups)
def __call__(self, group=None, chunk_key=None):
if group is not None and chunk_key is not None:
return self._counts.get(group, {}).get(chunk_key, 0)
elif group is not None:
return self.per_group(group)
elif chunk_key is not None:
return self.per_chunk(chunk_key)
elif group is None and chunk_key is None:
return sum(self.per_group(group) for group in self._groups)
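# Sketch of the counting semantics (hypothetical group/chunk keys):
#
#     counts = SpikeCounts(groups=[0, 1], chunk_keys=[10, 20])
#     counts.append(group=0, chunk_key=10, count=5)
#     counts.append(group=1, chunk_key=20, count=3)
#     counts(group=0)       # -> 5
#     counts(chunk_key=20)  # -> 3
#     counts()              # -> 8 (total across groups)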
#------------------------------------------------------------------------------
# Spike detection store
#------------------------------------------------------------------------------
class ArrayStore(object):
def __init__(self, root_dir):
self._root_dir = op.realpath(root_dir)
_ensure_dir_exists(self._root_dir)
def _rel_path(self, **kwargs):
"""Relative to the root."""
raise NotImplementedError()
def _path(self, **kwargs):
"""Absolute path of a data file."""
path = op.realpath(op.join(self._root_dir, self._rel_path(**kwargs)))
_ensure_dir_exists(op.dirname(path))
assert path.endswith('.npy')
return path
def _offsets_path(self, path):
assert path.endswith('.npy')
return op.splitext(path)[0] + '.offsets.npy'
def _contains_multiple_arrays(self, path):
return op.exists(path) and op.exists(self._offsets_path(path))
def store(self, data=None, **kwargs):
"""Store an array or list of arrays."""
path = self._path(**kwargs)
if isinstance(data, list):
if not data:
return
_save_arrays(path, data)
elif isinstance(data, np.ndarray):
dtype = data.dtype
if not data.size:
return
assert dtype != np.object
np.save(path, data)
# debug("Store {}.".format(path))
def load(self, **kwargs):
path = self._path(**kwargs)
if not op.exists(path):
debug("File `{}` doesn't exist.".format(path))
return
# Multiple arrays:
# debug("Load {}.".format(path))
if self._contains_multiple_arrays(path):
return _load_arrays(path)
else:
return np.load(path)
def delete(self, **kwargs):
path = self._path(**kwargs)
if op.exists(path):
os.remove(path)
# debug("Deleted `{}`.".format(path))
offsets_path = self._offsets_path(path)
if op.exists(offsets_path):
os.remove(offsets_path)
# debug("Deleted `{}`.".format(offsets_path))
class SpikeDetektStore(ArrayStore):
"""Store the following items:
* filtered
* components
* spike_samples
* features
* masks
"""
def __init__(self, root_dir, groups=None, chunk_keys=None):
super(SpikeDetektStore, self).__init__(root_dir)
self._groups = groups
self._chunk_keys = chunk_keys
self._spike_counts = SpikeCounts(groups=groups, chunk_keys=chunk_keys)
def _rel_path(self, name=None, chunk_key=None, group=None):
assert chunk_key >= 0
assert group is None or group >= 0
assert isinstance(name, string_types)
group = group if group is not None else 'all'
return 'group_{group}/{name}/chunk_{chunk:d}.npy'.format(
chunk=chunk_key, name=name, group=group)
@property
def groups(self):
return self._groups
@property
def chunk_keys(self):
return self._chunk_keys
def _iter(self, group=None, name=None):
for chunk_key in self.chunk_keys:
yield self.load(group=group, chunk_key=chunk_key, name=name)
def spike_samples(self, group=None):
if group is None:
return {group: self.spike_samples(group) for group in self._groups}
return self.concatenate(self._iter(group=group, name='spike_samples'))
def features(self, group=None):
"""Yield chunk features."""
if group is None:
return {group: self.features(group) for group in self._groups}
return self._iter(group=group, name='features')
def masks(self, group=None):
"""Yield chunk masks."""
if group is None:
return {group: self.masks(group) for group in self._groups}
return self._iter(group=group, name='masks')
@property
def spike_counts(self):
return self._spike_counts
def append(self, group=None, chunk_key=None,
spike_samples=None, features=None, masks=None,
spike_offset=0):
if spike_samples is None or len(spike_samples) == 0:
return
n = len(spike_samples)
assert features.shape[0] == n
assert masks.shape[0] == n
spike_samples = spike_samples + spike_offset
self.store(group=group, chunk_key=chunk_key,
name='features', data=features)
self.store(group=group, chunk_key=chunk_key,
name='masks', data=masks)
self.store(group=group, chunk_key=chunk_key,
name='spike_samples', data=spike_samples)
self._spike_counts.append(group=group, chunk_key=chunk_key, count=n)
def concatenate(self, arrays):
return _concatenate(arrays)
def delete_all(self, name):
"""Delete all files for a given data name."""
for group in self._groups:
for chunk_key in self._chunk_keys:
super(SpikeDetektStore, self).delete(name=name, group=group,
chunk_key=chunk_key)
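# Usage sketch (illustrative; the root directory, shapes and dtypes are
# assumptions, not part of the original module):
#
#   store = SpikeDetektStore('/tmp/sd_store', groups=[0], chunk_keys=[0, 1])
#   store.append(group=0, chunk_key=0,
#                spike_samples=np.array([10, 20]),
#                features=np.zeros((2, 3), dtype=np.float32),
#                masks=np.ones((2, 3), dtype=np.float32))
#   store.spike_counts(group=0)       # 2
#   samples = store.spike_samples(0)  # concatenated over all chunks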
| bsd-3-clause |
endlessm/endless-ndn | eos_data_distribution/parallel.py | 1 | 2456 | # -*- Mode:python; coding: utf-8; c-file-style:"gnu"; indent-tabs-mode:nil -*- */
#
# Copyright (C) 2016 Endless Mobile, Inc.
# Author: Niv Sardi <xaiki@endlessm.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# A copy of the GNU Lesser General Public License is in the file COPYING.
import logging
from gi.repository import GObject
logger = logging.getLogger(__name__)
class Batch(GObject.GObject):
__gsignals__ = {
'complete': (GObject.SIGNAL_RUN_FIRST, None, ()),
}
def __init__(self, workers, type="Batch"):
super(Batch, self).__init__()
self._type = type
self._incomplete_workers = set(workers)
for worker in self._incomplete_workers:
worker.connect('complete', self._on_batch_complete)
def start(self):
if not self._incomplete_workers:
logger.info('%s complete: no workers', self._type)
self.emit('complete')
for worker in self._incomplete_workers:
worker.start()
def _on_batch_complete(self, worker):
logger.info("%s complete: %s", self._type, worker)
self._incomplete_workers.remove(worker)
if len(self._incomplete_workers) == 0:
self.emit('complete')
if __name__ == '__main__':
import argparse
from . import utils
from gi.repository import GLib
from ndn.file import FileConsumer
parser = argparse.ArgumentParser()
parser.add_argument("-o", "--output")
parser.add_argument("-c", "--count", default=10, type=int)
args = utils.parse_args(parser=parser)
loop = GLib.MainLoop()
consumers = [FileConsumer("%s-%s"%(args.name, i), "%s-%s"%(args.output, i))
for i in range(args.count)]
batch = Batch(workers=consumers)
batch.connect('complete', lambda *a: loop.quit())
batch.start()
loop.run()
| lgpl-3.0 |
Monoide/vim_conf | autoload/conque_term/conque_sole_subprocess.py | 13 | 21952 | # FILE: autoload/conque_term/conque_sole_subprocess.py
# AUTHOR: Nico Raffo <nicoraffo@gmail.com>
# WEBSITE: http://conque.googlecode.com
# MODIFIED: 2011-08-12
# VERSION: 2.2, for Vim 7.0
# LICENSE:
# Conque - Vim terminal/console emulator
# Copyright (C) 2009-__YEAR__ Nico Raffo
#
# MIT License
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
""" ConqueSoleSubprocess
Creates a new subprocess with its own (hidden) console window.
Mirrors console window text onto a block of shared memory (mmap), along with
text attribute data. Also handles translation of text input into the format
the Windows console expects.
Sample Usage:
sh = ConqueSoleSubprocess()
sh.open("cmd.exe", "unique_str")
shm_in = ConqueSoleSharedMemory(mem_key = "unique_str", mem_type = "input", ...)
shm_out = ConqueSoleSharedMemory(mem_key = "unique_str", mem_type = "output", ...)
output = shm_out.read(...)
shm_in.write("dir\r")
output = shm_out.read(...)
"""
import time
import re
import os
import ctypes
from conque_globals import *
from conque_win32_util import *
from conque_sole_shared_memory import *
class ConqueSoleSubprocess():
# subprocess handle and pid
handle = None
pid = None
# input / output handles
stdin = None
stdout = None
# size of console window
window_width = 160
window_height = 40
# max lines for the console buffer
buffer_width = 160
buffer_height = 100
# keep track of the buffer number at the top of the window
top = 0
line_offset = 0
# buffer height is CONQUE_SOLE_BUFFER_LENGTH * output_blocks
output_blocks = 1
# cursor position
cursor_line = 0
cursor_col = 0
# console data, array of lines
data = []
# console attribute data, array of array of int
attributes = []
attribute_cache = {}
# default attribute
default_attribute = 7
# shared memory objects
shm_input = None
shm_output = None
shm_attributes = None
shm_stats = None
shm_command = None
shm_rescroll = None
shm_resize = None
# are we still a valid process? (stored as `alive` so the flag does not
# shadow the is_alive() method below)
alive = True
# running in fast mode
fast_mode = 0
# used for periodic execution of screen and memory redrawing
screen_redraw_ct = 0
mem_redraw_ct = 0
def open(self, cmd, mem_key, options={}):
""" Create subproccess running in hidden console window. """
self.reset = True
try:
# if we're already attached to a console, then unattach
try:
ctypes.windll.kernel32.FreeConsole()
except:
pass
# set buffer height
self.buffer_height = CONQUE_SOLE_BUFFER_LENGTH
if 'LINES' in options and 'COLUMNS' in options:
self.window_width = options['COLUMNS']
self.window_height = options['LINES']
self.buffer_width = options['COLUMNS']
# fast mode
self.fast_mode = options['FAST_MODE']
# console window options
si = STARTUPINFO()
# hide window
si.dwFlags |= STARTF_USESHOWWINDOW
si.wShowWindow = SW_HIDE
#si.wShowWindow = SW_MINIMIZE
# process options
flags = NORMAL_PRIORITY_CLASS | CREATE_NEW_PROCESS_GROUP | CREATE_UNICODE_ENVIRONMENT | CREATE_NEW_CONSOLE
# created process info
pi = PROCESS_INFORMATION()
# create the process!
res = ctypes.windll.kernel32.CreateProcessW(None, u(cmd), None, None, 0, flags, None, u('.'), ctypes.byref(si), ctypes.byref(pi))
# process info
self.pid = pi.dwProcessId
self.handle = pi.hProcess
# attach ourselves to the new console
# console is not immediately available
for i in range(10):
time.sleep(0.25)
try:
res = ctypes.windll.kernel32.AttachConsole(self.pid)
break
except:
pass
# get input / output handles
self.stdout = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
self.stdin = ctypes.windll.kernel32.GetStdHandle(STD_INPUT_HANDLE)
# set buffer size
size = COORD(self.buffer_width, self.buffer_height)
res = ctypes.windll.kernel32.SetConsoleScreenBufferSize(self.stdout, size)
# the previous set-size call needs time to take effect
time.sleep(0.2)
# set window size
self.set_window_size(self.window_width, self.window_height)
# set utf-8 code page
if 'CODE_PAGE' in options and options['CODE_PAGE'] > 0:
if ctypes.windll.kernel32.IsValidCodePage(ctypes.c_uint(options['CODE_PAGE'])):
ctypes.windll.kernel32.SetConsoleCP(ctypes.c_uint(options['CODE_PAGE']))
ctypes.windll.kernel32.SetConsoleOutputCP(ctypes.c_uint(options['CODE_PAGE']))
# init shared memory
self.init_shared_memory(mem_key)
# init read buffers
self.tc = ctypes.create_unicode_buffer(self.buffer_width)
self.ac = ctypes.create_unicode_buffer(self.buffer_width)
return True
except:
return False
def init_shared_memory(self, mem_key):
""" Create shared memory objects. """
self.shm_input = ConqueSoleSharedMemory(CONQUE_SOLE_INPUT_SIZE, 'input', mem_key)
self.shm_input.create('write')
self.shm_input.clear()
self.shm_output = ConqueSoleSharedMemory(self.buffer_height * self.buffer_width, 'output', mem_key, True)
self.shm_output.create('write')
self.shm_output.clear()
if not self.fast_mode:
buf_info = self.get_buffer_info()
self.shm_attributes = ConqueSoleSharedMemory(self.buffer_height * self.buffer_width, 'attributes', mem_key, True, chr(buf_info.wAttributes), encoding='latin-1')
self.shm_attributes.create('write')
self.shm_attributes.clear()
self.shm_stats = ConqueSoleSharedMemory(CONQUE_SOLE_STATS_SIZE, 'stats', mem_key, serialize=True)
self.shm_stats.create('write')
self.shm_stats.clear()
self.shm_command = ConqueSoleSharedMemory(CONQUE_SOLE_COMMANDS_SIZE, 'command', mem_key, serialize=True)
self.shm_command.create('write')
self.shm_command.clear()
self.shm_resize = ConqueSoleSharedMemory(CONQUE_SOLE_RESIZE_SIZE, 'resize', mem_key, serialize=True)
self.shm_resize.create('write')
self.shm_resize.clear()
self.shm_rescroll = ConqueSoleSharedMemory(CONQUE_SOLE_RESCROLL_SIZE, 'rescroll', mem_key, serialize=True)
self.shm_rescroll.create('write')
self.shm_rescroll.clear()
return True
def check_commands(self):
""" Check for and process commands from Vim. """
cmd = self.shm_command.read()
if cmd:
# shut it all down
if cmd['cmd'] == 'close':
# clear command
self.shm_command.clear()
self.close()
return
cmd = self.shm_resize.read()
if cmd:
# clear command
self.shm_resize.clear()
# resize console
if cmd['cmd'] == 'resize':
# only change buffer width if it's larger
if cmd['data']['width'] > self.buffer_width:
self.buffer_width = cmd['data']['width']
# always change console width and height
self.window_width = cmd['data']['width']
self.window_height = cmd['data']['height']
# reset the console
buf_info = self.get_buffer_info()
self.reset_console(buf_info, add_block=False)
def read(self):
""" Read from windows console and update shared memory blocks. """
# if the process has died, publish final stats and stop reading
if self.screen_redraw_ct == 0 and not self.is_alive():
stats = {'top_offset': 0, 'default_attribute': 0, 'cursor_x': 0, 'cursor_y': self.cursor_line, 'is_alive': 0}
self.shm_stats.write(stats)
return
# check for commands
self.check_commands()
# get cursor position
buf_info = self.get_buffer_info()
curs_line = buf_info.dwCursorPosition.Y
curs_col = buf_info.dwCursorPosition.X
# set update range
if curs_line != self.cursor_line or self.top != buf_info.srWindow.Top or self.screen_redraw_ct == CONQUE_SOLE_SCREEN_REDRAW:
self.screen_redraw_ct = 0
read_start = self.top
read_end = max([buf_info.srWindow.Bottom + 1, curs_line + 1])
else:
read_start = curs_line
read_end = curs_line + 1
# vars used in for loop
coord = COORD(0, 0)
chars_read = ctypes.c_int(0)
# read new data
for i in range(read_start, read_end):
coord.Y = i
res = ctypes.windll.kernel32.ReadConsoleOutputCharacterW(self.stdout, ctypes.byref(self.tc), self.buffer_width, coord, ctypes.byref(chars_read))
if not self.fast_mode:
ctypes.windll.kernel32.ReadConsoleOutputAttribute(self.stdout, ctypes.byref(self.ac), self.buffer_width, coord, ctypes.byref(chars_read))
t = self.tc.value
if not self.fast_mode:
a = self.ac.value
# add data
if i >= len(self.data):
for j in range(len(self.data), i + 1):
self.data.append('')
if not self.fast_mode:
self.attributes.append('')
self.data[i] = t
if not self.fast_mode:
self.attributes[i] = a
#for i in range(0, len(t)):
# write new output to shared memory
try:
if self.mem_redraw_ct == CONQUE_SOLE_MEM_REDRAW:
self.mem_redraw_ct = 0
for i in range(0, len(self.data)):
self.shm_output.write(text=self.data[i], start=self.buffer_width * i)
if not self.fast_mode:
self.shm_attributes.write(text=self.attributes[i], start=self.buffer_width * i)
else:
for i in range(read_start, read_end):
self.shm_output.write(text=self.data[i], start=self.buffer_width * i)
if not self.fast_mode:
self.shm_attributes.write(text=self.attributes[i], start=self.buffer_width * i)
#self.shm_output.write(text=''.join(self.data[read_start:read_end]), start=read_start * self.buffer_width)
#self.shm_attributes.write(text=''.join(self.attributes[read_start:read_end]), start=read_start * self.buffer_width)
# write cursor position to shared memory
stats = {'top_offset': buf_info.srWindow.Top, 'default_attribute': buf_info.wAttributes, 'cursor_x': curs_col, 'cursor_y': curs_line, 'is_alive': 1}
self.shm_stats.write(stats)
# adjust screen position
self.top = buf_info.srWindow.Top
self.cursor_line = curs_line
# check for reset
if curs_line > buf_info.dwSize.Y - 200:
self.reset_console(buf_info)
except:
pass
# increment redraw counters
self.screen_redraw_ct += 1
self.mem_redraw_ct += 1
return None
def reset_console(self, buf_info, add_block=True):
""" Extend the height of the current console if the cursor postion gets within 200 lines of the current size. """
# sometimes we just want to change the buffer width,
# in which case no need to add another block
if add_block:
self.output_blocks += 1
# close down old memory
self.shm_output.close()
self.shm_output = None
if not self.fast_mode:
self.shm_attributes.close()
self.shm_attributes = None
# new shared memory key
mem_key = 'mk' + str(time.time())
# reallocate memory
self.shm_output = ConqueSoleSharedMemory(self.buffer_height * self.buffer_width * self.output_blocks, 'output', mem_key, True)
self.shm_output.create('write')
self.shm_output.clear()
# backfill data
if len(self.data[0]) < self.buffer_width:
for i in range(0, len(self.data)):
self.data[i] = self.data[i] + ' ' * (self.buffer_width - len(self.data[i]))
self.shm_output.write(''.join(self.data))
if not self.fast_mode:
self.shm_attributes = ConqueSoleSharedMemory(self.buffer_height * self.buffer_width * self.output_blocks, 'attributes', mem_key, True, chr(buf_info.wAttributes), encoding='latin-1')
self.shm_attributes.create('write')
self.shm_attributes.clear()
# backfill attributes
if len(self.attributes[0]) < self.buffer_width:
for i in range(0, len(self.attributes)):
self.attributes[i] = self.attributes[i] + chr(buf_info.wAttributes) * (self.buffer_width - len(self.attributes[i]))
if not self.fast_mode:
self.shm_attributes.write(''.join(self.attributes))
# notify wrapper of new output block
self.shm_rescroll.write({'cmd': 'new_output', 'data': {'blocks': self.output_blocks, 'mem_key': mem_key}})
# set buffer size
size = COORD(X=self.buffer_width, Y=self.buffer_height * self.output_blocks)
res = ctypes.windll.kernel32.SetConsoleScreenBufferSize(self.stdout, size)
# the previous set-size call needs time to take effect
time.sleep(0.2)
# set window size
self.set_window_size(self.window_width, self.window_height)
# init read buffers
self.tc = ctypes.create_unicode_buffer(self.buffer_width)
self.ac = ctypes.create_unicode_buffer(self.buffer_width)
def write(self):
""" Write text to console.
This function just parses out special sequences for special key events
and passes on the text to the plain or virtual key functions.
"""
# get input from shared mem
text = self.shm_input.read()
# nothing to do here
if text == u(''):
return
# clear input queue
self.shm_input.clear()
# split on VK codes
chunks = CONQUE_WIN32_REGEX_VK.split(text)
# if len() is one then no vks
if len(chunks) == 1:
self.write_plain(text)
return
# loop over chunks and delegate
for t in chunks:
if t == '':
continue
if CONQUE_WIN32_REGEX_VK.match(t):
self.write_vk(t[2:-2])
else:
self.write_plain(t)
def write_plain(self, text):
""" Write simple text to subprocess. """
li = INPUT_RECORD * len(text)
list_input = li()
for i in range(0, len(text)):
# create keyboard input
ke = KEY_EVENT_RECORD()
ke.bKeyDown = ctypes.c_byte(1)
ke.wRepeatCount = ctypes.c_short(1)
cnum = ord(text[i])
ke.wVirtualKeyCode = ctypes.windll.user32.VkKeyScanW(cnum)
ke.wVirtualScanCode = ctypes.c_short(ctypes.windll.user32.MapVirtualKeyW(int(cnum), 0))
if cnum > 31:
ke.uChar.UnicodeChar = uchr(cnum)
elif cnum == 3:
ctypes.windll.kernel32.GenerateConsoleCtrlEvent(0, self.pid)
ke.uChar.UnicodeChar = uchr(cnum)
ke.wVirtualKeyCode = ctypes.windll.user32.VkKeyScanW(cnum + 96)
ke.dwControlKeyState |= LEFT_CTRL_PRESSED
else:
ke.uChar.UnicodeChar = uchr(cnum)
if cnum in CONQUE_WINDOWS_VK_INV:
ke.wVirtualKeyCode = cnum
else:
ke.wVirtualKeyCode = ctypes.windll.user32.VkKeyScanW(cnum + 96)
ke.dwControlKeyState |= LEFT_CTRL_PRESSED
kc = INPUT_RECORD(KEY_EVENT)
kc.Event.KeyEvent = ke
list_input[i] = kc
# write input array
events_written = ctypes.c_int()
res = ctypes.windll.kernel32.WriteConsoleInputW(self.stdin, list_input, len(text), ctypes.byref(events_written))
def write_vk(self, vk_code):
""" Write special characters to console subprocess. """
code = None
ctrl_pressed = False
# this could be made more generic when more attributes
# other than ctrl_pressed are available
vk_attributes = vk_code.split(';')
for attr in vk_attributes:
if attr == CONQUE_VK_ATTR_CTRL_PRESSED:
ctrl_pressed = True
else:
code = attr
li = INPUT_RECORD * 1
# create keyboard input
ke = KEY_EVENT_RECORD()
ke.uChar.UnicodeChar = uchr(0)
ke.wVirtualKeyCode = ctypes.c_short(int(code))
ke.wVirtualScanCode = ctypes.c_short(ctypes.windll.user32.MapVirtualKeyW(int(code), 0))
ke.bKeyDown = ctypes.c_byte(1)
ke.wRepeatCount = ctypes.c_short(1)
# set enhanced key mode for arrow keys
if code in CONQUE_WINDOWS_VK_ENHANCED:
ke.dwControlKeyState |= ENHANCED_KEY
if ctrl_pressed:
ke.dwControlKeyState |= LEFT_CTRL_PRESSED
kc = INPUT_RECORD(KEY_EVENT)
kc.Event.KeyEvent = ke
list_input = li(kc)
# write input array
events_written = ctypes.c_int()
res = ctypes.windll.kernel32.WriteConsoleInputW(self.stdin, list_input, 1, ctypes.byref(events_written))
def close(self):
""" Close all running subproccesses """
# record status
self.alive = False
try:
stats = {'top_offset': 0, 'default_attribute': 0, 'cursor_x': 0, 'cursor_y': self.cursor_line, 'is_alive': 0}
self.shm_stats.write(stats)
except:
pass
pid_list = (ctypes.c_int * 10)()
num = ctypes.windll.kernel32.GetConsoleProcessList(pid_list, 10)
current_pid = os.getpid()
# kill subprocess pids
for pid in pid_list[0:num]:
if not pid:
break
# kill current pid last
if pid == current_pid:
continue
try:
self.close_pid(pid)
except:
pass
# kill this process
try:
self.close_pid(current_pid)
except:
pass
def close_pid(self, pid):
""" Terminate a single process. """
handle = ctypes.windll.kernel32.OpenProcess(PROCESS_TERMINATE, 0, pid)
ctypes.windll.kernel32.TerminateProcess(handle, -1)
ctypes.windll.kernel32.CloseHandle(handle)
def is_alive(self):
""" Check process health. """
status = ctypes.windll.kernel32.WaitForSingleObject(self.handle, 1)
if status == 0:
self.alive = False
return self.alive
def get_screen_text(self):
""" Return screen data as string. """
return "\n".join(self.data)
def set_window_size(self, width, height):
""" Change Windows console size. """
# get current window size object
window_size = SMALL_RECT(0, 0, 0, 0)
# buffer info has maximum window size data
buf_info = self.get_buffer_info()
# set top left corner
window_size.Top = 0
window_size.Left = 0
# set bottom right corner
if buf_info.dwMaximumWindowSize.X < width:
window_size.Right = buf_info.dwMaximumWindowSize.X - 1
else:
window_size.Right = width - 1
if buf_info.dwMaximumWindowSize.Y < height:
window_size.Bottom = buf_info.dwMaximumWindowSize.Y - 1
else:
window_size.Bottom = height - 1
# set the window size!
res = ctypes.windll.kernel32.SetConsoleWindowInfo(self.stdout, ctypes.c_bool(True), ctypes.byref(window_size))
# reread buffer info to get final console max lines
buf_info = self.get_buffer_info()
self.window_width = buf_info.srWindow.Right + 1
self.window_height = buf_info.srWindow.Bottom + 1
def get_buffer_info(self):
""" Retrieve commonly-used buffer information. """
buf_info = CONSOLE_SCREEN_BUFFER_INFO()
ctypes.windll.kernel32.GetConsoleScreenBufferInfo(self.stdout, ctypes.byref(buf_info))
return buf_info
| gpl-3.0 |
geffzhang/grpc | tools/run_tests/python_utils/watch_dirs.py | 11 | 2159 | # Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper to watch a (set) of directories for modifications."""
import os
import time
class DirWatcher(object):
"""Helper to watch a (set) of directories for modifications."""
def __init__(self, paths):
if isinstance(paths, basestring):
paths = [paths]
self._done = False
self.paths = list(paths)
self.lastrun = time.time()
self._cache = self._calculate()
def _calculate(self):
"""Walk over all subscribed paths, check most recent mtime."""
most_recent_change = None
for path in self.paths:
if not os.path.exists(path):
continue
if not os.path.isdir(path):
continue
for root, _, files in os.walk(path):
for f in files:
if f and f[0] == '.': continue
try:
st = os.stat(os.path.join(root, f))
except OSError as e:
if e.errno == os.errno.ENOENT:
continue
raise
if most_recent_change is None:
most_recent_change = st.st_mtime
else:
most_recent_change = max(most_recent_change,
st.st_mtime)
return most_recent_change
def most_recent_change(self):
if time.time() - self.lastrun > 1:
self._cache = self._calculate()
self.lastrun = time.time()
return self._cache
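# Usage sketch (illustrative; the paths and the rebuild() callback are
# assumptions). most_recent_change() re-walks the directories at most once
# per second, so it is cheap to poll in a loop:
#
#   watcher = DirWatcher(['src', 'tests'])
#   baseline = watcher.most_recent_change()
#   while True:
#       time.sleep(1)
#       latest = watcher.most_recent_change()
#       if latest != baseline:
#           baseline = latest
#           rebuild()  # hypothetical callback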
| apache-2.0 |
maestrano/odoo | addons/sale/edi/__init__.py | 454 | 1065 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Business Applications
# Copyright (c) 2011 OpenERP S.A. <http://openerp.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_order
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Noviat/odoo | addons/warning/__openerp__.py | 261 | 1603 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Warning Messages and Alerts',
'version': '1.0',
'category': 'Tools',
'description': """
Module to trigger warnings in OpenERP objects.
==============================================
Warning messages can be displayed for objects like sale order, purchase order,
picking and invoice. The message is triggered by the form's onchange event.
""",
'author': 'OpenERP SA',
'depends': ['base', 'sale_stock', 'purchase'],
'data': ['warning_view.xml'],
'demo': [],
'installable': True,
'auto_install': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
RenaudParis/servo | tests/wpt/css-tests/tools/webdriver/webdriver/command.py | 258 | 3985 | """Dispatches requests to remote WebDriver endpoint."""
import exceptions
import httplib
import json
import urlparse
import webelement
class CommandExecutor(object):
"""Dispatches requests to remote WebDriver endpoint."""
_HEADERS = {
"User-Agent": "Python WebDriver Local End",
"Content-Type": "application/json;charset=\"UTF-8\"",
"Accept": "application/json",
"Accept-Charset": "utf-8",
"Accept-Encoding": "identity",
"Connection": "close",
}
def __init__(self, url, mode='strict'):
self._parsed_url = urlparse.urlparse(url)
self._conn = httplib.HTTPConnection(self._parsed_url.hostname,
self._parsed_url.port)
self._mode = mode
def execute(self,
method,
path,
session_id,
name,
parameters=None,
object_hook=None):
"""Execute a command against the WebDriver endpoint.
Arguments:
method -- one of GET, POST, DELETE
path -- the path of the URL endpoint (include
session/<sessionId> where required)
session_id -- the sessionId to include in the JSON body
name -- name of the command that is being executed to include in
the JSON body
parameters -- the JSON body to send with the command. Only used if
method is POST
object_hook -- function used by json.loads to properly deserialize
objects in the request
"""
if self._mode == 'strict':
return self._execute_strict(
method, path, session_id, name, parameters, object_hook)
elif self._mode == 'compatibility':
return self._execute_compatibility(
method, path, session_id, name, parameters, object_hook)
else:
raise Exception("Unknown mode: " + self._mode)
def _execute_compatibility(self,
method,
path,
session_id,
name,
parameters,
object_hook):
body = {'sessionId': session_id, 'name': name }
if parameters:
body.update(parameters)
self._conn.request(
method,
self._parsed_url.path + path,
json.dumps(body, default = self._json_encode).encode('utf-8'),
self._HEADERS)
resp = self._conn.getresponse()
data = resp.read().decode('utf-8')
if data:
data = json.loads(data, object_hook = object_hook)
if data['status'] != 0:
raise exceptions.create_webdriver_exception_compatibility(
data['status'], data['value']['message'])
return data
if resp.status < 200 or resp.status > 299:
raise exceptions.create_webdriver_exception_compatibility(
resp.status, resp.reason)
def _execute_strict(self,
method,
path,
session_id,
name,
parameters,
object_hook):
body = {
'sessionId': session_id,
'name': name,
'parameters': parameters }
self._conn.request(
method,
self._parsed_url.path + path,
json.dumps(body, default = self._json_encode).encode('utf-8'),
self._HEADERS)
resp = self._conn.getresponse()
data = json.loads(
resp.read().decode('utf-8'), object_hook = object_hook)
if data['status'] != 'success':
raise exceptions.create_webdriver_exception_strict(
data['status'], data['value'])
return data
def _json_encode(self, obj):
return obj.to_json()
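# Usage sketch (illustrative; the endpoint URL, session id and command name
# are assumptions, not part of this module):
#
#   executor = CommandExecutor('http://localhost:4444/wd/hub', mode='strict')
#   result = executor.execute('POST', '/session/abc123/url', 'abc123', 'get',
#                             parameters={'url': 'http://example.org/'})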
| mpl-2.0 |
amurzeau/streamlink-debian | tests/plugin/testplugin.py | 2 | 2509 | from io import BytesIO
from streamlink import NoStreamsError
from streamlink.options import Options
from streamlink.plugin import PluginArgument, PluginArguments
from streamlink.plugins import Plugin
from streamlink.stream import AkamaiHDStream, HLSStream, HTTPStream, RTMPStream, Stream
class TestStream(Stream):
__shortname__ = "test"
def open(self):
return BytesIO(b'x' * 8192 * 2)
class TestPlugin(Plugin):
arguments = PluginArguments(
PluginArgument(
"bool",
action="store_true"
),
PluginArgument(
"password",
metavar="PASSWORD",
sensitive=True
)
)
options = Options({
"a_option": "default"
})
@classmethod
def can_handle_url(self, url):
return "test.se" in url
def get_title(self):
return "Test Title"
def get_author(self):
return "Tѥst Āuƭhǿr"
def get_category(self):
return None
def _get_streams(self):
if "empty" in self.url:
return
if "UnsortableStreamNames" in self.url:
def gen():
for i in range(3):
yield "vod", HTTPStream(self.session, "http://test.se/stream")
return gen()
if "NoStreamsError" in self.url:
raise NoStreamsError(self.url)
streams = {}
streams["test"] = TestStream(self.session)
streams["rtmp"] = RTMPStream(self.session, dict(rtmp="rtmp://test.se"))
streams["hls"] = HLSStream(self.session, "http://test.se/playlist.m3u8")
streams["http"] = HTTPStream(self.session, "http://test.se/stream")
streams["akamaihd"] = AkamaiHDStream(self.session, "http://test.se/stream")
streams["240p"] = HTTPStream(self.session, "http://test.se/stream")
streams["360p"] = HTTPStream(self.session, "http://test.se/stream")
streams["1080p"] = HTTPStream(self.session, "http://test.se/stream")
streams["350k"] = HTTPStream(self.session, "http://test.se/stream")
streams["800k"] = HTTPStream(self.session, "http://test.se/stream")
streams["1500k"] = HTTPStream(self.session, "http://test.se/stream")
streams["3000k"] = HTTPStream(self.session, "http://test.se/stream")
streams["480p"] = [HTTPStream(self.session, "http://test.se/stream"),
RTMPStream(self.session, dict(rtmp="rtmp://test.se"))]
return streams
__plugin__ = TestPlugin
| bsd-2-clause |
ZenDevelopmentSystems/scikit-learn | sklearn/linear_model/tests/test_sgd.py | 68 | 43439 | import pickle
import unittest
import numpy as np
import scipy.sparse as sp
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_almost_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import assert_less
from sklearn.utils.testing import raises
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_false, assert_true
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_raises_regexp
from sklearn import linear_model, datasets, metrics
from sklearn.base import clone
from sklearn.linear_model import SGDClassifier, SGDRegressor
from sklearn.preprocessing import LabelEncoder, scale, MinMaxScaler
class SparseSGDClassifier(SGDClassifier):
def fit(self, X, y, *args, **kw):
X = sp.csr_matrix(X)
return super(SparseSGDClassifier, self).fit(X, y, *args, **kw)
def partial_fit(self, X, y, *args, **kw):
X = sp.csr_matrix(X)
return super(SparseSGDClassifier, self).partial_fit(X, y, *args, **kw)
def decision_function(self, X):
X = sp.csr_matrix(X)
return super(SparseSGDClassifier, self).decision_function(X)
def predict_proba(self, X):
X = sp.csr_matrix(X)
return super(SparseSGDClassifier, self).predict_proba(X)
class SparseSGDRegressor(SGDRegressor):
def fit(self, X, y, *args, **kw):
X = sp.csr_matrix(X)
return SGDRegressor.fit(self, X, y, *args, **kw)
def partial_fit(self, X, y, *args, **kw):
X = sp.csr_matrix(X)
return SGDRegressor.partial_fit(self, X, y, *args, **kw)
def decision_function(self, X, *args, **kw):
X = sp.csr_matrix(X)
return SGDRegressor.decision_function(self, X, *args, **kw)
# Test Data
# test sample 1
X = np.array([[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]])
Y = [1, 1, 1, 2, 2, 2]
T = np.array([[-1, -1], [2, 2], [3, 2]])
true_result = [1, 2, 2]
# test sample 2; string class labels
X2 = np.array([[-1, 1], [-0.75, 0.5], [-1.5, 1.5],
[1, 1], [0.75, 0.5], [1.5, 1.5],
[-1, -1], [0, -0.5], [1, -1]])
Y2 = ["one"] * 3 + ["two"] * 3 + ["three"] * 3
T2 = np.array([[-1.5, 0.5], [1, 2], [0, -2]])
true_result2 = ["one", "two", "three"]
# test sample 3
X3 = np.array([[1, 1, 0, 0, 0, 0], [1, 1, 0, 0, 0, 0],
[0, 0, 1, 0, 0, 0], [0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 1, 1], [0, 0, 0, 0, 1, 1],
[0, 0, 0, 1, 0, 0], [0, 0, 0, 1, 0, 0]])
Y3 = np.array([1, 1, 1, 1, 2, 2, 2, 2])
# test sample 4 - two more or less redundant feature groups
X4 = np.array([[1, 0.9, 0.8, 0, 0, 0], [1, .84, .98, 0, 0, 0],
[1, .96, .88, 0, 0, 0], [1, .91, .99, 0, 0, 0],
[0, 0, 0, .89, .91, 1], [0, 0, 0, .79, .84, 1],
[0, 0, 0, .91, .95, 1], [0, 0, 0, .93, 1, 1]])
Y4 = np.array([1, 1, 1, 1, 2, 2, 2, 2])
iris = datasets.load_iris()
# test sample 5 - test sample 1 as binary classification problem
X5 = np.array([[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1]])
Y5 = [1, 1, 1, 2, 2, 2]
true_result5 = [0, 1, 1]
# Classification Test Case
class CommonTest(object):
def factory(self, **kwargs):
if "random_state" not in kwargs:
kwargs["random_state"] = 42
return self.factory_class(**kwargs)
# a simple implementation of ASGD to use for testing
# uses squared loss to find the gradient
def asgd(self, X, y, eta, alpha, weight_init=None, intercept_init=0.0):
if weight_init is None:
weights = np.zeros(X.shape[1])
else:
weights = weight_init
average_weights = np.zeros(X.shape[1])
intercept = intercept_init
average_intercept = 0.0
decay = 1.0
# sparse data has a fixed decay of .01
if (isinstance(self, SparseSGDClassifierTestCase) or
isinstance(self, SparseSGDRegressorTestCase)):
decay = .01
for i, entry in enumerate(X):
p = np.dot(entry, weights)
p += intercept
gradient = p - y[i]
weights *= 1.0 - (eta * alpha)
weights += -(eta * gradient * entry)
intercept += -(eta * gradient) * decay
average_weights *= i
average_weights += weights
average_weights /= i + 1.0
average_intercept *= i
average_intercept += intercept
average_intercept /= i + 1.0
return average_weights, average_intercept
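# The averaging above is the standard online-mean recurrence,
# avg_{i+1} = (i * avg_i + w_{i+1}) / (i + 1), applied to both the weights
# and the intercept; it is the reference that the average=True estimators
# are checked against in the tests below.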
def _test_warm_start(self, X, Y, lr):
# Test that explicit warm restart...
clf = self.factory(alpha=0.01, eta0=0.01, n_iter=5, shuffle=False,
learning_rate=lr)
clf.fit(X, Y)
clf2 = self.factory(alpha=0.001, eta0=0.01, n_iter=5, shuffle=False,
learning_rate=lr)
clf2.fit(X, Y,
coef_init=clf.coef_.copy(),
intercept_init=clf.intercept_.copy())
# ... and implicit warm restart are equivalent.
clf3 = self.factory(alpha=0.01, eta0=0.01, n_iter=5, shuffle=False,
warm_start=True, learning_rate=lr)
clf3.fit(X, Y)
assert_equal(clf3.t_, clf.t_)
assert_array_almost_equal(clf3.coef_, clf.coef_)
clf3.set_params(alpha=0.001)
clf3.fit(X, Y)
assert_equal(clf3.t_, clf2.t_)
assert_array_almost_equal(clf3.coef_, clf2.coef_)
def test_warm_start_constant(self):
self._test_warm_start(X, Y, "constant")
def test_warm_start_invscaling(self):
self._test_warm_start(X, Y, "invscaling")
def test_warm_start_optimal(self):
self._test_warm_start(X, Y, "optimal")
def test_input_format(self):
# Input format tests.
clf = self.factory(alpha=0.01, n_iter=5,
shuffle=False)
clf.fit(X, Y)
Y_ = np.array(Y)[:, np.newaxis]
Y_ = np.c_[Y_, Y_]
assert_raises(ValueError, clf.fit, X, Y_)
def test_clone(self):
# Test whether clone works ok.
clf = self.factory(alpha=0.01, n_iter=5, penalty='l1')
clf = clone(clf)
clf.set_params(penalty='l2')
clf.fit(X, Y)
clf2 = self.factory(alpha=0.01, n_iter=5, penalty='l2')
clf2.fit(X, Y)
assert_array_equal(clf.coef_, clf2.coef_)
def test_plain_has_no_average_attr(self):
clf = self.factory(average=True, eta0=.01)
clf.fit(X, Y)
assert_true(hasattr(clf, 'average_coef_'))
assert_true(hasattr(clf, 'average_intercept_'))
assert_true(hasattr(clf, 'standard_intercept_'))
assert_true(hasattr(clf, 'standard_coef_'))
clf = self.factory()
clf.fit(X, Y)
assert_false(hasattr(clf, 'average_coef_'))
assert_false(hasattr(clf, 'average_intercept_'))
assert_false(hasattr(clf, 'standard_intercept_'))
assert_false(hasattr(clf, 'standard_coef_'))
def test_late_onset_averaging_not_reached(self):
clf1 = self.factory(average=600)
clf2 = self.factory()
for _ in range(100):
if isinstance(clf1, SGDClassifier):
clf1.partial_fit(X, Y, classes=np.unique(Y))
clf2.partial_fit(X, Y, classes=np.unique(Y))
else:
clf1.partial_fit(X, Y)
clf2.partial_fit(X, Y)
assert_array_almost_equal(clf1.coef_, clf2.coef_, decimal=16)
assert_almost_equal(clf1.intercept_, clf2.intercept_, decimal=16)
def test_late_onset_averaging_reached(self):
eta0 = .001
alpha = .0001
Y_encode = np.array(Y)
Y_encode[Y_encode == 1] = -1.0
Y_encode[Y_encode == 2] = 1.0
clf1 = self.factory(average=7, learning_rate="constant",
loss='squared_loss', eta0=eta0,
alpha=alpha, n_iter=2, shuffle=False)
clf2 = self.factory(average=0, learning_rate="constant",
loss='squared_loss', eta0=eta0,
alpha=alpha, n_iter=1, shuffle=False)
clf1.fit(X, Y_encode)
clf2.fit(X, Y_encode)
average_weights, average_intercept = \
self.asgd(X, Y_encode, eta0, alpha,
weight_init=clf2.coef_.ravel(),
intercept_init=clf2.intercept_)
assert_array_almost_equal(clf1.coef_.ravel(),
average_weights.ravel(),
decimal=16)
assert_almost_equal(clf1.intercept_, average_intercept, decimal=16)
class DenseSGDClassifierTestCase(unittest.TestCase, CommonTest):
"""Test suite for the dense representation variant of SGD"""
factory_class = SGDClassifier
def test_sgd(self):
# Check that SGD gives any results :-)
for loss in ("hinge", "squared_hinge", "log", "modified_huber"):
clf = self.factory(penalty='l2', alpha=0.01, fit_intercept=True,
loss=loss, n_iter=10, shuffle=True)
clf.fit(X, Y)
# assert_almost_equal(clf.coef_[0], clf.coef_[1], decimal=7)
assert_array_equal(clf.predict(T), true_result)
@raises(ValueError)
def test_sgd_bad_l1_ratio(self):
# Check whether expected ValueError on bad l1_ratio
self.factory(l1_ratio=1.1)
@raises(ValueError)
def test_sgd_bad_learning_rate_schedule(self):
# Check whether expected ValueError on bad learning_rate
self.factory(learning_rate="<unknown>")
@raises(ValueError)
def test_sgd_bad_eta0(self):
# Check whether expected ValueError on bad eta0
self.factory(eta0=0, learning_rate="constant")
@raises(ValueError)
def test_sgd_bad_alpha(self):
# Check whether expected ValueError on bad alpha
self.factory(alpha=-.1)
@raises(ValueError)
def test_sgd_bad_penalty(self):
# Check whether expected ValueError on bad penalty
self.factory(penalty='foobar', l1_ratio=0.85)
@raises(ValueError)
def test_sgd_bad_loss(self):
# Check whether expected ValueError on bad loss
self.factory(loss="foobar")
@raises(ValueError)
def test_sgd_n_iter_param(self):
# Test parameter validity check
self.factory(n_iter=-10000)
@raises(ValueError)
def test_sgd_shuffle_param(self):
# Test parameter validity check
self.factory(shuffle="false")
@raises(TypeError)
def test_argument_coef(self):
# Checks coef_init not allowed as model argument (only fit)
# Provided coef_ does not match dataset.
self.factory(coef_init=np.zeros((3,))).fit(X, Y)
@raises(ValueError)
def test_provide_coef(self):
# Checks coef_init shape for the warm starts
# Provided coef_ does not match dataset.
self.factory().fit(X, Y, coef_init=np.zeros((3,)))
@raises(ValueError)
def test_set_intercept(self):
# Checks intercept_ shape for the warm starts
# Provided intercept_ does not match dataset.
self.factory().fit(X, Y, intercept_init=np.zeros((3,)))
def test_set_intercept_binary(self):
# Checks intercept_ shape for the warm starts in binary case
self.factory().fit(X5, Y5, intercept_init=0)
def test_average_binary_computed_correctly(self):
# Checks the SGDClassifier correctly computes the average weights
eta = .1
alpha = 2.
n_samples = 20
n_features = 10
rng = np.random.RandomState(0)
X = rng.normal(size=(n_samples, n_features))
w = rng.normal(size=n_features)
clf = self.factory(loss='squared_loss',
learning_rate='constant',
eta0=eta, alpha=alpha,
fit_intercept=True,
n_iter=1, average=True, shuffle=False)
# simple linear function without noise
y = np.dot(X, w)
y = np.sign(y)
clf.fit(X, y)
average_weights, average_intercept = self.asgd(X, y, eta, alpha)
average_weights = average_weights.reshape(1, -1)
assert_array_almost_equal(clf.coef_,
average_weights,
decimal=14)
assert_almost_equal(clf.intercept_, average_intercept, decimal=14)
def test_set_intercept_to_intercept(self):
# Checks intercept_ shape consistency for the warm starts
# Inconsistent intercept_ shape.
clf = self.factory().fit(X5, Y5)
self.factory().fit(X5, Y5, intercept_init=clf.intercept_)
clf = self.factory().fit(X, Y)
self.factory().fit(X, Y, intercept_init=clf.intercept_)
@raises(ValueError)
def test_sgd_at_least_two_labels(self):
# Target must have at least two labels
self.factory(alpha=0.01, n_iter=20).fit(X2, np.ones(9))
def test_partial_fit_weight_class_balanced(self):
# partial_fit with class_weight='balanced' is not supported
assert_raises_regexp(ValueError,
"class_weight 'balanced' is not supported for "
"partial_fit. In order to use 'balanced' weights, "
"use compute_class_weight\('balanced', classes, y\). "
"In place of y you can us a large enough sample "
"of the full training set target to properly "
"estimate the class frequency distributions. "
"Pass the resulting weights as the class_weight "
"parameter.",
self.factory(class_weight='balanced').partial_fit,
X, Y, classes=np.unique(Y))
def test_sgd_multiclass(self):
# Multi-class test case
clf = self.factory(alpha=0.01, n_iter=20).fit(X2, Y2)
assert_equal(clf.coef_.shape, (3, 2))
assert_equal(clf.intercept_.shape, (3,))
assert_equal(clf.decision_function([[0, 0]]).shape, (1, 3))
pred = clf.predict(T2)
assert_array_equal(pred, true_result2)
def test_sgd_multiclass_average(self):
eta = .001
alpha = .01
# Multi-class average test case
clf = self.factory(loss='squared_loss',
learning_rate='constant',
eta0=eta, alpha=alpha,
fit_intercept=True,
n_iter=1, average=True, shuffle=False)
np_Y2 = np.array(Y2)
clf.fit(X2, np_Y2)
classes = np.unique(np_Y2)
for i, cl in enumerate(classes):
y_i = np.ones(np_Y2.shape[0])
y_i[np_Y2 != cl] = -1
average_coef, average_intercept = self.asgd(X2, y_i, eta, alpha)
assert_array_almost_equal(average_coef, clf.coef_[i], decimal=16)
assert_almost_equal(average_intercept,
clf.intercept_[i],
decimal=16)
def test_sgd_multiclass_with_init_coef(self):
# Multi-class test case
clf = self.factory(alpha=0.01, n_iter=20)
clf.fit(X2, Y2, coef_init=np.zeros((3, 2)),
intercept_init=np.zeros(3))
assert_equal(clf.coef_.shape, (3, 2))
assert_true(clf.intercept_.shape, (3,))
pred = clf.predict(T2)
assert_array_equal(pred, true_result2)
def test_sgd_multiclass_njobs(self):
# Multi-class test case with multi-core support
clf = self.factory(alpha=0.01, n_iter=20, n_jobs=2).fit(X2, Y2)
assert_equal(clf.coef_.shape, (3, 2))
assert_equal(clf.intercept_.shape, (3,))
assert_equal(clf.decision_function([[0, 0]]).shape, (1, 3))
pred = clf.predict(T2)
assert_array_equal(pred, true_result2)
def test_set_coef_multiclass(self):
# Checks coef_init and intercept_init shape for multi-class
# problems
# Provided coef_ does not match dataset
clf = self.factory()
assert_raises(ValueError, clf.fit, X2, Y2, coef_init=np.zeros((2, 2)))
# Provided coef_ does match dataset
clf = self.factory().fit(X2, Y2, coef_init=np.zeros((3, 2)))
# Provided intercept_ does not match dataset
clf = self.factory()
assert_raises(ValueError, clf.fit, X2, Y2,
intercept_init=np.zeros((1,)))
# Provided intercept_ does match dataset.
clf = self.factory().fit(X2, Y2, intercept_init=np.zeros((3,)))
def test_sgd_proba(self):
# Check SGD.predict_proba
# Hinge loss does not allow for conditional probability estimates.
# We cannot use the factory here, because it defines predict_proba
# anyway.
clf = SGDClassifier(loss="hinge", alpha=0.01, n_iter=10).fit(X, Y)
assert_false(hasattr(clf, "predict_proba"))
assert_false(hasattr(clf, "predict_log_proba"))
# log and modified_huber losses can output probability estimates
# binary case
for loss in ["log", "modified_huber"]:
clf = self.factory(loss="modified_huber", alpha=0.01, n_iter=10)
clf.fit(X, Y)
p = clf.predict_proba([[3, 2]])
assert_true(p[0, 1] > 0.5)
p = clf.predict_proba([[-1, -1]])
assert_true(p[0, 1] < 0.5)
p = clf.predict_log_proba([[3, 2]])
assert_true(p[0, 1] > p[0, 0])
p = clf.predict_log_proba([[-1, -1]])
assert_true(p[0, 1] < p[0, 0])
# log loss multiclass probability estimates
clf = self.factory(loss="log", alpha=0.01, n_iter=10).fit(X2, Y2)
d = clf.decision_function([[.1, -.1], [.3, .2]])
p = clf.predict_proba([[.1, -.1], [.3, .2]])
assert_array_equal(np.argmax(p, axis=1), np.argmax(d, axis=1))
assert_almost_equal(p[0].sum(), 1)
assert_true(np.all(p[0] >= 0))
p = clf.predict_proba([[-1, -1]])
d = clf.decision_function([[-1, -1]])
assert_array_equal(np.argsort(p[0]), np.argsort(d[0]))
l = clf.predict_log_proba([[3, 2]])
p = clf.predict_proba([[3, 2]])
assert_array_almost_equal(np.log(p), l)
l = clf.predict_log_proba([[-1, -1]])
p = clf.predict_proba([[-1, -1]])
assert_array_almost_equal(np.log(p), l)
# Modified Huber multiclass probability estimates; requires a separate
# test because the hard zero/one probabilities may destroy the
# ordering present in decision_function output.
clf = self.factory(loss="modified_huber", alpha=0.01, n_iter=10)
clf.fit(X2, Y2)
d = clf.decision_function([[3, 2]])
p = clf.predict_proba([[3, 2]])
if not isinstance(self, SparseSGDClassifierTestCase):
assert_equal(np.argmax(d, axis=1), np.argmax(p, axis=1))
else: # XXX the sparse test gets a different X2 (?)
assert_equal(np.argmin(d, axis=1), np.argmin(p, axis=1))
# the following sample produces decision_function values < -1,
# which would cause naive normalization to fail (see comment
# in SGDClassifier.predict_proba)
x = X.mean(axis=0)
d = clf.decision_function([x])
if np.all(d < -1): # XXX not true in sparse test case (why?)
p = clf.predict_proba([x])
assert_array_almost_equal(p[0], [1 / 3.] * 3)
def test_sgd_l1(self):
# Test L1 regularization
n = len(X4)
rng = np.random.RandomState(13)
idx = np.arange(n)
rng.shuffle(idx)
X = X4[idx, :]
Y = Y4[idx]
clf = self.factory(penalty='l1', alpha=.2, fit_intercept=False,
n_iter=2000, shuffle=False)
clf.fit(X, Y)
assert_array_equal(clf.coef_[0, 1:-1], np.zeros((4,)))
pred = clf.predict(X)
assert_array_equal(pred, Y)
# test sparsify with dense inputs
clf.sparsify()
assert_true(sp.issparse(clf.coef_))
pred = clf.predict(X)
assert_array_equal(pred, Y)
# pickle and unpickle with sparse coef_
clf = pickle.loads(pickle.dumps(clf))
assert_true(sp.issparse(clf.coef_))
pred = clf.predict(X)
assert_array_equal(pred, Y)
def test_class_weights(self):
# Test class weights.
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
[1.0, 1.0], [1.0, 0.0]])
y = [1, 1, 1, -1, -1]
clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False,
class_weight=None)
clf.fit(X, y)
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1]))
# we give a small weight to class 1
clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False,
class_weight={1: 0.001})
clf.fit(X, y)
# now the hyperplane should rotate clock-wise and
# the prediction on this point should shift
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1]))
def test_equal_class_weight(self):
# Test if equal class weights approx. equals no class weights.
X = [[1, 0], [1, 0], [0, 1], [0, 1]]
y = [0, 0, 1, 1]
clf = self.factory(alpha=0.1, n_iter=1000, class_weight=None)
clf.fit(X, y)
X = [[1, 0], [0, 1]]
y = [0, 1]
clf_weighted = self.factory(alpha=0.1, n_iter=1000,
class_weight={0: 0.5, 1: 0.5})
clf_weighted.fit(X, y)
# should be similar up to some epsilon due to learning rate schedule
assert_almost_equal(clf.coef_, clf_weighted.coef_, decimal=2)
@raises(ValueError)
def test_wrong_class_weight_label(self):
# ValueError due to not existing class label.
clf = self.factory(alpha=0.1, n_iter=1000, class_weight={0: 0.5})
clf.fit(X, Y)
@raises(ValueError)
def test_wrong_class_weight_format(self):
# ValueError due to wrong class_weight argument type.
clf = self.factory(alpha=0.1, n_iter=1000, class_weight=[0.5])
clf.fit(X, Y)
def test_weights_multiplied(self):
# Tests that class_weight and sample_weight are multiplicative
class_weights = {1: .6, 2: .3}
sample_weights = np.random.random(Y4.shape[0])
multiplied_together = np.copy(sample_weights)
multiplied_together[Y4 == 1] *= class_weights[1]
multiplied_together[Y4 == 2] *= class_weights[2]
clf1 = self.factory(alpha=0.1, n_iter=20, class_weight=class_weights)
clf2 = self.factory(alpha=0.1, n_iter=20)
clf1.fit(X4, Y4, sample_weight=sample_weights)
clf2.fit(X4, Y4, sample_weight=multiplied_together)
assert_almost_equal(clf1.coef_, clf2.coef_)
def test_balanced_weight(self):
# Test class weights for imbalanced data
# compute reference metrics on iris dataset that is quite balanced by
# default
X, y = iris.data, iris.target
X = scale(X)
idx = np.arange(X.shape[0])
rng = np.random.RandomState(6)
rng.shuffle(idx)
X = X[idx]
y = y[idx]
clf = self.factory(alpha=0.0001, n_iter=1000,
class_weight=None, shuffle=False).fit(X, y)
assert_almost_equal(metrics.f1_score(y, clf.predict(X), average='weighted'), 0.96,
decimal=1)
# make the same prediction using balanced class_weight
clf_balanced = self.factory(alpha=0.0001, n_iter=1000,
class_weight="balanced",
shuffle=False).fit(X, y)
assert_almost_equal(metrics.f1_score(y, clf_balanced.predict(X), average='weighted'), 0.96,
decimal=1)
# Make sure that in the balanced case it does not change anything
# to use "balanced"
assert_array_almost_equal(clf.coef_, clf_balanced.coef_, 6)
# build a very imbalanced dataset out of the iris data
X_0 = X[y == 0, :]
y_0 = y[y == 0]
X_imbalanced = np.vstack([X] + [X_0] * 10)
y_imbalanced = np.concatenate([y] + [y_0] * 10)
# fit a model on the imbalanced data without class weight info
clf = self.factory(n_iter=1000, class_weight=None, shuffle=False)
clf.fit(X_imbalanced, y_imbalanced)
y_pred = clf.predict(X)
assert_less(metrics.f1_score(y, y_pred, average='weighted'), 0.96)
# fit a model with balanced class_weight enabled
clf = self.factory(n_iter=1000, class_weight="balanced", shuffle=False)
clf.fit(X_imbalanced, y_imbalanced)
y_pred = clf.predict(X)
assert_greater(metrics.f1_score(y, y_pred, average='weighted'), 0.96)
# fit another using a fit parameter override
clf = self.factory(n_iter=1000, class_weight="balanced", shuffle=False)
clf.fit(X_imbalanced, y_imbalanced)
y_pred = clf.predict(X)
assert_greater(metrics.f1_score(y, y_pred, average='weighted'), 0.96)
def test_sample_weights(self):
# Test weights on individual samples
X = np.array([[-1.0, -1.0], [-1.0, 0], [-.8, -1.0],
[1.0, 1.0], [1.0, 0.0]])
y = [1, 1, 1, -1, -1]
clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False)
clf.fit(X, y)
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([1]))
# we give a small weight to class 1
clf.fit(X, y, sample_weight=[0.001] * 3 + [1] * 2)
# now the hyperplane should rotate clock-wise and
# the prediction on this point should shift
assert_array_equal(clf.predict([[0.2, -1.0]]), np.array([-1]))
@raises(ValueError)
def test_wrong_sample_weights(self):
# Test if ValueError is raised if sample_weight has wrong shape
clf = self.factory(alpha=0.1, n_iter=1000, fit_intercept=False)
# provided sample_weight too long
clf.fit(X, Y, sample_weight=np.arange(7))
@raises(ValueError)
def test_partial_fit_exception(self):
clf = self.factory(alpha=0.01)
# classes was not specified
clf.partial_fit(X3, Y3)
def test_partial_fit_binary(self):
third = X.shape[0] // 3
clf = self.factory(alpha=0.01)
classes = np.unique(Y)
clf.partial_fit(X[:third], Y[:third], classes=classes)
assert_equal(clf.coef_.shape, (1, X.shape[1]))
assert_equal(clf.intercept_.shape, (1,))
assert_equal(clf.decision_function([[0, 0]]).shape, (1, ))
id1 = id(clf.coef_.data)
clf.partial_fit(X[third:], Y[third:])
id2 = id(clf.coef_.data)
# check that coef_ hasn't been re-allocated
assert_true(id1, id2)
y_pred = clf.predict(T)
assert_array_equal(y_pred, true_result)
def test_partial_fit_multiclass(self):
third = X2.shape[0] // 3
clf = self.factory(alpha=0.01)
classes = np.unique(Y2)
clf.partial_fit(X2[:third], Y2[:third], classes=classes)
assert_equal(clf.coef_.shape, (3, X2.shape[1]))
assert_equal(clf.intercept_.shape, (3,))
assert_equal(clf.decision_function([[0, 0]]).shape, (1, 3))
id1 = id(clf.coef_.data)
clf.partial_fit(X2[third:], Y2[third:])
id2 = id(clf.coef_.data)
# check that coef_ hasn't been re-allocated
assert_true(id1, id2)
def test_fit_then_partial_fit(self):
# Partial_fit should work after initial fit in the multiclass case.
# Non-regression test for #2496; fit would previously produce a
# Fortran-ordered coef_ that subsequent partial_fit couldn't handle.
clf = self.factory()
clf.fit(X2, Y2)
clf.partial_fit(X2, Y2) # no exception here
def _test_partial_fit_equal_fit(self, lr):
for X_, Y_, T_ in ((X, Y, T), (X2, Y2, T2)):
clf = self.factory(alpha=0.01, eta0=0.01, n_iter=2,
learning_rate=lr, shuffle=False)
clf.fit(X_, Y_)
y_pred = clf.decision_function(T_)
t = clf.t_
classes = np.unique(Y_)
clf = self.factory(alpha=0.01, eta0=0.01, learning_rate=lr,
shuffle=False)
for i in range(2):
clf.partial_fit(X_, Y_, classes=classes)
y_pred2 = clf.decision_function(T_)
assert_equal(clf.t_, t)
assert_array_almost_equal(y_pred, y_pred2, decimal=2)
def test_partial_fit_equal_fit_constant(self):
self._test_partial_fit_equal_fit("constant")
def test_partial_fit_equal_fit_optimal(self):
self._test_partial_fit_equal_fit("optimal")
def test_partial_fit_equal_fit_invscaling(self):
self._test_partial_fit_equal_fit("invscaling")
def test_regression_losses(self):
clf = self.factory(alpha=0.01, learning_rate="constant",
eta0=0.1, loss="epsilon_insensitive")
clf.fit(X, Y)
assert_equal(1.0, np.mean(clf.predict(X) == Y))
clf = self.factory(alpha=0.01, learning_rate="constant",
eta0=0.1, loss="squared_epsilon_insensitive")
clf.fit(X, Y)
assert_equal(1.0, np.mean(clf.predict(X) == Y))
clf = self.factory(alpha=0.01, loss="huber")
clf.fit(X, Y)
assert_equal(1.0, np.mean(clf.predict(X) == Y))
clf = self.factory(alpha=0.01, learning_rate="constant", eta0=0.01,
loss="squared_loss")
clf.fit(X, Y)
assert_equal(1.0, np.mean(clf.predict(X) == Y))
def test_warm_start_multiclass(self):
self._test_warm_start(X2, Y2, "optimal")
def test_multiple_fit(self):
# Test multiple calls of fit w/ different shaped inputs.
clf = self.factory(alpha=0.01, n_iter=5,
shuffle=False)
clf.fit(X, Y)
assert_true(hasattr(clf, "coef_"))
# Non-regression test: try fitting with a different label set.
y = [["ham", "spam"][i] for i in LabelEncoder().fit_transform(Y)]
clf.fit(X[:, :-1], y)
class SparseSGDClassifierTestCase(DenseSGDClassifierTestCase):
"""Run exactly the same tests using the sparse representation variant"""
factory_class = SparseSGDClassifier
###############################################################################
# Regression Test Case
class DenseSGDRegressorTestCase(unittest.TestCase, CommonTest):
"""Test suite for the dense representation variant of SGD"""
factory_class = SGDRegressor
def test_sgd(self):
# Smoke test: check that SGD produces any results at all.
clf = self.factory(alpha=0.1, n_iter=2,
fit_intercept=False)
clf.fit([[0, 0], [1, 1], [2, 2]], [0, 1, 2])
assert_equal(clf.coef_[0], clf.coef_[1])
@raises(ValueError)
def test_sgd_bad_penalty(self):
# Check that the expected ValueError is raised on a bad penalty
self.factory(penalty='foobar', l1_ratio=0.85)
@raises(ValueError)
def test_sgd_bad_loss(self):
# Check that the expected ValueError is raised on a bad loss
self.factory(loss="foobar")
def test_sgd_averaged_computed_correctly(self):
# Tests the average regressor matches the naive implementation
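# For reference, a sketch of the averaged-SGD estimate this is expected to
# match (assuming self.asgd implements the plain running average):
#   w_bar_T = (1 / T) * sum_{t=1..T} w_t
# where w_t are the ordinary SGD iterates after each sample.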
eta = .001
alpha = .01
n_samples = 20
n_features = 10
rng = np.random.RandomState(0)
X = rng.normal(size=(n_samples, n_features))
w = rng.normal(size=n_features)
# simple linear function without noise
y = np.dot(X, w)
clf = self.factory(loss='squared_loss',
learning_rate='constant',
eta0=eta, alpha=alpha,
fit_intercept=True,
n_iter=1, average=True, shuffle=False)
clf.fit(X, y)
average_weights, average_intercept = self.asgd(X, y, eta, alpha)
assert_array_almost_equal(clf.coef_,
average_weights,
decimal=16)
assert_almost_equal(clf.intercept_, average_intercept, decimal=16)
def test_sgd_averaged_partial_fit(self):
# Tests whether the partial fit yields the same average as the fit
eta = .001
alpha = .01
n_samples = 20
n_features = 10
rng = np.random.RandomState(0)
X = rng.normal(size=(n_samples, n_features))
w = rng.normal(size=n_features)
# simple linear function without noise
y = np.dot(X, w)
clf = self.factory(loss='squared_loss',
learning_rate='constant',
eta0=eta, alpha=alpha,
fit_intercept=True,
n_iter=1, average=True, shuffle=False)
clf.partial_fit(X[:int(n_samples / 2)][:], y[:int(n_samples / 2)])
clf.partial_fit(X[int(n_samples / 2):][:], y[int(n_samples / 2):])
average_weights, average_intercept = self.asgd(X, y, eta, alpha)
assert_array_almost_equal(clf.coef_,
average_weights,
decimal=16)
assert_almost_equal(clf.intercept_[0], average_intercept, decimal=16)
def test_average_sparse(self):
# Checks the average weights on data with 0s
eta = .001
alpha = .01
clf = self.factory(loss='squared_loss',
learning_rate='constant',
eta0=eta, alpha=alpha,
fit_intercept=True,
n_iter=1, average=True, shuffle=False)
n_samples = Y3.shape[0]
clf.partial_fit(X3[:int(n_samples / 2)][:], Y3[:int(n_samples / 2)])
clf.partial_fit(X3[int(n_samples / 2):][:], Y3[int(n_samples / 2):])
average_weights, average_intercept = self.asgd(X3, Y3, eta, alpha)
assert_array_almost_equal(clf.coef_,
average_weights,
decimal=16)
assert_almost_equal(clf.intercept_, average_intercept, decimal=16)
def test_sgd_least_squares_fit(self):
xmin, xmax = -5, 5
n_samples = 100
rng = np.random.RandomState(0)
X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1)
# simple linear function without noise
y = 0.5 * X.ravel()
clf = self.factory(loss='squared_loss', alpha=0.1, n_iter=20,
fit_intercept=False)
clf.fit(X, y)
score = clf.score(X, y)
assert_greater(score, 0.99)
# simple linear function with noise
y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel()
clf = self.factory(loss='squared_loss', alpha=0.1, n_iter=20,
fit_intercept=False)
clf.fit(X, y)
score = clf.score(X, y)
assert_greater(score, 0.5)
def test_sgd_epsilon_insensitive(self):
xmin, xmax = -5, 5
n_samples = 100
X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1)
# simple linear function without noise
y = 0.5 * X.ravel()
clf = self.factory(loss='epsilon_insensitive', epsilon=0.01,
alpha=0.1, n_iter=20,
fit_intercept=False)
clf.fit(X, y)
score = clf.score(X, y)
assert_greater(score, 0.99)
# simple linear function with noise
rng = np.random.RandomState(0)
y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel()
clf = self.factory(loss='epsilon_insensitive', epsilon=0.01,
alpha=0.1, n_iter=20,
fit_intercept=False)
clf.fit(X, y)
score = clf.score(X, y)
assert_greater(score, 0.5)
def test_sgd_huber_fit(self):
xmin, xmax = -5, 5
n_samples = 100
rng = np.random.RandomState(0)
X = np.linspace(xmin, xmax, n_samples).reshape(n_samples, 1)
# simple linear function without noise
y = 0.5 * X.ravel()
clf = self.factory(loss="huber", epsilon=0.1, alpha=0.1, n_iter=20,
fit_intercept=False)
clf.fit(X, y)
score = clf.score(X, y)
assert_greater(score, 0.99)
# simple linear function with noise
y = 0.5 * X.ravel() + rng.randn(n_samples, 1).ravel()
clf = self.factory(loss="huber", epsilon=0.1, alpha=0.1, n_iter=20,
fit_intercept=False)
clf.fit(X, y)
score = clf.score(X, y)
assert_greater(score, 0.5)
def test_elasticnet_convergence(self):
# Check that the SGD output is consistent with coordinate descent
n_samples, n_features = 1000, 5
rng = np.random.RandomState(0)
X = rng.randn(n_samples, n_features)
# ground-truth linear model that generates y from X and to which the
# models should converge if the regularizer were set to 0.0
ground_truth_coef = rng.randn(n_features)
y = np.dot(X, ground_truth_coef)
# XXX: alpha = 0.1 seems to cause convergence problems
for alpha in [0.01, 0.001]:
for l1_ratio in [0.5, 0.8, 1.0]:
cd = linear_model.ElasticNet(alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=False)
cd.fit(X, y)
sgd = self.factory(penalty='elasticnet', n_iter=50,
alpha=alpha, l1_ratio=l1_ratio,
fit_intercept=False)
sgd.fit(X, y)
err_msg = ("cd and sgd did not converge to comparable "
"results for alpha=%f and l1_ratio=%f"
% (alpha, l1_ratio))
assert_almost_equal(cd.coef_, sgd.coef_, decimal=2,
err_msg=err_msg)
def test_partial_fit(self):
third = X.shape[0] // 3
clf = self.factory(alpha=0.01)
clf.partial_fit(X[:third], Y[:third])
assert_equal(clf.coef_.shape, (X.shape[1], ))
assert_equal(clf.intercept_.shape, (1,))
assert_equal(clf.decision_function([[0, 0]]).shape, (1, ))
id1 = id(clf.coef_.data)
clf.partial_fit(X[third:], Y[third:])
id2 = id(clf.coef_.data)
# check that coef_ haven't been re-allocated
assert_true(id1, id2)
def _test_partial_fit_equal_fit(self, lr):
clf = self.factory(alpha=0.01, n_iter=2, eta0=0.01,
learning_rate=lr, shuffle=False)
clf.fit(X, Y)
y_pred = clf.predict(T)
t = clf.t_
clf = self.factory(alpha=0.01, eta0=0.01,
learning_rate=lr, shuffle=False)
for i in range(2):
clf.partial_fit(X, Y)
y_pred2 = clf.predict(T)
assert_equal(clf.t_, t)
assert_array_almost_equal(y_pred, y_pred2, decimal=2)
def test_partial_fit_equal_fit_constant(self):
self._test_partial_fit_equal_fit("constant")
def test_partial_fit_equal_fit_optimal(self):
self._test_partial_fit_equal_fit("optimal")
def test_partial_fit_equal_fit_invscaling(self):
self._test_partial_fit_equal_fit("invscaling")
def test_loss_function_epsilon(self):
clf = self.factory(epsilon=0.9)
clf.set_params(epsilon=0.1)
assert clf.loss_functions['huber'][1] == 0.1
class SparseSGDRegressorTestCase(DenseSGDRegressorTestCase):
# Run exactly the same tests using the sparse representation variant
factory_class = SparseSGDRegressor
def test_l1_ratio():
# Test if l1 ratio extremes match L1 and L2 penalty settings.
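# For reference (a sketch of scikit-learn's convention, not part of the
# original test): the elasticnet penalty interpolates as
#   P(w) = l1_ratio * ||w||_1 + (1 - l1_ratio) * 0.5 * ||w||_2 ** 2,
# so l1_ratio -> 1 recovers the pure 'l1' penalty and l1_ratio -> 0
# recovers the pure 'l2' penalty, which is what the two checks below verify.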
X, y = datasets.make_classification(n_samples=1000,
n_features=100, n_informative=20,
random_state=1234)
# test if elasticnet with l1_ratio near 1 gives same result as pure l1
est_en = SGDClassifier(alpha=0.001, penalty='elasticnet',
l1_ratio=0.9999999999, random_state=42).fit(X, y)
est_l1 = SGDClassifier(alpha=0.001, penalty='l1', random_state=42).fit(X, y)
assert_array_almost_equal(est_en.coef_, est_l1.coef_)
# test if elasticnet with l1_ratio near 0 gives same result as pure l2
est_en = SGDClassifier(alpha=0.001, penalty='elasticnet',
l1_ratio=0.0000000001, random_state=42).fit(X, y)
est_l2 = SGDClassifier(alpha=0.001, penalty='l2', random_state=42).fit(X, y)
assert_array_almost_equal(est_en.coef_, est_l2.coef_)
def test_underflow_or_overlow():
with np.errstate(all='raise'):
# Generate some weird data with hugely unscaled features
rng = np.random.RandomState(0)
n_samples = 100
n_features = 10
X = rng.normal(size=(n_samples, n_features))
X[:, :2] *= 1e300
assert_true(np.isfinite(X).all())
# Use MinMaxScaler to scale the data without introducing a numerical
# instability (computing the standard deviation naively is not possible
# on this data)
X_scaled = MinMaxScaler().fit_transform(X)
assert_true(np.isfinite(X_scaled).all())
# Define a ground truth on the scaled data
ground_truth = rng.normal(size=n_features)
y = (np.dot(X_scaled, ground_truth) > 0.).astype(np.int32)
assert_array_equal(np.unique(y), [0, 1])
model = SGDClassifier(alpha=0.1, loss='squared_hinge', n_iter=500)
# smoke test: model is stable on scaled data
model.fit(X_scaled, y)
assert_true(np.isfinite(model.coef_).all())
# model is numerically unstable on unscaled data
msg_regxp = (r"Floating-point under-/overflow occurred at epoch #.*"
" Scaling input data with StandardScaler or MinMaxScaler"
" might help.")
assert_raises_regexp(ValueError, msg_regxp, model.fit, X, y)
def test_numerical_stability_large_gradient():
# Non regression test case for numerical stability on scaled problems
# where the gradient can still explode with some losses
model = SGDClassifier(loss='squared_hinge', n_iter=10, shuffle=True,
penalty='elasticnet', l1_ratio=0.3, alpha=0.01,
eta0=0.001, random_state=0)
with np.errstate(all='raise'):
model.fit(iris.data, iris.target)
assert_true(np.isfinite(model.coef_).all())
def test_large_regularization():
# Non regression tests for numerical stability issues caused by large
# regularization parameters
for penalty in ['l2', 'l1', 'elasticnet']:
model = SGDClassifier(alpha=1e5, learning_rate='constant', eta0=0.1,
n_iter=5, penalty=penalty, shuffle=False)
with np.errstate(all='raise'):
model.fit(iris.data, iris.target)
assert_array_almost_equal(model.coef_, np.zeros_like(model.coef_))
| bsd-3-clause |
SP2RC-Coding-Club/Codes | 13_07_2017/3D_slab_modes.py | 1 | 35096 |
#import pdb # pause code for debugging at pdb.set_trace()
import numpy as np
import toolbox as tool
import slab_functions as sf
from pysac.plot.mayavi_seed_streamlines import SeedStreamline
import matplotlib.pyplot as plt
from mayavi import mlab
import gc
#import move_seed_points as msp
import mayavi_plotting_functions as mpf
import dispersion_diagram
import img2vid as i2v
from functools import partial
import os
# ================================
# Preamble: set mode options and view parameters
# ================================
# What mode do you want? OPTIONS:
mode_options = ['slow-kink-surf', 'slow-saus-surf', 'slow-saus-body-3',
'slow-kink-body-3', 'slow-saus-body-2', 'slow-kink-body-2',
'slow-saus-body-1', 'slow-kink-body-1', 'fast-saus-body-1',
'fast-kink-body-1', 'fast-saus-body-2', 'fast-kink-body-2',
'fast-saus-body-3', 'fast-kink-body-3', 'fast-kink-surf',
'fast-saus-surf', 'shear-alfven', 'shear-alfven-broadband']
# Which angle shall we view from? OPTIONS:
view_options = ['front', 'front-parallel', 'top', 'top-parallel', 'front-top',
'front-side', 'front-top-side']
# Uniform lighting?
#uniform_light = True
uniform_light = False
show_density = False
show_density_pert = False
show_mag = False
show_mag_scale = False
show_mag_fade = False
show_mag_vec = False
show_vel_front = False
show_vel_front_pert = False
show_vel_top = False
show_vel_top_pert = False
show_disp_top = False
show_disp_front = False
show_axes = False
show_axis_labels = False
show_mini_axis = False
show_boundary = False
# Uncomment the parameters you would like to see
# No density perturbations or vel/disp pert for alfven modes.
#show_density = True
#show_density_pert = True
show_mag = True
#show_mag_scale = True #must also have show_mag = True
#show_mag_fade = True
#show_mag_vec = True
#show_vel_front = True
#show_vel_front_pert = True
#show_vel_top = True
#show_vel_top_pert = True
#show_disp_top = True
#show_disp_front = True
show_axes = True
#show_axis_labels = True
show_mini_axis = True
show_boundary = True
# Visualisation modules in string form for file-names
vis_modules = [show_density, show_density_pert, show_mag, show_mag_scale,
show_mag_fade, show_mag_vec, show_vel_front, show_vel_front_pert,
show_vel_top, show_vel_top_pert, show_disp_top, show_disp_front]
vis_modules_strings = ['show_density', 'show_density_pert', 'show_mag', 'show_mag_scale',
'show_mag_fade', 'show_mag_vec', 'show_vel_front', 'show_vel_front_pert',
'show_vel_top', 'show_vel_top_pert', 'show_disp_top', 'show_disp_front']
vis_mod_string = ''
for i, j in enumerate(vis_modules):
if vis_modules[i]:
vis_mod_string = vis_mod_string + vis_modules_strings[i][5:] + '_'
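# With the selection above (only show_mag enabled among the visualisation
# modules), this leaves vis_mod_string == 'mag_'; the string is used below
# to build descriptive file names for the saved images and videos.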
# Set to True if you would like the dispersion diagram with chosen mode highlighted.
show_dispersion = False
#show_dispersion = True
# Wanna see the animation? Of course you do
#show_animation = False
show_animation = True
# Basic plot to see which eigensolutions have been found.
show_quick_plot = False
#show_quick_plot = True
# Video resolution
#res = (1920,1080) # There is a problem with this resolution - the height must be an odd number - apparently a Mayavi bug
res = tuple(101 * np.array((16,9)))
#res = tuple(51 * np.array((16,9)))
#res = tuple(21 * np.array((16,9)))
number_of_frames = 1
# Frames per second of output video
fps = 20
#save_images = False
save_images = True
make_video = False
#make_video = True
# Where should I save the animation images/videos?
os.path.abspath(os.curdir)
os.chdir('..')
save_directory = os.path.join(os.path.abspath(os.curdir), '3D_vis_animations')
# Where should I save the dispersion diagrams?
save_dispersion_diagram_directory = os.path.join(os.path.abspath(os.curdir), '3D_vis_dispersion_diagrams')
# ================================
# Visualisation set-up
# ================================
# Variable definitions (for reference):
# x = k*x
# y = k*y
# z = k*z
# W = omega/k
# K = k*x_0
# t = omega*t
# Loop through selected modes
for mode_ind in [0]:#range(8,14): # for all others. REMEMBER SBB parameters
#for mode_ind in [14,15]: #for fast body surf. REMEMBER SBS parameters
#for mode_ind in [16, 17]:
#for mode_ind in [13]: #for an individual mode
#for mode_ind in range(2,14):
if mode_ind not in range(len(mode_options)):
raise NameError('Mode not in mode_options')
# (note that fast surface modes, i.e. 14 and 15, can only be
# found with SBS parameters in slab_functions...)
mode = mode_options[mode_ind]
# Specify oscillation parameters
if 'slow' in mode and 'surf' in mode or 'alfven' in mode:
K = 2.
elif 'slow' in mode and 'body' in mode:
K = 8.
elif 'fast' in mode and 'body-1' in mode:
K = 8.
elif 'fast' in mode and 'body-2' in mode:
K = 15.
elif 'fast' in mode and 'body-3' in mode:
K = 22.
elif 'fast' in mode and 'surf' in mode:
K = 8.
else:
raise NameError('Mode not found')
# Specify density ratio R1 := rho_1 / rho_0
# R1 = 1.5 # Higher density on left than right
# R1 = 1.8
# R1 = 1.9 # Disp_diagram will only work for R1=1.5, 1.8, 2.0
R1 = 2. # Symmetric slab
# Reduce number of variables in dispersion relation
disp_rel_partial = partial(sf.disp_rel_asym, R1=R1)
# find eigenfrequencies W (= omega/k) within the range Wrange for the given parameters.
Wrange1 = np.linspace(0., sf.cT, 11)
Wrange2 = np.linspace(sf.cT, sf.c0, 401)
Wrange3 = np.linspace(sf.c0, sf.c2, 11)
Woptions_slow_surf = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange1, args=None).transpose())
Woptions_slow_body = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange2, args=None).transpose())
Woptions_fast = np.real(tool.point_find(disp_rel_partial, np.array(K), Wrange3, args=None).transpose())
# Remove W values that are very close to characteristic speeds - these are spurious solutions
tol = 1e-2
indices_to_rm = []
for i, w in enumerate(Woptions_slow_surf):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < 0 or w > sf.cT:
indices_to_rm.append(i)
Woptions_slow_surf = np.delete(Woptions_slow_surf, indices_to_rm)
Woptions_slow_surf.sort()
indices_to_rm = []
for i, w in enumerate(Woptions_slow_body):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < sf.cT or w > sf.c0:
indices_to_rm.append(i)
Woptions_slow_body = np.delete(Woptions_slow_body, indices_to_rm)
Woptions_slow_body.sort()
indices_to_rm = []
for i, w in enumerate(Woptions_fast):
spurious_roots_diff = abs(np.array([w, w - sf.c0, w - sf.c1(R1), w - sf.c2, w - sf.vA]))
if min(spurious_roots_diff) < tol or w < sf.c0 or w > min(sf.c1(R1), sf.c2):
indices_to_rm.append(i)
Woptions_fast = np.delete(Woptions_fast, indices_to_rm)
Woptions_fast.sort()
# remove any higher order slow body modes - we only want to do the first 3 saus/kink
if len(Woptions_slow_body) > 6:
Woptions_slow_body = np.delete(Woptions_slow_body, range(len(Woptions_slow_body) - 6))
Woptions = np.concatenate((Woptions_slow_surf, Woptions_slow_body, Woptions_fast))
# set W to be the eigenfrequency for the requested mode
if 'fast-saus-body' in mode or 'fast-kink-surf' in mode:
W = Woptions_fast[-2]
elif 'fast-kink-body' in mode or 'fast-saus-surf' in mode:
W = Woptions_fast[-1]
elif 'slow' in mode and 'surf' in mode:
W = Woptions_slow_surf[mode_ind]
elif 'slow' in mode and 'body' in mode:
W = Woptions_slow_body[mode_ind-2]
if 'alfven' in mode:
W = sf.vA
else:
W = np.real(W)
# Quick plot to see if we are hitting correct mode
if show_quick_plot:
plt.plot([K] * len(Woptions), Woptions, '.')
plt.plot(K+0.5, W, 'go')
plt.xlim([0,23])
plt.show()
# ================================
# Dispersion diagram
# ================================
if show_dispersion:
if 'alfven' in mode:
raise NameError("Dispersion plot requested for an alfven mode. Can't do that.")
dispersion_diagram.dispersion_diagram(mode_options, mode,
disp_rel_partial, K, W, R1)
# plt.tight_layout() # seems to make it chop the sides off with this
plt.savefig(os.path.join(save_dispersion_diagram_directory, 'R1_' + str(R1) + '_' + mode + '.png') )
plt.close()
# ================================
# Animation
# ================================
if show_animation:
print('Starting ' + mode)
# set grid parameters
xmin = -2.*K
xmax = 2.*K
ymin = 0.
ymax = 4.
zmin = 0.
zmax = 2*np.pi
# You can change ny but be careful changing nx, nz.
nx = 300 # 300 gives reduced bouncing of field lines compared to 100, at significant computational cost.
ny = 300
nz = 300
nt = number_of_frames
if nz % nt != 0:
print("nt doesnt divide nz so there may be a problem with chopping in z direction for each time step")
t_start = 0.
t_end = zmax
t = t_start
xvals = np.linspace(xmin, xmax, nx)
yvals = np.linspace(ymin, ymax, ny)
zvals = np.linspace(zmin, zmax, nz, endpoint=False) # A fudge to give the height as exactly one wavelength
x_spacing = max(nx, ny, nz) / nx
y_spacing = max(nx, ny, nz) / ny
z_spacing = max(nx, ny, nz) / nz
# For masking points for plotting vector fields- have to do it manually due to Mayavi bug
mod = int(4 * nx / 100)
mod_y = int(np.ceil(mod / y_spacing))
# Get the data xi=displacement, v=velocity, b=mag field
if show_disp_top or show_disp_front:
xixvals = np.real(np.repeat(sf.xix(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
xizvals = np.real(np.repeat(sf.xiz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
xiyvals = np.real(np.repeat(sf.xiy(mode, xvals, zvals, t, W, K)[:, :, np.newaxis], ny, axis=2))
if show_vel_front or show_vel_top:
vxvals = np.real(np.repeat(sf.vx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vzvals = np.real(np.repeat(sf.vz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vyvals = np.real(np.repeat(sf.vy(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
if show_vel_front_pert or show_vel_top_pert:
vxvals = np.real(np.repeat(sf.vx_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vzvals = np.real(np.repeat(sf.vz_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
vyvals = np.zeros_like(vxvals)
# Axis is defined on the mag field so we have to set up this data
bxvals = np.real(np.repeat(sf.bx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
byvals = np.real(np.repeat(sf.by(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
bz_eq3d = np.repeat(sf.bz_eq(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2)
bzvals = np.real(np.repeat(-sf.bz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2) +
bz_eq3d)
# displacement at the right and left boundaries
if show_boundary:
xix_boundary_r_vals = np.real(np.repeat(K + sf.xix_boundary(mode, zvals, t, W, K, R1, boundary='r')[:, np.newaxis], ny, axis=1))
xix_boundary_l_vals = np.real(np.repeat(-K + sf.xix_boundary(mode, zvals, t, W, K, R1, boundary='l')[:, np.newaxis], ny, axis=1))
if show_density:
rho_vals = np.real(np.repeat(sf.rho(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
if show_density_pert:
rho_vals = np.real(np.repeat(sf.rho_pert(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
if show_disp_top or show_disp_front:
xixvals_t = xixvals
xiyvals_t = xiyvals
xizvals_t = xizvals
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_t = vxvals
vyvals_t = vyvals
vzvals_t = vzvals
if show_boundary:
xix_boundary_r_vals_t = xix_boundary_r_vals
xix_boundary_l_vals_t = xix_boundary_l_vals
if show_density or show_density_pert:
rho_vals_t = rho_vals
# ================================
# Starting figure and visualisation modules
# ================================
zgrid_zy, ygrid_zy = np.mgrid[0:nz:(nz)*1j,
0:ny:(ny)*1j]
fig = mlab.figure(size=res) # (1920, 1080) for 1080p , tuple(101 * np.array((16,9))) #16:9 aspect ratio for video upload
# Spacing of grid so that we can display a visualisation cube without having the same number of grid points in each dimension
spacing = np.array([x_spacing, z_spacing, y_spacing])
if show_density or show_density_pert:
# Scalar field density
rho = mlab.pipeline.scalar_field(rho_vals_t, name="density", figure=fig)
rho.spacing = spacing
mpf.volume_red_blue(rho, rho_vals_t)
#Masking points
if show_mag_vec:
bxvals_mask_front_t, byvals_mask_front_t, bzvals_mask_front_t = mpf.mask_points(bxvals_t, byvals_t, bzvals_t,
'front', mod, mod_y)
if show_disp_top:
xixvals_mask_top_t, xiyvals_mask_top_t, xizvals_mask_top_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'top', mod, mod_y)
if show_disp_front:
xixvals_mask_front_t, xiyvals_mask_front_t, xizvals_mask_front_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'front', mod, mod_y)
if show_vel_top or show_vel_top_pert:
vxvals_mask_top_t, vyvals_mask_top_t, vzvals_mask_top_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'top', mod, mod_y)
if show_vel_front or show_vel_front_pert:
vxvals_mask_front_t, vyvals_mask_front_t, vzvals_mask_front_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'front', mod, mod_y)
xgrid, zgrid, ygrid = np.mgrid[0:nx:(nx)*1j,
0:nz:(nz)*1j,
0:ny:(ny)*1j]
field = mlab.pipeline.vector_field(bxvals_t, bzvals_t, byvals_t, name="B field",
figure=fig, scalars=zgrid)
field.spacing = spacing
if show_axes:
mpf.axes_no_label(field)
if show_mini_axis:
mpf.mini_axes()
if uniform_light:
# uniform lighting; if we turn shading of the volumes off, we are OK without it
mpf.uniform_lighting(fig)
#Black background
mpf.background_colour(fig, (0., 0., 0.))
scalefactor = 8. * nx / 100. # scale factor for direction field vectors
# Set up visualisation modules
if show_mag_vec:
bdirfield_front = mlab.pipeline.vector_field(bxvals_mask_front_t, bzvals_mask_front_t,
byvals_mask_front_t, name="B field front",
figure=fig)
bdirfield_front.spacing = spacing
mpf.vector_cut_plane(bdirfield_front, 'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_vel_top or show_vel_top_pert:
vdirfield_top = mlab.pipeline.vector_field(vxvals_mask_top_t, np.zeros_like(vxvals_mask_top_t),
vyvals_mask_top_t, name="V field top",
figure=fig)
vdirfield_top.spacing = spacing
mpf.vector_cut_plane(vdirfield_top, 'top', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_vel_front or show_vel_front_pert:
vdirfield_front = mlab.pipeline.vector_field(vxvals_mask_front_t, vzvals_mask_front_t,
vyvals_mask_front_t, name="V field front",
figure=fig)
vdirfield_front.spacing = spacing
mpf.vector_cut_plane(vdirfield_front,'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_disp_top:
xidirfield_top = mlab.pipeline.vector_field(xixvals_mask_top_t, np.zeros_like(xixvals_mask_top_t),
xiyvals_mask_top_t, name="Xi field top",
figure=fig)
xidirfield_top.spacing = spacing
mpf.vector_cut_plane(xidirfield_top, 'top', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
if show_disp_front:
xidirfield_front = mlab.pipeline.vector_field(xixvals_mask_front_t, xizvals_mask_front_t,
xiyvals_mask_front_t, name="Xi field front",
figure=fig)
xidirfield_front.spacing = spacing
mpf.vector_cut_plane(xidirfield_front, 'front', nx, ny, nz,
y_spacing, scale_factor=scalefactor)
# Loop through time
for t_ind in range(nt):
if t_ind == 0:
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
if show_disp_top or show_disp_front:
xixvals_t = xixvals
xiyvals_t = xiyvals
xizvals_t = xizvals
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_t = vxvals
vyvals_t = vyvals
vzvals_t = vzvals
if show_boundary:
xix_boundary_r_vals_t = xix_boundary_r_vals
xix_boundary_l_vals_t = xix_boundary_l_vals
if show_density or show_density_pert:
rho_vals_t = rho_vals
else:
bxvals = np.real(np.repeat(sf.bx(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2))
byvals = np.real(np.repeat(sf.by(mode, xvals, zvals, t, K)[:, :, np.newaxis], ny, axis=2))
bz_eq3d = np.repeat(sf.bz_eq(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2)
bzvals = np.real(np.repeat(-sf.bz(mode, xvals, zvals, t, W, K, R1)[:, :, np.newaxis], ny, axis=2) +
bz_eq3d)
bxvals_t = bxvals
byvals_t = byvals
bzvals_t = bzvals
# Update mag field data
field.mlab_source.set(u=bxvals_t, v=bzvals_t, w=byvals_t)
# Update mag field visualisation module
if show_mag_vec:
bxvals_mask_front_t, byvals_mask_front_t, bzvals_mask_front_t = mpf.mask_points(bxvals_t, byvals_t, bzvals_t,
'front', mod, mod_y)
bdirfield_front.mlab_source.set(u=bxvals_mask_front_t, v=bzvals_mask_front_t, w=byvals_mask_front_t)
# Update displacement field data
if show_disp_top or show_disp_front:
xixvals_split = np.split(xixvals, [nz - (nz / nt) * t_ind], axis=1)
xiyvals_split = np.split(xiyvals, [nz - (nz / nt) * t_ind], axis=1)
xizvals_split = np.split(xizvals, [nz - (nz / nt) * t_ind], axis=1)
xixvals_t = np.concatenate((xixvals_split[1], xixvals_split[0]), axis=1)
xiyvals_t = np.concatenate((xiyvals_split[1], xiyvals_split[0]), axis=1)
xizvals_t = np.concatenate((xizvals_split[1], xizvals_split[0]), axis=1)
# Update displacement field visualisation module
if show_disp_top:
xixvals_mask_top_t, xiyvals_mask_top_t, xizvals_mask_top_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'top', mod, mod_y)
xidirfield_top.mlab_source.set(u=xixvals_mask_top_t, v=np.zeros_like(xixvals_mask_top_t), w=xiyvals_mask_top_t)
if show_disp_front:
xixvals_mask_front_t, xiyvals_mask_front_t, xizvals_mask_front_t = mpf.mask_points(xixvals_t, xiyvals_t, xizvals_t,
'front', mod, mod_y)
xidirfield_front.mlab_source.set(u=xixvals_mask_front_t, v=xizvals_mask_front_t, w=xiyvals_mask_front_t)
# Update velocity field data
if show_vel_top or show_vel_top_pert or show_vel_front or show_vel_front_pert:
vxvals_split = np.split(vxvals, [nz - (nz / nt) * t_ind], axis=1)
vyvals_split = np.split(vyvals, [nz - (nz / nt) * t_ind], axis=1)
vzvals_split = np.split(vzvals, [nz - (nz / nt) * t_ind], axis=1)
vxvals_t = np.concatenate((vxvals_split[1], vxvals_split[0]), axis=1)
vyvals_t = np.concatenate((vyvals_split[1], vyvals_split[0]), axis=1)
vzvals_t = np.concatenate((vzvals_split[1], vzvals_split[0]), axis=1)
# Update velocity field visualisation module
if show_vel_top or show_vel_top_pert:
vxvals_mask_top_t, vyvals_mask_top_t, vzvals_mask_top_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'top', mod, mod_y)
vdirfield_top.mlab_source.set(u=vxvals_mask_top_t, v=np.zeros_like(vxvals_mask_top_t), w=vyvals_mask_top_t)
if show_vel_front or show_vel_front_pert:
vxvals_mask_front_t, vyvals_mask_front_t, vzvals_mask_front_t = mpf.mask_points(vxvals_t, vyvals_t, vzvals_t,
'front', mod, mod_y)
vdirfield_front.mlab_source.set(u=vxvals_mask_front_t, v=vzvals_mask_front_t, w=vyvals_mask_front_t)
# Update boundary displacement data
if show_boundary:
xix_boundary_r_vals_split = np.split(xix_boundary_r_vals, [nz - (nz / nt) * t_ind], axis=0)
xix_boundary_l_vals_split = np.split(xix_boundary_l_vals, [nz - (nz / nt) * t_ind], axis=0)
xix_boundary_r_vals_t = np.concatenate((xix_boundary_r_vals_split[1], xix_boundary_r_vals_split[0]), axis=0)
xix_boundary_l_vals_t = np.concatenate((xix_boundary_l_vals_split[1], xix_boundary_l_vals_split[0]), axis=0)
# Update density data
if show_density or show_density_pert:
rho_vals_split = np.split(rho_vals, [nz - (nz / nt) * t_ind], axis=1)
rho_vals_t = np.concatenate((rho_vals_split[1], rho_vals_split[0]), axis=1)
rho.mlab_source.set(scalars=rho_vals_t)
# Boundary data - Letting mayavi know where to plot the boundary
if show_boundary:
ext_min_r = ((nx) * (xix_boundary_r_vals_t.min() - xmin) / (xmax - xmin)) * x_spacing
ext_max_r = ((nx) * (xix_boundary_r_vals_t.max() - xmin) / (xmax - xmin)) * x_spacing
ext_min_l = ((nx) * (xix_boundary_l_vals_t.min() - xmin) / (xmax - xmin)) * x_spacing
ext_max_l = ((nx) * (xix_boundary_l_vals_t.max() - xmin) / (xmax - xmin)) * x_spacing
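# Mapping used above (a sketch): a boundary position x in data coordinates
# is converted to grid coordinates as nx * (x - xmin) / (xmax - xmin),
# then scaled by x_spacing so the mesh extents line up with the vector field.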
#Make field lines
if show_mag:
# Move seed points up with the phase speed - a bit of a fudge.
# Create an array of points for which we want mag field seeds
nx_seed = 9
ny_seed = 13
start_x = 30. * nx / 100.
end_x = nx+1 - start_x
start_y = 1.
if ny == 20: # so that the lines don't go right up to the edge of the box
end_y = ny - 1.
elif ny == 100:
end_y = ny - 2.
elif ny == 300:
end_y = ny - 6.
else:
end_y = ny - 1
seeds=[]
dx_res = (end_x - start_x) / (nx_seed-1)
dy_res = (end_y - start_y) / (ny_seed-1)
for j in range(ny_seed):
for i in range(nx_seed):
x = start_x + (i * dx_res) * x_spacing
y = start_y + (j * dy_res) * y_spacing
z = 1. + (t_start + t_ind*(t_end - t_start)/nt)/zmax * nz
seeds.append((x,z,y))
if 'alfven' in mode:
for i in range(nx_seed):
del seeds[0]
del seeds[-1]
# Remove previous field lines - field lines cannot be updated, just the data that they are built from
if t_ind != 0:
field_lines.remove() # field_lines is defined in first go through loop
field_lines = SeedStreamline(seed_points=seeds)
# Field line visualisation tinkering
field_lines.stream_tracer.integration_direction='both'
field_lines.streamline_type = 'tube'
field_lines.stream_tracer.maximum_propagation = nz * 2
field_lines.tube_filter.number_of_sides = 20
field_lines.tube_filter.radius = 0.7 * max(nx, ny, nz) / 100.
field_lines.tube_filter.capping = True
field_lines.actor.property.opacity = 1.0
field.add_child(field_lines)
module_manager = field_lines.parent
# Colormap of magnetic field strength plotted on the field lines
if show_mag_scale:
module_manager.scalar_lut_manager.lut_mode = 'coolwarm'
module_manager.scalar_lut_manager.data_range=[7,18]
else:
mag_lut = module_manager.scalar_lut_manager.lut.table.to_array()
mag_lut[:,0] = [220]*256
mag_lut[:,1] = [20]*256
mag_lut[:,2] = [20]*256
module_manager.scalar_lut_manager.lut.table = mag_lut
if show_mag_fade:
mpf.colormap_fade(module_manager, fade_value=20)
# Which views do you want to show? Options are defined at the start
views_selected = [0]#[0,1,4,5,6] #range(7) #[2,3]
for view_ind, view_selected in enumerate(views_selected):
view = view_options[view_selected]
# Display boundary - cannot be updated each time
if show_boundary:
# Boundaries should look different depending on view
if view == 'front-parallel':
#remove previous boundaries
if t != 0 or view_ind != 0:
boundary_r.remove()
boundary_l.remove()
# Make a fading colormap by changing opacity at ends
lut = np.reshape(np.array([150, 150, 150, 255]*256), (256,4))
fade_value = 125
lut[:fade_value,-1] = np.linspace(0, 255, fade_value)
lut[-fade_value:,-1] = np.linspace(255, 0, fade_value)
# Set up boundary visualisation
boundary_r = mlab.mesh(xix_boundary_r_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_r, ext_max_r, 1, nz, 0, (ny-1) * y_spacing],
opacity=1., representation='wireframe',
line_width=12., scalars=zgrid_zy)
boundary_l = mlab.mesh(xix_boundary_l_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_l, ext_max_l, 1, nz, 0, (ny-1) * y_spacing],
opacity=1., representation='wireframe',
line_width=12., scalars=zgrid_zy)
# Boundary color and other options
boundary_r.module_manager.scalar_lut_manager.lut.table = lut
boundary_l.module_manager.scalar_lut_manager.lut.table = lut
boundary_r.actor.property.lighting = False
boundary_r.actor.property.shading = False
boundary_l.actor.property.lighting = False
boundary_l.actor.property.shading = False
else:
#remove previous boundaries
if t != 0 or view_ind != 0:
boundary_r.remove()
boundary_l.remove()
# Make a fading colormap by changing opacity at ends
lut = np.reshape(np.array([150, 150, 150, 255]*256), (256,4))
fade_value = 20
lut[:fade_value,-1] = np.linspace(0, 255, fade_value)
lut[-fade_value:,-1] = np.linspace(255, 0, fade_value)
# Set up boundary visualisation
boundary_r = mlab.mesh(xix_boundary_r_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_r, ext_max_r, 1, nz, 0, (ny-1) * y_spacing],
opacity=0.7, scalars=zgrid_zy)
boundary_l = mlab.mesh(xix_boundary_l_vals_t, zgrid_zy, ygrid_zy,
extent=[ext_min_l, ext_max_l, 1, nz, 0, (ny-1) * y_spacing],
opacity=0.7, scalars=zgrid_zy)
# Boundary color and other options
boundary_r.module_manager.scalar_lut_manager.lut.table = lut
boundary_l.module_manager.scalar_lut_manager.lut.table = lut
boundary_r.actor.property.lighting = False
boundary_r.actor.property.shading = False
boundary_l.actor.property.lighting = False
boundary_l.actor.property.shading = False
# Set viewing angle - For some unknown reason we must redefine the camera position each time.
# This is something to do with the boundaries being replaced each time.
mpf.view_position(fig, view, nx, ny, nz)
if save_images:
prefix = 'R1_'+str(R1) + '_' + mode + '_' + vis_mod_string + view + '_'# + '_norho_'
mlab.savefig(os.path.join(save_directory, prefix + str(t_ind+1) + '.png'))
if t_ind == nt - 1:
if make_video:
i2v.image2video(filepath=save_directory, prefix=prefix,
output_name=prefix+'video', out_extension='mp4',
fps=fps, n_loops=4, delete_images=True,
delete_old_videos=True, res=res[1])
# Log: to keep us updated with progress
if t_ind % 5 == 4:
print('Finished frame number ' + str(t_ind + 1) + ' out of ' + str(number_of_frames))
#Release some memory after each time step
gc.collect()
#step t forward
t = t + (t_end - t_start) / nt
# Close the Mayavi window each time if we want to make a video
if make_video:
mlab.close(fig)
print('Finished ' + mode) | mit |
39dotyt/closure-linter-now | closure_linter/strict_test.py | 125 | 1964 | #!/usr/bin/env python
# Copyright 2013 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for gjslint --strict.
Tests errors that can be thrown by gjslint when in strict mode.
"""
import unittest
import gflags as flags
import unittest as googletest
from closure_linter import errors
from closure_linter import runner
from closure_linter.common import erroraccumulator
flags.FLAGS.strict = True
class StrictTest(unittest.TestCase):
"""Tests scenarios where strict generates warnings."""
def testUnclosedString(self):
"""Tests warnings are reported when nothing is disabled.
b/11450054.
"""
original = [
'bug = function() {',
' (\'foo\'\');',
'};',
'',
]
expected = [errors.FILE_DOES_NOT_PARSE, errors.MULTI_LINE_STRING,
errors.FILE_IN_BLOCK]
self._AssertErrors(original, expected)
def _AssertErrors(self, original, expected_errors):
"""Asserts that the error fixer corrects original to expected."""
# Trap gjslint's output and parse it to get the messages added.
error_accumulator = erroraccumulator.ErrorAccumulator()
runner.Run('testing.js', error_accumulator, source=original)
error_nums = [e.code for e in error_accumulator.GetErrors()]
error_nums.sort()
expected_errors.sort()
self.assertListEqual(error_nums, expected_errors)
if __name__ == '__main__':
googletest.main()
| apache-2.0 |
cvium/Flexget | flexget/plugins/input/input_csv.py | 13 | 2386 | from __future__ import unicode_literals, division, absolute_import
import logging
import csv
from requests import RequestException
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils.cached_input import cached
log = logging.getLogger('csv')
class InputCSV(object):
"""
Adds support for the CSV format. The configuration may seem a bit complex,
but it has the advantage of being a universal solution regardless of the
CSV layout and the internal entry fields.
Configuration format:
csv:
url: <url>
values:
<field>: <number>
Example DB-fansubs:
csv:
url: http://www.dattebayo.com/t/dump
values:
title: 3 # title is in the 3rd field
url: 1 # download url is in the 1st field
Fields title and url are mandatory. First field is 1.
List of other common (optional) fields can be found from wiki.
"""
schema = {
'type': 'object',
'properties': {
'url': {'type': 'string', 'format': 'url'},
'values': {
'type': 'object',
'additionalProperties': {'type': 'integer'},
'required': ['title', 'url']
}
},
'required': ['url', 'values'],
'additionalProperties': False
}
@cached('csv')
def on_task_input(self, task, config):
entries = []
try:
r = task.requests.get(config['url'])
except RequestException as e:
raise plugin.PluginError('Error fetching `%s`: %s' % (config['url'], e))
# CSV module needs byte strings, we'll convert back to unicode later
page = r.text.encode('utf-8').splitlines()
for row in csv.reader(page):
if not row:
continue
entry = Entry()
for name, index in config.get('values', {}).items():
try:
# Convert the value back to unicode
entry[name] = row[index - 1].decode('utf-8').strip()
except IndexError:
raise plugin.PluginError('Field `%s` index is out of range' % name)
entries.append(entry)
return entries
@event('plugin.register')
def register_plugin():
plugin.register(InputCSV, 'csv', api_ver=2)
| mit |
muffl0n/ansible | lib/ansible/plugins/callback/__init__.py | 2 | 10965 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import difflib
import warnings
from copy import deepcopy
from ansible.compat.six import string_types
from ansible import constants as C
from ansible.vars import strip_internal_keys
from ansible.utils.unicode import to_unicode
try:
from __main__ import display as global_display
except ImportError:
from ansible.utils.display import Display
global_display = Display()
__all__ = ["CallbackBase"]
class CallbackBase:
'''
This is a base ansible callback class that does nothing. New callbacks should
use this class as a base and override any callback methods they wish to execute
custom actions.
'''
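# A minimal sketch of a custom callback (illustrative only; it assumes the
# standard plugin discovery and relies only on attributes defined here):
#
#   class CallbackModule(CallbackBase):
#       CALLBACK_VERSION = 2.0
#       CALLBACK_TYPE = 'notification'
#       CALLBACK_NAME = 'minimal_logger'
#
#       def v2_runner_on_ok(self, result):
#           self._display.display('ok: %s' % result._host.get_name())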
def __init__(self, display=None):
if display:
self._display = display
else:
self._display = global_display
if self._display.verbosity >= 4:
name = getattr(self, 'CALLBACK_NAME', 'unnamed')
ctype = getattr(self, 'CALLBACK_TYPE', 'old')
version = getattr(self, 'CALLBACK_VERSION', '1.0')
self._display.vvvv('Loaded callback %s of type %s, v%s' % (name, ctype, version))
def _dump_results(self, result, indent=None, sort_keys=True, keep_invocation=False):
if result.get('_ansible_no_log', False):
return json.dumps(dict(censored="the output has been hidden due to the fact that 'no_log: true' was specified for this result"))
if not indent and '_ansible_verbose_always' in result and result['_ansible_verbose_always']:
indent = 4
# All result keys starting with _ansible_ are internal, so remove them from the result before we output anything.
abridged_result = strip_internal_keys(result)
# remove invocation unless specifically wanting it
if not keep_invocation and self._display.verbosity < 3 and 'invocation' in result:
del abridged_result['invocation']
return json.dumps(abridged_result, indent=indent, ensure_ascii=False, sort_keys=sort_keys)
def _handle_warnings(self, res):
''' display warnings, if enabled and any exist in the result '''
if C.COMMAND_WARNINGS and 'warnings' in res and res['warnings']:
for warning in res['warnings']:
self._display.warning(warning)
def _get_diff(self, difflist):
if not isinstance(difflist, list):
difflist = [difflist]
ret = []
for diff in difflist:
try:
with warnings.catch_warnings():
warnings.simplefilter('ignore')
ret = []
if 'dst_binary' in diff:
ret.append("diff skipped: destination file appears to be binary\n")
if 'src_binary' in diff:
ret.append("diff skipped: source file appears to be binary\n")
if 'dst_larger' in diff:
ret.append("diff skipped: destination file size is greater than %d\n" % diff['dst_larger'])
if 'src_larger' in diff:
ret.append("diff skipped: source file size is greater than %d\n" % diff['src_larger'])
if 'before' in diff and 'after' in diff:
if 'before_header' in diff:
before_header = "before: %s" % diff['before_header']
else:
before_header = 'before'
if 'after_header' in diff:
after_header = "after: %s" % diff['after_header']
else:
after_header = 'after'
differ = difflib.unified_diff(to_unicode(diff['before']).splitlines(True), to_unicode(diff['after']).splitlines(True), before_header, after_header, '', '', 10)
ret.extend(list(differ))
ret.append('\n')
return u"".join(ret)
except UnicodeDecodeError:
ret.append(">> the files are different, but the diff library cannot compare unicode strings\n\n")
def _get_item(self, result):
if result.get('_ansible_no_log', False):
item = "(censored due to no_log)"
else:
item = result.get('item', None)
return item
def _process_items(self, result):
for res in result._result['results']:
newres = deepcopy(result)
res['item'] = self._get_item(res)
newres._result = res
if 'failed' in res and res['failed']:
self.v2_playbook_item_on_failed(newres)
elif 'skipped' in res and res['skipped']:
self.v2_playbook_item_on_skipped(newres)
else:
self.v2_playbook_item_on_ok(newres)
def set_play_context(self, play_context):
pass
def on_any(self, *args, **kwargs):
pass
def runner_on_failed(self, host, res, ignore_errors=False):
pass
def runner_on_ok(self, host, res):
pass
def runner_on_skipped(self, host, item=None):
pass
def runner_on_unreachable(self, host, res):
pass
def runner_on_no_hosts(self):
pass
def runner_on_async_poll(self, host, res, jid, clock):
pass
def runner_on_async_ok(self, host, res, jid):
pass
def runner_on_async_failed(self, host, res, jid):
pass
def playbook_on_start(self):
pass
def playbook_on_notify(self, host, handler):
pass
def playbook_on_no_hosts_matched(self):
pass
def playbook_on_no_hosts_remaining(self):
pass
def playbook_on_task_start(self, name, is_conditional):
pass
def playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
pass
def playbook_on_setup(self):
pass
def playbook_on_import_for_host(self, host, imported_file):
pass
def playbook_on_not_import_for_host(self, host, missing_file):
pass
def playbook_on_play_start(self, name):
pass
def playbook_on_stats(self, stats):
pass
def on_file_diff(self, host, diff):
pass
####### V2 METHODS, by default they call v1 counterparts if possible ######
def v2_on_any(self, *args, **kwargs):
self.on_any(args, kwargs)
def v2_runner_on_failed(self, result, ignore_errors=False):
host = result._host.get_name()
self.runner_on_failed(host, result._result, ignore_errors)
def v2_runner_on_ok(self, result):
host = result._host.get_name()
self.runner_on_ok(host, result._result)
def v2_runner_on_skipped(self, result):
if C.DISPLAY_SKIPPED_HOSTS:
host = result._host.get_name()
self.runner_on_skipped(host, self._get_item(getattr(result._result,'results',{})))
def v2_runner_on_unreachable(self, result):
host = result._host.get_name()
self.runner_on_unreachable(host, result._result)
def v2_runner_on_no_hosts(self, task):
self.runner_on_no_hosts()
def v2_runner_on_async_poll(self, result):
host = result._host.get_name()
jid = result._result.get('ansible_job_id')
#FIXME, get real clock
clock = 0
self.runner_on_async_poll(host, result._result, jid, clock)
def v2_runner_on_async_ok(self, result):
host = result._host.get_name()
jid = result._result.get('ansible_job_id')
self.runner_on_async_ok(host, result._result, jid)
def v2_runner_on_async_failed(self, result):
host = result._host.get_name()
jid = result._result.get('ansible_job_id')
self.runner_on_async_failed(host, result._result, jid)
def v2_runner_on_file_diff(self, result, diff):
pass # no v1 correspondence
def v2_playbook_on_start(self):
self.playbook_on_start()
def v2_playbook_on_notify(self, result, handler):
host = result._host.get_name()
self.playbook_on_notify(host, handler)
def v2_playbook_on_no_hosts_matched(self):
self.playbook_on_no_hosts_matched()
def v2_playbook_on_no_hosts_remaining(self):
self.playbook_on_no_hosts_remaining()
def v2_playbook_on_task_start(self, task, is_conditional):
self.playbook_on_task_start(task, is_conditional)
def v2_playbook_on_cleanup_task_start(self, task):
pass # no v1 correspondence
def v2_playbook_on_handler_task_start(self, task):
pass # no v1 correspondence
def v2_playbook_on_vars_prompt(self, varname, private=True, prompt=None, encrypt=None, confirm=False, salt_size=None, salt=None, default=None):
self.playbook_on_vars_prompt(varname, private, prompt, encrypt, confirm, salt_size, salt, default)
def v2_playbook_on_setup(self):
self.playbook_on_setup()
def v2_playbook_on_import_for_host(self, result, imported_file):
host = result._host.get_name()
self.playbook_on_import_for_host(host, imported_file)
def v2_playbook_on_not_import_for_host(self, result, missing_file):
host = result._host.get_name()
self.playbook_on_not_import_for_host(host, missing_file)
def v2_playbook_on_play_start(self, play):
self.playbook_on_play_start(play.name)
def v2_playbook_on_stats(self, stats):
self.playbook_on_stats(stats)
def v2_on_file_diff(self, result):
host = result._host.get_name()
if 'diff' in result._result:
self.on_file_diff(host, result._result['diff'])
def v2_playbook_on_item_ok(self, result):
pass # no v1
def v2_playbook_on_item_failed(self, result):
pass # no v1
def v2_playbook_on_item_skipped(self, result):
pass # no v1
def v2_playbook_on_include(self, included_file):
pass # no v1 correspondence
def v2_playbook_item_on_ok(self, result):
pass
def v2_playbook_item_on_failed(self, result):
pass
def v2_playbook_item_on_skipped(self, result):
pass
| gpl-3.0 |
bundgus/python-playground | ssh-playground/demo_sftp.py | 1 | 2786 | #!/usr/bin/env python
# Copyright (C) 2003-2007 Robey Pointer <robeypointer@gmail.com>
#
# This file is part of paramiko.
#
# Paramiko is free software; you can redistribute it and/or modify it under the
# terms of the GNU Lesser General Public License as published by the Free
# Software Foundation; either version 2.1 of the License, or (at your option)
# any later version.
#
# Paramiko is distributed in the hope that it will be useful, but WITHOUT ANY
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR
# A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Paramiko; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA.
# based on code provided by raymond mosteller (thanks!)
import socket
import sys
import traceback
import paramiko
# setup logging
paramiko.util.log_to_file('demo_sftp.log')
hostname = '192.168.1.11'
port = 22
username = 'username'
password = 'password'
# Paramiko client configuration
UseGSSAPI = False # enable GSS-API / SSPI authentication
DoGSSAPIKeyExchange = False
# now, connect and use paramiko Transport to negotiate SSH2 across the connection
try:
t = paramiko.Transport((hostname, port))
t.connect(None, username, password, gss_host=socket.getfqdn(hostname),
gss_auth=UseGSSAPI, gss_kex=DoGSSAPIKeyExchange)
sftp = paramiko.SFTPClient.from_transport(t)
# dirlist on remote host
dirlist = sftp.listdir('.')
print("Dirlist: %s" % dirlist)
# copy this demo onto the server
try:
sftp.mkdir("demo_sftp_folder")
except IOError:
print('(assuming demo_sftp_folder/ already exists)')
with sftp.open('demo_sftp_folder/README', 'w') as f:
f.write('This was created by demo_sftp.py.\n')
with open('demo_sftp.py', 'r') as f:
data = f.read()
sftp.open('demo_sftp_folder/demo_sftp.py', 'w').write(data)
print('created demo_sftp_folder/ on the server')
# copy the README back here
with sftp.open('demo_sftp_folder/README', 'r') as f:
data = f.read()
with open('README_demo_sftp', 'w') as f:
f.write(data.decode('utf-8'))
print('copied README back here')
# BETTER: use the get() and put() methods
sftp.put('demo_sftp.py', 'demo_sftp_folder/demo_sftp.py')
sftp.get('demo_sftp_folder/README', 'README_demo_sftp')
t.close()
except Exception as e:
print('*** Caught exception: %s: %s' % (e.__class__, e))
traceback.print_exc()
try:
t.close()
except:
pass
sys.exit(1)
| mit |
hungtt57/matchmaker | lib/python2.7/site-packages/django/utils/tzinfo.py | 82 | 3932 | "Implementation of tzinfo classes for use with datetime.datetime."
from __future__ import unicode_literals
import time
import warnings
from datetime import timedelta, tzinfo
from django.utils.deprecation import RemovedInDjango19Warning
from django.utils.encoding import (
DEFAULT_LOCALE_ENCODING, force_str, force_text,
)
warnings.warn(
"django.utils.tzinfo will be removed in Django 1.9. "
"Use django.utils.timezone instead.",
RemovedInDjango19Warning, stacklevel=2)
# Python's docs say: "A tzinfo subclass must have an __init__() method that can
# be called with no arguments". FixedOffset and LocalTimezone don't honor this
# requirement. Defining __getinitargs__ is sufficient to fix copy/deepcopy as
# well as pickling/unpickling.
class FixedOffset(tzinfo):
"Fixed offset in minutes east from UTC."
def __init__(self, offset):
warnings.warn(
"django.utils.tzinfo.FixedOffset will be removed in Django 1.9. "
"Use django.utils.timezone.get_fixed_timezone instead.",
RemovedInDjango19Warning)
if isinstance(offset, timedelta):
self.__offset = offset
offset = self.__offset.seconds // 60
else:
self.__offset = timedelta(minutes=offset)
sign = '-' if offset < 0 else '+'
self.__name = "%s%02d%02d" % (sign, abs(offset) / 60., abs(offset) % 60)
def __repr__(self):
return self.__name
def __getinitargs__(self):
return self.__offset,
def utcoffset(self, dt):
return self.__offset
def tzname(self, dt):
return self.__name
def dst(self, dt):
return timedelta(0)
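# Example usage (a sketch): FixedOffset(60) represents UTC+01:00.
#   from datetime import datetime
#   dt = datetime(2015, 1, 1, 12, 0, tzinfo=FixedOffset(60))
#   dt.utcoffset()   # timedelta(0, 3600)
#   dt.tzname(None)  # '+0100'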
# This implementation is used for display purposes. It uses an approximation
# for DST computations on dates >= 2038.
# A similar implementation exists in django.utils.timezone. It's used for
# timezone support (when USE_TZ = True) and focuses on correctness.
class LocalTimezone(tzinfo):
"Proxy timezone information from time module."
def __init__(self, dt):
warnings.warn(
"django.utils.tzinfo.LocalTimezone will be removed in Django 1.9. "
"Use django.utils.timezone.get_default_timezone instead.",
RemovedInDjango19Warning)
tzinfo.__init__(self)
self.__dt = dt
self._tzname = self.tzname(dt)
def __repr__(self):
return force_str(self._tzname)
def __getinitargs__(self):
return self.__dt,
def utcoffset(self, dt):
if self._isdst(dt):
return timedelta(seconds=-time.altzone)
else:
return timedelta(seconds=-time.timezone)
def dst(self, dt):
if self._isdst(dt):
return timedelta(seconds=-time.altzone) - timedelta(seconds=-time.timezone)
else:
return timedelta(0)
def tzname(self, dt):
is_dst = False if dt is None else self._isdst(dt)
try:
return force_text(time.tzname[is_dst], DEFAULT_LOCALE_ENCODING)
except UnicodeDecodeError:
return None
def _isdst(self, dt):
tt = (dt.year, dt.month, dt.day,
dt.hour, dt.minute, dt.second,
dt.weekday(), 0, 0)
try:
stamp = time.mktime(tt)
except (OverflowError, ValueError):
# 32 bit systems can't handle dates after Jan 2038, and certain
# systems can't handle dates before ~1901-12-01:
#
# >>> time.mktime((1900, 1, 13, 0, 0, 0, 0, 0, 0))
# OverflowError: mktime argument out of range
# >>> time.mktime((1850, 1, 13, 0, 0, 0, 0, 0, 0))
# ValueError: year out of range
#
# In this case, we fake the date, because we only care about the
# DST flag.
tt = (2037,) + tt[1:]
stamp = time.mktime(tt)
tt = time.localtime(stamp)
return tt.tm_isdst > 0
| mit |
yunikkk/omim | 3party/freetype/src/tools/docmaker/docmaker.py | 165 | 3183 | #!/usr/bin/env python
#
# docmaker.py
#
# Convert source code markup to HTML documentation.
#
# Copyright 2002-2015 by
# David Turner.
#
# This file is part of the FreeType project, and may only be used,
# modified, and distributed under the terms of the FreeType project
# license, LICENSE.TXT. By continuing to use, modify, or distribute
# this file you indicate that you have read the license and
# understand and accept it fully.
#
# This program is a re-write of the original DocMaker tool used to generate
# the API Reference of the FreeType font rendering engine by converting
# in-source comments into structured HTML.
#
# This new version is capable of outputting XML data as well as accepting
# more liberal formatting options. It also uses regular expression matching
# and substitution to speed up operation significantly.
#
from sources import *
from content import *
from utils import *
from formatter import *
from tohtml import *
import utils
import sys, os, time, string, glob, getopt
def usage():
print "\nDocMaker Usage information\n"
print " docmaker [options] file1 [file2 ...]\n"
print "using the following options:\n"
print " -h : print this page"
print " -t : set project title, as in '-t \"My Project\"'"
print " -o : set output directory, as in '-o mydir'"
print " -p : set documentation prefix, as in '-p ft2'"
print ""
print " --title : same as -t, as in '--title=\"My Project\"'"
print " --output : same as -o, as in '--output=mydir'"
print " --prefix : same as -p, as in '--prefix=ft2'"
def main( argv ):
"""Main program loop."""
global output_dir
try:
opts, args = getopt.getopt( sys.argv[1:],
"ht:o:p:",
["help", "title=", "output=", "prefix="] )
except getopt.GetoptError:
usage()
sys.exit( 2 )
if args == []:
usage()
sys.exit( 1 )
# process options
project_title = "Project"
project_prefix = None
output_dir = None
for opt in opts:
if opt[0] in ( "-h", "--help" ):
usage()
sys.exit( 0 )
if opt[0] in ( "-t", "--title" ):
project_title = opt[1]
if opt[0] in ( "-o", "--output" ):
utils.output_dir = opt[1]
if opt[0] in ( "-p", "--prefix" ):
project_prefix = opt[1]
check_output()
# create context and processor
source_processor = SourceProcessor()
content_processor = ContentProcessor()
# retrieve the list of files to process
file_list = make_file_list( args )
for filename in file_list:
source_processor.parse_file( filename )
content_processor.parse_sources( source_processor )
# process sections
content_processor.finish()
formatter = HtmlFormatter( content_processor,
project_title,
project_prefix )
formatter.toc_dump()
formatter.index_dump()
formatter.section_dump_all()
# if called from the command line
if __name__ == '__main__':
main( sys.argv )
# eof
| apache-2.0 |
zhaodelong/django | django/conf/locale/sk/formats.py | 504 | 1173 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
from __future__ import unicode_literals
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'j. F Y'
TIME_FORMAT = 'G:i'
DATETIME_FORMAT = 'j. F Y G:i'
YEAR_MONTH_FORMAT = 'F Y'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'd.m.Y'
SHORT_DATETIME_FORMAT = 'd.m.Y G:i'
FIRST_DAY_OF_WEEK = 1 # Monday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
DATE_INPUT_FORMATS = [
'%d.%m.%Y', '%d.%m.%y', # '25.10.2006', '25.10.06'
'%y-%m-%d', # '06-10-25'
# '%d. %B %Y', '%d. %b. %Y', # '25. October 2006', '25. Oct. 2006'
]
DATETIME_INPUT_FORMATS = [
'%d.%m.%Y %H:%M:%S', # '25.10.2006 14:30:59'
'%d.%m.%Y %H:%M:%S.%f', # '25.10.2006 14:30:59.000200'
'%d.%m.%Y %H:%M', # '25.10.2006 14:30'
'%d.%m.%Y', # '25.10.2006'
]
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '\xa0' # non-breaking space
NUMBER_GROUPING = 3
| bsd-3-clause |
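Django tries each entry of DATE_INPUT_FORMATS in order when parsing form input; a minimal sketch of that loop with strptime (parse_sk_date is a hypothetical helper, not Django API):

from datetime import datetime

DATE_INPUT_FORMATS = ['%d.%m.%Y', '%d.%m.%y', '%y-%m-%d']

def parse_sk_date(value):
    # Try each accepted input format until one parses.
    for fmt in DATE_INPUT_FORMATS:
        try:
            return datetime.strptime(value, fmt).date()
        except ValueError:
            continue
    raise ValueError('%r matches no accepted Slovak date format' % value)

print(parse_sk_date('25.10.2006'))  # 2006-10-25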
joanneko/goodbooks | goodbooks/lib/python2.7/site-packages/requests/packages/chardet/chardetect.py | 1786 | 2504 | #!/usr/bin/env python
"""
Script which takes one or more file paths and reports on their detected
encodings
Example::
% chardetect somefile someotherfile
somefile: windows-1252 with confidence 0.5
someotherfile: ascii with confidence 1.0
If no paths are provided, it takes its input from stdin.
"""
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import sys
from io import open
from chardet import __version__
from chardet.universaldetector import UniversalDetector
def description_of(lines, name='stdin'):
"""
Return a string describing the probable encoding of a file or
list of strings.
:param lines: The lines to get the encoding of.
:type lines: Iterable of bytes
:param name: Name of file or collection of lines
:type name: str
"""
u = UniversalDetector()
for line in lines:
u.feed(line)
u.close()
result = u.result
if result['encoding']:
return '{0}: {1} with confidence {2}'.format(name, result['encoding'],
result['confidence'])
else:
return '{0}: no result'.format(name)
def main(argv=None):
'''
Handles command line arguments and gets things started.
:param argv: List of arguments, as if specified on the command-line.
If None, ``sys.argv[1:]`` is used instead.
:type argv: list of str
'''
# Get command line arguments
parser = argparse.ArgumentParser(
description="Takes one or more file paths and reports their detected \
encodings",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
conflict_handler='resolve')
parser.add_argument('input',
help='File whose encoding we would like to determine.',
type=argparse.FileType('rb'), nargs='*',
default=[sys.stdin])
parser.add_argument('--version', action='version',
version='%(prog)s {0}'.format(__version__))
args = parser.parse_args(argv)
for f in args.input:
if f.isatty():
print("You are running chardetect interactively. Press " +
"CTRL-D twice at the start of a blank line to signal the " +
"end of your input. If you want help, run chardetect " +
"--help\n", file=sys.stderr)
print(description_of(f, f.name))
if __name__ == '__main__':
main()
| bsd-3-clause |
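description_of() above feeds raw lines into chardet's UniversalDetector; the detector also exposes a done flag that allows stopping early once it is confident. A sketch using that incremental API (detect_file is a hypothetical wrapper; feed/close/done/result are the real chardet interface):

from chardet.universaldetector import UniversalDetector

def detect_file(path):
    detector = UniversalDetector()
    with open(path, 'rb') as f:
        for line in f:
            detector.feed(line)
            if detector.done:   # enough evidence gathered; stop reading
                break
    detector.close()            # finalize before reading the result
    return detector.result      # e.g. {'encoding': 'utf-8', 'confidence': 0.99}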
kmaglione/olympia | apps/addons/tests/test_helpers.py | 14 | 4888 | from mock import Mock
from nose.tools import eq_
from pyquery import PyQuery
import amo
import amo.tests
from addons.helpers import (statusflags, flag, contribution, performance_note,
mobile_persona_preview, mobile_persona_confirm)
from addons.models import Addon
class TestHelpers(amo.tests.TestCase):
fixtures = ['base/addon_3615', 'base/users',
'addons/featured', 'base/collections',
'base/featured', 'bandwagon/featured_collections']
def test_statusflags(self):
ctx = {'APP': amo.FIREFOX, 'LANG': 'en-US'}
# unreviewed
a = Addon(status=amo.STATUS_UNREVIEWED)
eq_(statusflags(ctx, a), 'unreviewed')
# recommended
featured = Addon.objects.get(pk=1003)
eq_(statusflags(ctx, featured), 'featuredaddon')
# category featured
featured = Addon.objects.get(pk=1001)
eq_(statusflags(ctx, featured), 'featuredaddon')
def test_flags(self):
ctx = {'APP': amo.FIREFOX, 'LANG': 'en-US'}
# unreviewed
a = Addon(status=amo.STATUS_UNREVIEWED)
eq_(flag(ctx, a), '<h5 class="flag">Not Reviewed</h5>')
# recommended
featured = Addon.objects.get(pk=1003)
eq_(flag(ctx, featured), '<h5 class="flag">Featured</h5>')
# category featured
featured = Addon.objects.get(pk=1001)
eq_(flag(ctx, featured), '<h5 class="flag">Featured</h5>')
def test_contribution_box(self):
a = Addon.objects.get(pk=7661)
a.suggested_amount = '12'
settings = Mock()
settings.MAX_CONTRIBUTION = 5
request = Mock()
request.GET = {'src': 'direct'}
c = {'LANG': 'en-us', 'APP': amo.FIREFOX, 'settings': settings,
'request': request}
s = contribution(c, a)
doc = PyQuery(s)
# make sure input boxes are rendered correctly (bug 555867)
assert doc('input[name=onetime-amount]').length == 1
def test_src_retained(self):
a = Addon.objects.get(pk=7661)
a.suggested_amount = '12'
settings = Mock()
settings.MAX_CONTRIBUTION = 5
request = Mock()
c = {'LANG': 'en-us', 'APP': amo.FIREFOX, 'settings': settings,
'request': request}
s = contribution(c, a, contribution_src='browse')
doc = PyQuery(s)
eq_(doc('input[name=source]').attr('value'), 'browse')
def test_mobile_persona_preview(self):
ctx = {'APP': amo.FIREFOX, 'LANG': 'en-US'}
persona = Addon.objects.get(pk=15679).persona
s = mobile_persona_preview(ctx, persona)
doc = PyQuery(s)
bt = doc('.persona-preview div[data-browsertheme]')
assert bt
assert persona.preview_url in bt.attr('style')
eq_(persona.json_data, bt.attr('data-browsertheme'))
assert bt.find('p')
def _test_mobile_persona_ctx(self):
request = Mock()
request.APP = amo.FIREFOX
request.GET = {}
request.user.is_authenticated.return_value = False
request.amo_user.mobile_addons = []
return {'APP': amo.FIREFOX, 'LANG': 'en-US', 'request': request}
def test_mobile_persona_confirm_large(self):
persona = Addon.objects.get(id=15679).persona
s = mobile_persona_confirm(self._test_mobile_persona_ctx(), persona)
doc = PyQuery(s)
assert not doc('.persona-slider')
assert doc('.preview')
assert doc('.confirm-buttons .add')
assert doc('.confirm-buttons .cancel')
assert not doc('.more')
def test_mobile_persona_confirm_small(self):
persona = Addon.objects.get(id=15679).persona
s = mobile_persona_confirm(self._test_mobile_persona_ctx(), persona,
size='small')
doc = PyQuery(s)
assert doc('.persona-slider')
assert not doc('.persona-slider .preview')
assert doc('.confirm-buttons .add')
assert doc('.confirm-buttons .cancel')
more = doc('.more')
assert more
eq_(more.attr('href'), persona.addon.get_url_path())
class TestPerformanceNote(amo.tests.TestCase):
listing = '<div class="performance-note">'
not_listing = '<div class="notification performance-note">'
def setUp(self):
super(TestPerformanceNote, self).setUp()
request_mock = Mock()
request_mock.APP = amo.FIREFOX
self.ctx = {'request': request_mock, 'amo': amo}
def test_show_listing(self):
r = performance_note(self.ctx, 30, listing=True)
assert self.listing in r, r
def test_show_not_listing(self):
r = performance_note(self.ctx, 30)
assert self.not_listing in r, r
def test_only_fx(self):
self.ctx['request'].APP = amo.THUNDERBIRD
r = performance_note(self.ctx, 30)
eq_(r.strip(), '')
| bsd-3-clause |
eeshangarg/oh-mainline | vendor/packages/twisted/twisted/scripts/tap2deb.py | 17 | 7376 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
import sys, os, string, shutil
from twisted.python import usage
class MyOptions(usage.Options):
optFlags = [["unsigned", "u"]]
optParameters = [["tapfile", "t", "twistd.tap"],
["maintainer", "m", "", "The maintainer's name and email in a specific format: "
"'John Doe <johndoe@example.com>'"],
["protocol", "p", ""],
["description", "e", ""],
["long_description", "l", ""],
["set-version", "V", "1.0"],
["debfile", "d", None],
["type", "y", "tap", "type of configuration: 'tap', 'xml, 'source' or 'python' for .tac files"]]
#zsh_altArgDescr = {"foo":"use this description for foo instead"}
#zsh_multiUse = ["foo", "bar"]
#zsh_mutuallyExclusive = [("foo", "bar"), ("bar", "baz")]
zsh_actions = {"type":"(tap xml source python)"}
#zsh_actionDescr = {"logfile":"log file name", "random":"random seed"}
def postOptions(self):
if not self["maintainer"]:
raise usage.UsageError, "maintainer must be specified."
type_dict = {
'tap': 'file',
'python': 'python',
'source': 'source',
'xml': 'xml',
}
def save_to_file(file, text):
f = open(file, 'w')
f.write(text)
f.close()
def run():
try:
config = MyOptions()
config.parseOptions()
except usage.error, ue:
sys.exit("%s: %s" % (sys.argv[0], ue))
tap_file = config['tapfile']
base_tap_file = os.path.basename(config['tapfile'])
protocol = (config['protocol'] or os.path.splitext(base_tap_file)[0])
deb_file = config['debfile'] or 'twisted-'+protocol
version = config['set-version']
maintainer = config['maintainer']
description = config['description'] or ('A Twisted-based server for %(protocol)s' %
vars())
long_description = config['long_description'] or 'Automatically created by tap2deb'
twistd_option = type_dict[config['type']]
date = string.strip(os.popen('822-date').read())
directory = deb_file + '-' + version
python_version = '%s.%s' % sys.version_info[:2]
if os.path.exists(os.path.join('.build', directory)):
os.system('rm -rf %s' % os.path.join('.build', directory))
os.makedirs(os.path.join('.build', directory, 'debian'))
shutil.copy(tap_file, os.path.join('.build', directory))
save_to_file(os.path.join('.build', directory, 'debian', 'README.Debian'),
'''This package was auto-generated by tap2deb\n''')
save_to_file(os.path.join('.build', directory, 'debian', 'conffiles'),
'''\
/etc/init.d/%(deb_file)s
/etc/default/%(deb_file)s
/etc/%(base_tap_file)s
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'default'),
'''\
pidfile=/var/run/%(deb_file)s.pid
rundir=/var/lib/%(deb_file)s/
file=/etc/%(tap_file)s
logfile=/var/log/%(deb_file)s.log
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'init.d'),
'''\
#!/bin/sh
PATH=/sbin:/bin:/usr/sbin:/usr/bin
pidfile=/var/run/%(deb_file)s.pid \
rundir=/var/lib/%(deb_file)s/ \
file=/etc/%(tap_file)s \
logfile=/var/log/%(deb_file)s.log
[ -r /etc/default/%(deb_file)s ] && . /etc/default/%(deb_file)s
test -x /usr/bin/twistd%(python_version)s || exit 0
test -r $file || exit 0
test -r /usr/share/%(deb_file)s/package-installed || exit 0
case "$1" in
start)
echo -n "Starting %(deb_file)s: twistd"
start-stop-daemon --start --quiet --exec /usr/bin/twistd%(python_version)s -- \
--pidfile=$pidfile \
--rundir=$rundir \
--%(twistd_option)s=$file \
--logfile=$logfile
echo "."
;;
stop)
echo -n "Stopping %(deb_file)s: twistd"
start-stop-daemon --stop --quiet \
--pidfile $pidfile
echo "."
;;
restart)
$0 stop
$0 start
;;
force-reload)
$0 restart
;;
*)
echo "Usage: /etc/init.d/%(deb_file)s {start|stop|restart|force-reload}" >&2
exit 1
;;
esac
exit 0
''' % vars())
os.chmod(os.path.join('.build', directory, 'debian', 'init.d'), 0755)
save_to_file(os.path.join('.build', directory, 'debian', 'postinst'),
'''\
#!/bin/sh
update-rc.d %(deb_file)s defaults >/dev/null
invoke-rc.d %(deb_file)s start
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'prerm'),
'''\
#!/bin/sh
invoke-rc.d %(deb_file)s stop
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'postrm'),
'''\
#!/bin/sh
if [ "$1" = purge ]; then
update-rc.d %(deb_file)s remove >/dev/null
fi
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'changelog'),
'''\
%(deb_file)s (%(version)s) unstable; urgency=low
* Created by tap2deb
-- %(maintainer)s %(date)s
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'control'),
'''\
Source: %(deb_file)s
Section: net
Priority: extra
Maintainer: %(maintainer)s
Build-Depends-Indep: debhelper
Standards-Version: 3.5.6
Package: %(deb_file)s
Architecture: all
Depends: python%(python_version)s-twisted
Description: %(description)s
%(long_description)s
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'copyright'),
'''\
This package was auto-debianized by %(maintainer)s on
%(date)s
It was auto-generated by tap2deb
Upstream Author(s):
Moshe Zadka <moshez@twistedmatrix.com> -- tap2deb author
Copyright:
Insert copyright here.
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'dirs'),
'''\
etc/init.d
etc/default
var/lib/%(deb_file)s
usr/share/doc/%(deb_file)s
usr/share/%(deb_file)s
''' % vars())
save_to_file(os.path.join('.build', directory, 'debian', 'rules'),
'''\
#!/usr/bin/make -f
export DH_COMPAT=1
build: build-stamp
build-stamp:
dh_testdir
touch build-stamp
clean:
dh_testdir
dh_testroot
rm -f build-stamp install-stamp
dh_clean
install: install-stamp
install-stamp: build-stamp
dh_testdir
dh_testroot
dh_clean -k
dh_installdirs
# Add here commands to install the package into debian/tmp.
cp %(base_tap_file)s debian/tmp/etc/
cp debian/init.d debian/tmp/etc/init.d/%(deb_file)s
cp debian/default debian/tmp/etc/default/%(deb_file)s
cp debian/copyright debian/tmp/usr/share/doc/%(deb_file)s/
cp debian/README.Debian debian/tmp/usr/share/doc/%(deb_file)s/
touch debian/tmp/usr/share/%(deb_file)s/package-installed
touch install-stamp
binary-arch: build install
binary-indep: build install
dh_testdir
dh_testroot
dh_strip
dh_compress
dh_installchangelogs
dh_fixperms
dh_installdeb
dh_shlibdeps
dh_gencontrol
dh_md5sums
dh_builddeb
source diff:
@echo >&2 'source and diff are obsolete - use dpkg-source -b'; false
binary: binary-indep binary-arch
.PHONY: build clean binary-indep binary-arch binary install
''' % vars())
os.chmod(os.path.join('.build', directory, 'debian', 'rules'), 0755)
os.chdir('.build/%(directory)s' % vars())
os.system('dpkg-buildpackage -rfakeroot' + ['', ' -uc -us'][config['unsigned']])
if __name__ == '__main__':
run()
| agpl-3.0 |
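Every Debian control file above is rendered with the '%(name)s' % vars() idiom: vars() with no arguments returns the local namespace as a dict, so each local variable becomes a named placeholder. A tiny self-contained sketch (the values are made up):

def changelog_entry():
    deb_file = 'twisted-web'
    version = '1.0'
    maintainer = 'John Doe <johndoe@example.com>'
    # Every local above is reachable by name via vars().
    return '%(deb_file)s (%(version)s) unstable; urgency=low\n -- %(maintainer)s' % vars()

print(changelog_entry())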
tomlof/scikit-learn | doc/sphinxext/numpy_ext/numpydoc.py | 413 | 6182 | """
========
numpydoc
========
Sphinx extension that handles docstrings in the Numpy standard format. [1]
It will:
- Convert Parameters etc. sections to field lists.
- Convert See Also section to a See also entry.
- Renumber references.
- Extract the signature from the docstring, if it can't be determined
otherwise.
.. [1] http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines#docstring-standard
"""
from __future__ import unicode_literals
import sys # Only needed to check Python version
import os
import re
import pydoc
from .docscrape_sphinx import get_doc_object
from .docscrape_sphinx import SphinxDocString
import inspect
def mangle_docstrings(app, what, name, obj, options, lines,
reference_offset=[0]):
cfg = dict(use_plots=app.config.numpydoc_use_plots,
show_class_members=app.config.numpydoc_show_class_members)
if what == 'module':
# Strip top title
title_re = re.compile(r'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*',
re.I | re.S)
lines[:] = title_re.sub('', "\n".join(lines)).split("\n")
else:
doc = get_doc_object(obj, what, "\n".join(lines), config=cfg)
if sys.version_info[0] < 3:
lines[:] = unicode(doc).splitlines()
else:
lines[:] = str(doc).splitlines()
if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \
obj.__name__:
if hasattr(obj, '__module__'):
v = dict(full_name="%s.%s" % (obj.__module__, obj.__name__))
else:
v = dict(full_name=obj.__name__)
lines += [u'', u'.. htmlonly::', '']
lines += [u' %s' % x for x in
(app.config.numpydoc_edit_link % v).split("\n")]
# replace reference numbers so that there are no duplicates
references = []
for line in lines:
line = line.strip()
m = re.match(r'^.. \[([a-z0-9_.-])\]', line, re.I)
if m:
references.append(m.group(1))
# start renaming from the longest string, to avoid overwriting parts
references.sort(key=lambda x: -len(x))
if references:
for i, line in enumerate(lines):
for r in references:
if re.match(r'^\d+$', r):
new_r = "R%d" % (reference_offset[0] + int(r))
else:
new_r = u"%s%d" % (r, reference_offset[0])
lines[i] = lines[i].replace(u'[%s]_' % r,
u'[%s]_' % new_r)
lines[i] = lines[i].replace(u'.. [%s]' % r,
u'.. [%s]' % new_r)
reference_offset[0] += len(references)
def mangle_signature(app, what, name, obj,
options, sig, retann):
# Do not try to inspect classes that don't define `__init__`
if (inspect.isclass(obj) and
(not hasattr(obj, '__init__') or
'initializes x; see ' in pydoc.getdoc(obj.__init__))):
return '', ''
if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')):
return
if not hasattr(obj, '__doc__'):
return
doc = SphinxDocString(pydoc.getdoc(obj))
if doc['Signature']:
sig = re.sub("^[^(]*", "", doc['Signature'])
return sig, ''
def setup(app, get_doc_object_=get_doc_object):
global get_doc_object
get_doc_object = get_doc_object_
if sys.version_info[0] < 3:
app.connect(b'autodoc-process-docstring', mangle_docstrings)
app.connect(b'autodoc-process-signature', mangle_signature)
else:
app.connect('autodoc-process-docstring', mangle_docstrings)
app.connect('autodoc-process-signature', mangle_signature)
app.add_config_value('numpydoc_edit_link', None, False)
app.add_config_value('numpydoc_use_plots', None, False)
app.add_config_value('numpydoc_show_class_members', True, True)
# Extra mangling domains
app.add_domain(NumpyPythonDomain)
app.add_domain(NumpyCDomain)
#-----------------------------------------------------------------------------
# Docstring-mangling domains
#-----------------------------------------------------------------------------
try:
import sphinx # lazy to avoid test dependency
except ImportError:
CDomain = PythonDomain = object
else:
from sphinx.domains.c import CDomain
from sphinx.domains.python import PythonDomain
class ManglingDomainBase(object):
directive_mangling_map = {}
def __init__(self, *a, **kw):
super(ManglingDomainBase, self).__init__(*a, **kw)
self.wrap_mangling_directives()
def wrap_mangling_directives(self):
for name, objtype in self.directive_mangling_map.items():
self.directives[name] = wrap_mangling_directive(
self.directives[name], objtype)
class NumpyPythonDomain(ManglingDomainBase, PythonDomain):
name = 'np'
directive_mangling_map = {
'function': 'function',
'class': 'class',
'exception': 'class',
'method': 'function',
'classmethod': 'function',
'staticmethod': 'function',
'attribute': 'attribute',
}
class NumpyCDomain(ManglingDomainBase, CDomain):
name = 'np-c'
directive_mangling_map = {
'function': 'function',
'member': 'attribute',
'macro': 'function',
'type': 'class',
'var': 'object',
}
def wrap_mangling_directive(base_directive, objtype):
class directive(base_directive):
def run(self):
env = self.state.document.settings.env
name = None
if self.arguments:
m = re.match(r'^(.*\s+)?(.*?)(\(.*)?', self.arguments[0])
name = m.group(2).strip()
if not name:
name = self.arguments[0]
lines = list(self.content)
mangle_docstrings(env.app, objtype, name, None, None, lines)
# local import to avoid testing dependency
from docutils.statemachine import ViewList
self.content = ViewList(lines, self.content.parent)
return base_directive.run(self)
return directive
| bsd-3-clause |
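The renumbering pass in mangle_docstrings() shifts citation labels by a running offset so references from different docstrings do not collide; a hedged standalone sketch of the same idea (renumber is a hypothetical name, and the regex is generalized to multi-character labels):

import re

def renumber(lines, offset):
    # Collect citation labels; rewrite the longest first so '[12]' is
    # not clobbered while '[1]' is being renamed.
    text = '\n'.join(lines)
    refs = sorted(set(re.findall(r'^\.\. \[([a-z0-9_.-]+)\]', text, re.I | re.M)),
                  key=len, reverse=True)
    for i in range(len(lines)):
        for r in refs:
            new_r = 'R%d' % (offset + int(r)) if r.isdigit() else '%s%d' % (r, offset)
            lines[i] = lines[i].replace('[%s]_' % r, '[%s]_' % new_r)
            lines[i] = lines[i].replace('.. [%s]' % r, '.. [%s]' % new_r)
    return offset + len(refs)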
FFMG/myoddweb.piger | myodd/boost/libs/python/pyste/src/Pyste/Exporter.py | 54 | 2680 | # Copyright Bruno da Silva de Oliveira 2003. Use, modification and
# distribution is subject to the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
import os.path
#==============================================================================
# Exporter
#==============================================================================
class Exporter(object):
'Base class for objects capable of generating boost.python code.'
INDENT = ' ' * 4
def __init__(self, info, parser_tail=None):
self.info = info
self.parser_tail = parser_tail
self.interface_file = None
self.declarations = []
def Name(self):
raise NotImplementedError(self.__class__.__name__)
def Tail(self):
return self.parser_tail
def Parse(self, parser):
self.parser = parser
header = self.info.include
tail = self.parser_tail
declarations, parser_header = parser.parse(header, tail)
self.parser_header = parser_header
self.SetDeclarations(declarations)
def SetParsedHeader(self, parsed_header):
self.parser_header = parsed_header
def SetDeclarations(self, declarations):
self.declarations = declarations
def GenerateCode(self, codeunit, exported_names):
self.WriteInclude(codeunit)
self.Export(codeunit, exported_names)
def WriteInclude(self, codeunit):
codeunit.Write('include', '#include <%s>\n' % self.info.include)
def Export(self, codeunit, exported_names):
'subclasses must override this to do the real work'
pass
def GetDeclarations(self, fullname):
decls = []
for decl in self.declarations:
if decl.FullName() == fullname:
decls.append(decl)
if not decls:
raise RuntimeError, 'no %s declaration found!' % fullname
return decls
def GetDeclaration(self, fullname):
decls = self.GetDeclarations(fullname)
#assert len(decls) == 1
return decls[0]
def Order(self):
'''Returns a string that uniquely identifies this instance. All
exporters will be sorted by Order before being exported.
'''
return 0, self.info.name
def Header(self):
return self.info.include
def __eq__(self, other):
return type(self) is type(other) and self.Name() == other.Name() \
and self.interface_file == other.interface_file
def __ne__(self, other):
return not self == other
| gpl-2.0 |
zgchizi/oppia-uc | core/domain/value_generators_domain_test.py | 23 | 1202 | # coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from core.domain import value_generators_domain
from core.tests import test_utils
class ValueGeneratorsUnitTests(test_utils.GenericTestBase):
"""Test the value generator registry."""
def test_value_generator_registry(self):
copier_id = 'Copier'
copier = value_generators_domain.Registry.get_generator_class_by_id(
copier_id)
self.assertEqual(copier().id, copier_id)
all_generator_classes = (
value_generators_domain.Registry.get_all_generator_classes())
self.assertEqual(len(all_generator_classes), 2)
| apache-2.0 |
htc-mirror/primou-ics-3.0.16-d5b834a | tools/perf/scripts/python/check-perf-trace.py | 11214 | 2503 | # perf script event handlers, generated by perf script -g python
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# This script tests basic functionality such as flag and symbol
# strings, common_xxx() calls back into perf, begin, end, unhandled
# events, etc. Basically, if this script runs successfully and
# displays expected results, Python scripting support should be ok.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from Core import *
from perf_trace_context import *
unhandled = autodict()
def trace_begin():
print "trace_begin"
pass
def trace_end():
print_unhandled()
def irq__softirq_entry(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
vec):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "vec=%s\n" % \
(symbol_str("irq__softirq_entry", "vec", vec)),
def kmem__kmalloc(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
call_site, ptr, bytes_req, bytes_alloc,
gfp_flags):
print_header(event_name, common_cpu, common_secs, common_nsecs,
common_pid, common_comm)
print_uncommon(context)
print "call_site=%u, ptr=%u, bytes_req=%u, " \
"bytes_alloc=%u, gfp_flags=%s\n" % \
(call_site, ptr, bytes_req, bytes_alloc,
flag_str("kmem__kmalloc", "gfp_flags", gfp_flags)),
def trace_unhandled(event_name, context, event_fields_dict):
try:
unhandled[event_name] += 1
except TypeError:
unhandled[event_name] = 1
def print_header(event_name, cpu, secs, nsecs, pid, comm):
print "%-20s %5u %05u.%09u %8u %-20s " % \
(event_name, cpu, secs, nsecs, pid, comm),
# print trace fields not included in handler args
def print_uncommon(context):
print "common_preempt_count=%d, common_flags=%s, common_lock_depth=%d, " \
% (common_pc(context), trace_flag_str(common_flags(context)), \
common_lock_depth(context))
def print_unhandled():
keys = unhandled.keys()
if not keys:
return
print "\nunhandled events:\n\n",
print "%-40s %10s\n" % ("event", "count"),
print "%-40s %10s\n" % ("----------------------------------------", \
"-----------"),
for event_name in keys:
print "%-40s %10d\n" % (event_name, unhandled[event_name])
| gpl-2.0 |
BeATz-UnKNoWN/python-for-android | python3-alpha/python3-src/Tools/scripts/make_ctype.py | 116 | 2280 | #!/usr/bin/env python3
"""Script that generates the ctype.h-replacement in stringobject.c."""
NAMES = ("LOWER", "UPPER", "ALPHA", "DIGIT", "XDIGIT", "ALNUM", "SPACE")
print("""
#define FLAG_LOWER 0x01
#define FLAG_UPPER 0x02
#define FLAG_ALPHA (FLAG_LOWER|FLAG_UPPER)
#define FLAG_DIGIT 0x04
#define FLAG_ALNUM (FLAG_ALPHA|FLAG_DIGIT)
#define FLAG_SPACE 0x08
#define FLAG_XDIGIT 0x10
static unsigned int ctype_table[256] = {""")
for i in range(128):
c = chr(i)
flags = []
for name in NAMES:
if name in ("ALPHA", "ALNUM"):
continue
if name == "XDIGIT":
method = lambda: c.isdigit() or c.upper() in "ABCDEF"
else:
method = getattr(c, "is" + name.lower())
if method():
flags.append("FLAG_" + name)
rc = repr(c)
if c == '\v':
rc = "'\\v'"
elif c == '\f':
rc = "'\\f'"
if not flags:
print(" 0, /* 0x%x %s */" % (i, rc))
else:
print(" %s, /* 0x%x %s */" % ("|".join(flags), i, rc))
for i in range(128, 256, 16):
print(" %s," % ", ".join(16*["0"]))
print("};")
print("")
for name in NAMES:
print("#define IS%s(c) (ctype_table[Py_CHARMASK(c)] & FLAG_%s)" %
(name, name))
print("")
for name in NAMES:
name = "is" + name.lower()
print("#undef %s" % name)
print("#define %s(c) undefined_%s(c)" % (name, name))
print("""
static unsigned char ctype_tolower[256] = {""")
for i in range(0, 256, 8):
values = []
for i in range(i, i+8):
if i < 128:
c = chr(i)
if c.isupper():
i = ord(c.lower())
values.append("0x%02x" % i)
print(" %s," % ", ".join(values))
print("};")
print("""
static unsigned char ctype_toupper[256] = {""")
for i in range(0, 256, 8):
values = []
for i in range(i, i+8):
if i < 128:
c = chr(i)
if c.islower():
i = ord(c.upper())
values.append("0x%02x" % i)
print(" %s," % ", ".join(values))
print("};")
print("""
#define TOLOWER(c) (ctype_tolower[Py_CHARMASK(c)])
#define TOUPPER(c) (ctype_toupper[Py_CHARMASK(c)])
#undef tolower
#define tolower(c) undefined_tolower(c)
#undef toupper
#define toupper(c) undefined_toupper(c)
""")
| apache-2.0 |
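The generated C table packs character classes into bit flags indexed by byte value, so every IS* macro is one table load and a mask. A hedged Python sketch of the same layout (the flag values mirror the generated header):

FLAG_LOWER, FLAG_UPPER, FLAG_DIGIT, FLAG_SPACE = 0x01, 0x02, 0x04, 0x08

ctype_table = [0] * 256
for i in range(128):
    c = chr(i)
    ctype_table[i] = ((FLAG_LOWER if c.islower() else 0)
                      | (FLAG_UPPER if c.isupper() else 0)
                      | (FLAG_DIGIT if c.isdigit() else 0)
                      | (FLAG_SPACE if c.isspace() else 0))

def isalpha(ch):
    # One lookup plus a mask, like the generated ISALPHA(c) macro.
    return bool(ctype_table[ord(ch) & 0xff] & (FLAG_LOWER | FLAG_UPPER))

print(isalpha('A'), isalpha('5'))  # True False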
agentr13/python-phonenumbers | python/phonenumbers/data/region_US.py | 5 | 3252 | """Auto-generated file, do not edit by hand. US metadata"""
from ..phonemetadata import NumberFormat, PhoneNumberDesc, PhoneMetadata
PHONE_METADATA_US = PhoneMetadata(id='US', country_code=1, international_prefix='011',
general_desc=PhoneNumberDesc(national_number_pattern='[2-9]\\d{9}', possible_number_pattern='\\d{7}(?:\\d{3})?'),
fixed_line=PhoneNumberDesc(national_number_pattern='(?:2(?:0[1-35-9]|1[02-9]|2[04589]|3[149]|4[08]|5[1-46]|6[0279]|7[026]|8[13])|3(?:0[1-57-9]|1[02-9]|2[0135]|3[014679]|4[67]|5[12]|6[014]|8[56])|4(?:0[124-9]|1[02-579]|2[3-5]|3[0245]|4[0235]|58|69|7[0589]|8[04])|5(?:0[1-57-9]|1[0235-8]|20|3[0149]|4[01]|5[19]|6[1-37]|7[013-5]|8[056])|6(?:0[1-35-9]|1[024-9]|2[03689]|3[016]|4[16]|5[017]|6[0-279]|78|8[12])|7(?:0[1-46-8]|1[02-9]|2[0457]|3[1247]|4[07]|5[47]|6[02359]|7[02-59]|8[156])|8(?:0[1-68]|1[02-8]|28|3[0-25]|4[3578]|5[046-9]|6[02-5]|7[028])|9(?:0[1346-9]|1[02-9]|2[0589]|3[01678]|4[0179]|5[12469]|7[0-3589]|8[0459]))[2-9]\\d{6}', possible_number_pattern='\\d{7}(?:\\d{3})?', example_number='2015555555'),
mobile=PhoneNumberDesc(national_number_pattern='(?:2(?:0[1-35-9]|1[02-9]|2[04589]|3[149]|4[08]|5[1-46]|6[0279]|7[026]|8[13])|3(?:0[1-57-9]|1[02-9]|2[0135]|3[014679]|4[67]|5[12]|6[014]|8[56])|4(?:0[124-9]|1[02-579]|2[3-5]|3[0245]|4[0235]|58|69|7[0589]|8[04])|5(?:0[1-57-9]|1[0235-8]|20|3[0149]|4[01]|5[19]|6[1-37]|7[013-5]|8[056])|6(?:0[1-35-9]|1[024-9]|2[03689]|3[016]|4[16]|5[017]|6[0-279]|78|8[12])|7(?:0[1-46-8]|1[02-9]|2[0457]|3[1247]|4[07]|5[47]|6[02359]|7[02-59]|8[156])|8(?:0[1-68]|1[02-8]|28|3[0-25]|4[3578]|5[046-9]|6[02-5]|7[028])|9(?:0[1346-9]|1[02-9]|2[0589]|3[01678]|4[0179]|5[12469]|7[0-3589]|8[0459]))[2-9]\\d{6}', possible_number_pattern='\\d{7}(?:\\d{3})?', example_number='2015555555'),
toll_free=PhoneNumberDesc(national_number_pattern='8(?:00|44|55|66|77|88)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='8002345678'),
premium_rate=PhoneNumberDesc(national_number_pattern='900[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='9002345678'),
shared_cost=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
personal_number=PhoneNumberDesc(national_number_pattern='5(?:00|33|44|66|77)[2-9]\\d{6}', possible_number_pattern='\\d{10}', example_number='5002345678'),
voip=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
pager=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
uan=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
voicemail=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
no_international_dialling=PhoneNumberDesc(national_number_pattern='NA', possible_number_pattern='NA'),
national_prefix='1',
national_prefix_for_parsing='1',
number_format=[NumberFormat(pattern='(\\d{3})(\\d{4})', format='\\1-\\2', national_prefix_optional_when_formatting=True),
NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{4})', format='(\\1) \\2-\\3', national_prefix_optional_when_formatting=True)],
intl_number_format=[NumberFormat(pattern='(\\d{3})(\\d{3})(\\d{4})', format='\\1-\\2-\\3')],
main_country_for_code=True,
mobile_number_portable_region=True)
| apache-2.0 |
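Metadata modules like this one are consumed through the library's public API rather than imported directly; a short usage sketch with real phonenumbers calls (the example number comes from the metadata above):

import phonenumbers

num = phonenumbers.parse('2015555555', 'US')
print(phonenumbers.is_valid_number(num))  # True
# The second number_format entry above supplies this national pattern:
print(phonenumbers.format_number(num, phonenumbers.PhoneNumberFormat.NATIONAL))
# (201) 555-5555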
Yannig/ansible | test/units/modules/remote_management/oneview/test_oneview_ethernet_network.py | 78 | 14524 | # -*- coding: utf-8 -*-
#
# Copyright (2016-2017) Hewlett Packard Enterprise Development LP
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import yaml
from ansible.compat.tests import unittest, mock
from oneview_module_loader import EthernetNetworkModule, OneViewModuleResourceNotFound
from hpe_test_utils import OneViewBaseTestCase
FAKE_MSG_ERROR = 'Fake message error'
DEFAULT_ETHERNET_NAME = 'Test Ethernet Network'
RENAMED_ETHERNET = 'Renamed Ethernet Network'
DEFAULT_ENET_TEMPLATE = dict(
name=DEFAULT_ETHERNET_NAME,
vlanId=200,
ethernetNetworkType="Tagged",
purpose="General",
smartLink=False,
privateNetwork=False,
connectionTemplateUri=None
)
PARAMS_FOR_PRESENT = dict(
config='config.json',
state='present',
data=dict(name=DEFAULT_ETHERNET_NAME)
)
PARAMS_TO_RENAME = dict(
config='config.json',
state='present',
data=dict(name=DEFAULT_ETHERNET_NAME,
newName=RENAMED_ETHERNET)
)
YAML_PARAMS_WITH_CHANGES = """
config: "config.json"
state: present
data:
name: 'Test Ethernet Network'
purpose: Management
connectionTemplateUri: ~
bandwidth:
maximumBandwidth: 3000
typicalBandwidth: 2000
"""
YAML_RESET_CONNECTION_TEMPLATE = """
config: "{{ config }}"
state: default_bandwidth_reset
data:
name: 'network name'
"""
PARAMS_FOR_SCOPES_SET = dict(
config='config.json',
state='present',
data=dict(name=DEFAULT_ETHERNET_NAME)
)
PARAMS_FOR_ABSENT = dict(
config='config.json',
state='absent',
data=dict(name=DEFAULT_ETHERNET_NAME)
)
PARAMS_FOR_BULK_CREATED = dict(
config='config.json',
state='present',
data=dict(namePrefix="TestNetwork", vlanIdRange="1-2,5,9-10")
)
DEFAULT_BULK_ENET_TEMPLATE = [
{'name': 'TestNetwork_1', 'vlanId': 1},
{'name': 'TestNetwork_2', 'vlanId': 2},
{'name': 'TestNetwork_5', 'vlanId': 5},
{'name': 'TestNetwork_9', 'vlanId': 9},
{'name': 'TestNetwork_10', 'vlanId': 10},
]
DICT_PARAMS_WITH_CHANGES = yaml.load(YAML_PARAMS_WITH_CHANGES)["data"]
class EthernetNetworkModuleSpec(unittest.TestCase,
OneViewBaseTestCase):
"""
OneViewBaseTestCase provides the mocks used in this test case
"""
def setUp(self):
self.configure_mocks(self, EthernetNetworkModule)
self.resource = self.mock_ov_client.ethernet_networks
def test_should_create_new_ethernet_network(self):
self.resource.get_by.return_value = []
self.resource.create.return_value = DEFAULT_ENET_TEMPLATE
self.mock_ansible_module.params = PARAMS_FOR_PRESENT
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=EthernetNetworkModule.MSG_CREATED,
ansible_facts=dict(ethernet_network=DEFAULT_ENET_TEMPLATE)
)
def test_should_not_update_when_data_is_equals(self):
self.resource.get_by.return_value = [DEFAULT_ENET_TEMPLATE]
self.mock_ansible_module.params = PARAMS_FOR_PRESENT
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
msg=EthernetNetworkModule.MSG_ALREADY_PRESENT,
ansible_facts=dict(ethernet_network=DEFAULT_ENET_TEMPLATE)
)
def test_update_when_data_has_modified_attributes(self):
data_merged = DEFAULT_ENET_TEMPLATE.copy()
data_merged['purpose'] = 'Management'
self.resource.get_by.return_value = [DEFAULT_ENET_TEMPLATE]
self.resource.update.return_value = data_merged
self.mock_ov_client.connection_templates.get.return_value = {"uri": "uri"}
self.mock_ansible_module.params = yaml.load(YAML_PARAMS_WITH_CHANGES)
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=EthernetNetworkModule.MSG_UPDATED,
ansible_facts=dict(ethernet_network=data_merged)
)
def test_update_when_only_bandwidth_has_modified_attributes(self):
self.resource.get_by.return_value = [DICT_PARAMS_WITH_CHANGES]
self.mock_ov_client.connection_templates.get.return_value = {"uri": "uri"}
self.mock_ansible_module.params = yaml.load(YAML_PARAMS_WITH_CHANGES)
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=EthernetNetworkModule.MSG_UPDATED,
ansible_facts=dict(ethernet_network=DICT_PARAMS_WITH_CHANGES)
)
def test_update_when_data_has_modified_attributes_but_bandwidth_is_equal(self):
data_merged = DEFAULT_ENET_TEMPLATE.copy()
data_merged['purpose'] = 'Management'
self.resource.get_by.return_value = [DEFAULT_ENET_TEMPLATE]
self.resource.update.return_value = data_merged
self.mock_ov_client.connection_templates.get.return_value = {
"bandwidth": DICT_PARAMS_WITH_CHANGES['bandwidth']}
self.mock_ansible_module.params = yaml.load(YAML_PARAMS_WITH_CHANGES)
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=EthernetNetworkModule.MSG_UPDATED,
ansible_facts=dict(ethernet_network=data_merged)
)
def test_update_successfully_even_when_connection_template_uri_not_exists(self):
data_merged = DEFAULT_ENET_TEMPLATE.copy()
del data_merged['connectionTemplateUri']
self.resource.get_by.return_value = [DEFAULT_ENET_TEMPLATE]
self.resource.update.return_value = data_merged
self.mock_ansible_module.params = yaml.load(YAML_PARAMS_WITH_CHANGES)
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=EthernetNetworkModule.MSG_UPDATED,
ansible_facts=dict(ethernet_network=data_merged)
)
def test_rename_when_resource_exists(self):
data_merged = DEFAULT_ENET_TEMPLATE.copy()
data_merged['name'] = RENAMED_ETHERNET
params_to_rename = PARAMS_TO_RENAME.copy()
self.resource.get_by.return_value = [DEFAULT_ENET_TEMPLATE]
self.resource.update.return_value = data_merged
self.mock_ansible_module.params = params_to_rename
EthernetNetworkModule().run()
self.resource.update.assert_called_once_with(data_merged)
def test_create_with_new_name_when_resource_not_exists(self):
data_merged = DEFAULT_ENET_TEMPLATE.copy()
data_merged['name'] = RENAMED_ETHERNET
params_to_rename = PARAMS_TO_RENAME.copy()
self.resource.get_by.return_value = []
self.resource.create.return_value = DEFAULT_ENET_TEMPLATE
self.mock_ansible_module.params = params_to_rename
EthernetNetworkModule().run()
self.resource.create.assert_called_once_with(PARAMS_TO_RENAME['data'])
def test_should_remove_ethernet_network(self):
self.resource.get_by.return_value = [DEFAULT_ENET_TEMPLATE]
self.mock_ansible_module.params = PARAMS_FOR_ABSENT
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=EthernetNetworkModule.MSG_DELETED
)
def test_should_do_nothing_when_ethernet_network_not_exist(self):
self.resource.get_by.return_value = []
self.mock_ansible_module.params = PARAMS_FOR_ABSENT
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
msg=EthernetNetworkModule.MSG_ALREADY_ABSENT
)
def test_should_create_all_ethernet_networks(self):
self.resource.get_range.side_effect = [[], DEFAULT_BULK_ENET_TEMPLATE]
self.resource.create_bulk.return_value = DEFAULT_BULK_ENET_TEMPLATE
self.mock_ansible_module.params = PARAMS_FOR_BULK_CREATED
EthernetNetworkModule().run()
self.resource.create_bulk.assert_called_once_with(
dict(namePrefix="TestNetwork", vlanIdRange="1-2,5,9-10"))
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=EthernetNetworkModule.MSG_BULK_CREATED,
ansible_facts=dict(ethernet_network_bulk=DEFAULT_BULK_ENET_TEMPLATE))
def test_should_create_missing_ethernet_networks(self):
enet_get_range_return = [
{'name': 'TestNetwork_1', 'vlanId': 1},
{'name': 'TestNetwork_2', 'vlanId': 2},
]
self.resource.get_range.side_effect = [enet_get_range_return, DEFAULT_BULK_ENET_TEMPLATE]
self.resource.dissociate_values_or_ranges.return_value = [1, 2, 5, 9, 10]
self.mock_ansible_module.params = PARAMS_FOR_BULK_CREATED
EthernetNetworkModule().run()
self.resource.create_bulk.assert_called_once_with(
dict(namePrefix="TestNetwork", vlanIdRange="5,9,10"))
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True, msg=EthernetNetworkModule.MSG_MISSING_BULK_CREATED,
ansible_facts=dict(ethernet_network_bulk=DEFAULT_BULK_ENET_TEMPLATE))
def test_should_create_missing_ethernet_networks_with_just_one_difference(self):
enet_get_range_return = [
{'name': 'TestNetwork_1', 'vlanId': 1},
{'name': 'TestNetwork_2', 'vlanId': 2},
]
self.resource.get_range.side_effect = [enet_get_range_return, DEFAULT_BULK_ENET_TEMPLATE]
self.resource.dissociate_values_or_ranges.return_value = [1, 2, 5]
self.mock_ansible_module.params = PARAMS_FOR_BULK_CREATED
EthernetNetworkModule().run()
self.resource.create_bulk.assert_called_once_with({'vlanIdRange': '5-5', 'namePrefix': 'TestNetwork'})
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
msg=EthernetNetworkModule.MSG_MISSING_BULK_CREATED,
ansible_facts=dict(ethernet_network_bulk=DEFAULT_BULK_ENET_TEMPLATE))
def test_should_do_nothing_when_ethernet_networks_already_exist(self):
self.resource.get_range.return_value = DEFAULT_BULK_ENET_TEMPLATE
self.resource.dissociate_values_or_ranges.return_value = [1, 2, 5, 9, 10]
self.mock_ansible_module.params = PARAMS_FOR_BULK_CREATED
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False, msg=EthernetNetworkModule.MSG_BULK_ALREADY_EXIST,
ansible_facts=dict(ethernet_network_bulk=DEFAULT_BULK_ENET_TEMPLATE))
def test_reset_successfully(self):
self.resource.get_by.return_value = [DICT_PARAMS_WITH_CHANGES]
self.mock_ov_client.connection_templates.update.return_value = {'result': 'success'}
self.mock_ov_client.connection_templates.get.return_value = {
"bandwidth": DICT_PARAMS_WITH_CHANGES['bandwidth']}
self.mock_ov_client.connection_templates.get_default.return_value = {"bandwidth": {
"max": 1
}}
self.mock_ansible_module.params = yaml.load(YAML_RESET_CONNECTION_TEMPLATE)
EthernetNetworkModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True, msg=EthernetNetworkModule.MSG_CONNECTION_TEMPLATE_RESET,
ansible_facts=dict(ethernet_network_connection_template={'result': 'success'}))
def test_should_fail_when_reset_not_existing_ethernet_network(self):
self.resource.get_by.return_value = [None]
self.mock_ansible_module.params = yaml.load(YAML_RESET_CONNECTION_TEMPLATE)
EthernetNetworkModule().run()
self.mock_ansible_module.fail_json.assert_called_once_with(
exception=mock.ANY,
msg=EthernetNetworkModule.MSG_ETHERNET_NETWORK_NOT_FOUND
)
def test_update_scopes_when_different(self):
params_to_scope = PARAMS_FOR_PRESENT.copy()
params_to_scope['data']['scopeUris'] = ['test']
self.mock_ansible_module.params = params_to_scope
resource_data = DEFAULT_ENET_TEMPLATE.copy()
resource_data['scopeUris'] = ['fake']
resource_data['uri'] = 'rest/ethernet/fake'
self.resource.get_by.return_value = [resource_data]
patch_return = resource_data.copy()
patch_return['scopeUris'] = ['test']
self.resource.patch.return_value = patch_return
EthernetNetworkModule().run()
self.resource.patch.assert_called_once_with('rest/ethernet/fake',
operation='replace',
path='/scopeUris',
value=['test'])
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=True,
ansible_facts=dict(ethernet_network=patch_return),
msg=EthernetNetworkModule.MSG_UPDATED
)
def test_should_do_nothing_when_scopes_are_the_same(self):
params_to_scope = PARAMS_FOR_PRESENT.copy()
params_to_scope['data']['scopeUris'] = ['test']
self.mock_ansible_module.params = params_to_scope
resource_data = DEFAULT_ENET_TEMPLATE.copy()
resource_data['scopeUris'] = ['test']
self.resource.get_by.return_value = [resource_data]
EthernetNetworkModule().run()
self.resource.patch.not_been_called()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ansible_facts=dict(ethernet_network=resource_data),
msg=EthernetNetworkModule.MSG_ALREADY_PRESENT
)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 |
garg10may/youtube-dl | youtube_dl/extractor/orf.py | 111 | 10125 | # coding: utf-8
from __future__ import unicode_literals
import json
import re
import calendar
import datetime
from .common import InfoExtractor
from ..utils import (
HEADRequest,
unified_strdate,
ExtractorError,
strip_jsonp,
int_or_none,
float_or_none,
determine_ext,
remove_end,
)
class ORFTVthekIE(InfoExtractor):
IE_NAME = 'orf:tvthek'
IE_DESC = 'ORF TVthek'
_VALID_URL = r'https?://tvthek\.orf\.at/(?:programs/.+?/episodes|topics?/.+?|program/[^/]+)/(?P<id>\d+)'
_TESTS = [{
'url': 'http://tvthek.orf.at/program/Aufgetischt/2745173/Aufgetischt-Mit-der-Steirischen-Tafelrunde/8891389',
'playlist': [{
'md5': '2942210346ed779588f428a92db88712',
'info_dict': {
'id': '8896777',
'ext': 'mp4',
'title': 'Aufgetischt: Mit der Steirischen Tafelrunde',
'description': 'md5:c1272f0245537812d4e36419c207b67d',
'duration': 2668,
'upload_date': '20141208',
},
}],
'skip': 'Blocked outside of Austria / Germany',
}, {
'url': 'http://tvthek.orf.at/topic/Im-Wandel-der-Zeit/8002126/Best-of-Ingrid-Thurnher/7982256',
'playlist': [{
'md5': '68f543909aea49d621dfc7703a11cfaf',
'info_dict': {
'id': '7982259',
'ext': 'mp4',
'title': 'Best of Ingrid Thurnher',
'upload_date': '20140527',
'description': 'Viele Jahre war Ingrid Thurnher das "Gesicht" der ZIB 2. Vor ihrem Wechsel zur ZIB 2 im jahr 1995 moderierte sie unter anderem "Land und Leute", "Österreich-Bild" und "Niederösterreich heute".',
}
}],
'_skip': 'Blocked outside of Austria / Germany',
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
data_json = self._search_regex(
r'initializeAdworx\((.+?)\);\n', webpage, 'video info')
all_data = json.loads(data_json)
def get_segments(all_data):
for data in all_data:
if data['name'] in (
'Tracker::EPISODE_DETAIL_PAGE_OVER_PROGRAM',
'Tracker::EPISODE_DETAIL_PAGE_OVER_TOPIC'):
return data['values']['segments']
sdata = get_segments(all_data)
if not sdata:
raise ExtractorError('Unable to extract segments')
def quality_to_int(s):
m = re.search('([0-9]+)', s)
if m is None:
return -1
return int(m.group(1))
entries = []
for sd in sdata:
video_id = sd['id']
formats = [{
'preference': -10 if fd['delivery'] == 'hls' else None,
'format_id': '%s-%s-%s' % (
fd['delivery'], fd['quality'], fd['quality_string']),
'url': fd['src'],
'protocol': fd['protocol'],
'quality': quality_to_int(fd['quality']),
} for fd in sd['playlist_item_array']['sources']]
# Check for geoblocking.
# There is a property is_geoprotection, but that's always false
geo_str = sd.get('geoprotection_string')
if geo_str:
try:
http_url = next(
f['url']
for f in formats
if re.match(r'^https?://.*\.mp4$', f['url']))
except StopIteration:
pass
else:
req = HEADRequest(http_url)
self._request_webpage(
req, video_id,
note='Testing for geoblocking',
errnote=((
'This video seems to be blocked outside of %s. '
'You may want to try the streaming-* formats.')
% geo_str),
fatal=False)
self._sort_formats(formats)
upload_date = unified_strdate(sd['created_date'])
entries.append({
'_type': 'video',
'id': video_id,
'title': sd['header'],
'formats': formats,
'description': sd.get('description'),
'duration': int(sd['duration_in_seconds']),
'upload_date': upload_date,
'thumbnail': sd.get('image_full_url'),
})
return {
'_type': 'playlist',
'entries': entries,
'id': playlist_id,
}
class ORFOE1IE(InfoExtractor):
IE_NAME = 'orf:oe1'
IE_DESC = 'Radio Österreich 1'
_VALID_URL = r'http://oe1\.orf\.at/(?:programm/|konsole.*?#\?track_id=)(?P<id>[0-9]+)'
# Audio on ORF radio is only available for 7 days, so we can't add tests.
_TEST = {
'url': 'http://oe1.orf.at/konsole?show=on_demand#?track_id=394211',
'only_matching': True,
}
def _real_extract(self, url):
show_id = self._match_id(url)
data = self._download_json(
'http://oe1.orf.at/programm/%s/konsole' % show_id,
show_id
)
timestamp = datetime.datetime.strptime('%s %s' % (
data['item']['day_label'],
data['item']['time']
), '%d.%m.%Y %H:%M')
unix_timestamp = calendar.timegm(timestamp.utctimetuple())
return {
'id': show_id,
'title': data['item']['title'],
'url': data['item']['url_stream'],
'ext': 'mp3',
'description': data['item'].get('info'),
'timestamp': unix_timestamp
}
class ORFFM4IE(InfoExtractor):
IE_NAME = 'orf:fm4'
IE_DESC = 'radio FM4'
_VALID_URL = r'http://fm4\.orf\.at/7tage/?#(?P<date>[0-9]+)/(?P<show>\w+)'
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
show_date = mobj.group('date')
show_id = mobj.group('show')
data = self._download_json(
'http://audioapi.orf.at/fm4/json/2.0/broadcasts/%s/4%s' % (show_date, show_id),
show_id
)
def extract_entry_dict(info, title, subtitle):
return {
'id': info['loopStreamId'].replace('.mp3', ''),
'url': 'http://loopstream01.apa.at/?channel=fm4&id=%s' % info['loopStreamId'],
'title': title,
'description': subtitle,
'duration': (info['end'] - info['start']) / 1000,
'timestamp': info['start'] / 1000,
'ext': 'mp3'
}
entries = [extract_entry_dict(t, data['title'], data['subtitle']) for t in data['streams']]
return {
'_type': 'playlist',
'id': show_id,
'title': data['title'],
'description': data['subtitle'],
'entries': entries
}
class ORFIPTVIE(InfoExtractor):
IE_NAME = 'orf:iptv'
IE_DESC = 'iptv.ORF.at'
_VALID_URL = r'http://iptv\.orf\.at/(?:#/)?stories/(?P<id>\d+)'
_TEST = {
'url': 'http://iptv.orf.at/stories/2275236/',
'md5': 'c8b22af4718a4b4af58342529453e3e5',
'info_dict': {
'id': '350612',
'ext': 'flv',
'title': 'Weitere Evakuierungen um Vulkan Calbuco',
'description': 'md5:d689c959bdbcf04efeddedbf2299d633',
'duration': 68.197,
'thumbnail': 're:^https?://.*\.jpg$',
'upload_date': '20150425',
},
}
def _real_extract(self, url):
story_id = self._match_id(url)
webpage = self._download_webpage(
'http://iptv.orf.at/stories/%s' % story_id, story_id)
video_id = self._search_regex(
r'data-video(?:id)?="(\d+)"', webpage, 'video id')
data = self._download_json(
'http://bits.orf.at/filehandler/static-api/json/current/data.json?file=%s' % video_id,
video_id)[0]
duration = float_or_none(data['duration'], 1000)
video = data['sources']['default']
load_balancer_url = video['loadBalancerUrl']
abr = int_or_none(video.get('audioBitrate'))
vbr = int_or_none(video.get('bitrate'))
fps = int_or_none(video.get('videoFps'))
width = int_or_none(video.get('videoWidth'))
height = int_or_none(video.get('videoHeight'))
thumbnail = video.get('preview')
rendition = self._download_json(
load_balancer_url, video_id, transform_source=strip_jsonp)
f = {
'abr': abr,
'vbr': vbr,
'fps': fps,
'width': width,
'height': height,
}
formats = []
for format_id, format_url in rendition['redirect'].items():
if format_id == 'rtmp':
ff = f.copy()
ff.update({
'url': format_url,
'format_id': format_id,
})
formats.append(ff)
elif determine_ext(format_url) == 'f4m':
formats.extend(self._extract_f4m_formats(
format_url, video_id, f4m_id=format_id))
elif determine_ext(format_url) == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, video_id, 'mp4', m3u8_id=format_id))
else:
continue
self._sort_formats(formats)
title = remove_end(self._og_search_title(webpage), ' - iptv.ORF.at')
description = self._og_search_description(webpage)
upload_date = unified_strdate(self._html_search_meta(
'dc.date', webpage, 'upload date'))
return {
'id': video_id,
'title': title,
'description': description,
'duration': duration,
'thumbnail': thumbnail,
'upload_date': upload_date,
'formats': formats,
}
| unlicense |
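quality_to_int() above ranks quality labels by the first integer they contain, falling back to -1 for unknown strings; a standalone sketch of the idea (the sample labels are made up):

import re

def quality_to_int(s):
    m = re.search(r'([0-9]+)', s)
    return int(m.group(1)) if m else -1  # unknown labels sort last

formats = [{'quality': 'Q1A'}, {'quality': 'Q8A'}, {'quality': 'auto'}]
formats.sort(key=lambda f: quality_to_int(f['quality']), reverse=True)
print([f['quality'] for f in formats])  # ['Q8A', 'Q1A', 'auto']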
gregswift/ansible | lib/ansible/utils/module_docs_fragments/aws.py | 232 | 3156 | # (c) 2014, Will Thames <will@thames.id.au>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
class ModuleDocFragment(object):
# AWS only documentation fragment
DOCUMENTATION = """
options:
ec2_url:
description:
- Url to use to connect to EC2 or your Eucalyptus cloud (by default the module will use EC2 endpoints). Ignored for modules where region is required. Must be specified for all other modules if region is not used. If not set then the value of the EC2_URL environment variable, if any, is used.
required: false
default: null
aliases: []
aws_secret_key:
description:
- AWS secret key. If not set then the value of the AWS_SECRET_ACCESS_KEY, AWS_SECRET_KEY, or EC2_SECRET_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_secret_key', 'secret_key' ]
aws_access_key:
description:
- AWS access key. If not set then the value of the AWS_ACCESS_KEY_ID, AWS_ACCESS_KEY or EC2_ACCESS_KEY environment variable is used.
required: false
default: null
aliases: [ 'ec2_access_key', 'access_key' ]
security_token:
description:
- AWS STS security token. If not set then the value of the AWS_SECURITY_TOKEN or EC2_SECURITY_TOKEN environment variable is used.
required: false
default: null
aliases: [ 'access_token' ]
version_added: "1.6"
validate_certs:
description:
- When set to "no", SSL certificates will not be validated for boto versions >= 2.6.0.
required: false
default: "yes"
choices: ["yes", "no"]
aliases: []
version_added: "1.5"
profile:
description:
- uses a boto profile. Only works with boto >= 2.24.0
required: false
default: null
aliases: []
version_added: "1.6"
requirements:
- "python >= 2.6"
- boto
notes:
- If parameters are not set within the module, the following
environment variables can be used in decreasing order of precedence
C(AWS_URL) or C(EC2_URL),
C(AWS_ACCESS_KEY_ID) or C(AWS_ACCESS_KEY) or C(EC2_ACCESS_KEY),
C(AWS_SECRET_ACCESS_KEY) or C(AWS_SECRET_KEY) or C(EC2_SECRET_KEY),
C(AWS_SECURITY_TOKEN) or C(EC2_SECURITY_TOKEN),
C(AWS_REGION) or C(EC2_REGION)
- Ansible uses the boto configuration file (typically ~/.boto) if no
credentials are provided. See http://boto.readthedocs.org/en/latest/boto_config_tut.html
- C(AWS_REGION) or C(EC2_REGION) can typically be used to specify the
AWS region, when required, but this can also be configured in the boto config file
"""
| gpl-3.0 |
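The notes above spell out a precedence chain of environment variables; a hedged sketch of resolving credentials in that documented order (first_env is a hypothetical helper, not Ansible or boto API):

import os

def first_env(*names):
    # Return the value of the first variable in `names` that is set.
    for name in names:
        value = os.environ.get(name)
        if value:
            return value
    return None

access_key = first_env('AWS_ACCESS_KEY_ID', 'AWS_ACCESS_KEY', 'EC2_ACCESS_KEY')
secret_key = first_env('AWS_SECRET_ACCESS_KEY', 'AWS_SECRET_KEY', 'EC2_SECRET_KEY')
region = first_env('AWS_REGION', 'EC2_REGION')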
EnviroCentre/jython-upgrade | jython/lib/test/test_telnetlib.py | 41 | 15781 | import socket
import telnetlib
import time
import Queue
import unittest
from unittest import TestCase
from test import test_support
threading = test_support.import_module('threading')
HOST = test_support.HOST
EOF_sigil = object()
def server(evt, serv, dataq=None):
""" Open a tcp server in three steps
1) set evt to true to let the parent know we are ready
2) [optional] if is not False, write the list of data from dataq.get()
to the socket.
"""
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
if dataq:
data = ''
new_data = dataq.get(True, 0.5)
dataq.task_done()
for item in new_data:
if item == EOF_sigil:
break
if type(item) in [int, float]:
time.sleep(item)
else:
data += item
written = conn.send(data)
data = data[written:]
conn.close()
except socket.timeout:
pass
finally:
serv.close()
class GeneralTests(TestCase):
def setUp(self):
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(60) # Safety net. Look issue 11812
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=server, args=(self.evt,self.sock))
self.thread.setDaemon(True)
self.thread.start()
self.evt.wait()
def tearDown(self):
self.thread.join()
def testBasic(self):
# connects
telnet = telnetlib.Telnet(HOST, self.port)
telnet.sock.close()
def testTimeoutDefault(self):
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
telnet = telnetlib.Telnet(HOST, self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def testTimeoutNone(self):
# None, having other default
self.assertTrue(socket.getdefaulttimeout() is None)
socket.setdefaulttimeout(30)
try:
telnet = telnetlib.Telnet(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertTrue(telnet.sock.gettimeout() is None)
telnet.sock.close()
def testTimeoutValue(self):
telnet = telnetlib.Telnet(HOST, self.port, timeout=30)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def testTimeoutOpen(self):
telnet = telnetlib.Telnet()
telnet.open(HOST, self.port, timeout=30)
self.assertEqual(telnet.sock.gettimeout(), 30)
telnet.sock.close()
def _read_setUp(self):
self.evt = threading.Event()
self.dataq = Queue.Queue()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(10)
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=server, args=(self.evt,self.sock, self.dataq))
self.thread.start()
self.evt.wait()
def _read_tearDown(self):
self.thread.join()
class ReadTests(TestCase):
setUp = _read_setUp
tearDown = _read_tearDown
# use an approach to testing timeouts similar to test_timeout.py's;
# these will never pass 100% of the time, but make the fuzz big enough that failures are rare
block_long = 0.6
block_short = 0.3
def test_read_until_A(self):
"""
read_until(expected, [timeout])
Read until the expected string has been seen, or a timeout is
hit (default is no timeout); may block.
"""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
data = telnet.read_until('match')
self.assertEqual(data, ''.join(want[:-2]))
def test_read_until_B(self):
# test the timeout - it does NOT raise socket.timeout
want = ['hello', self.block_long, 'not seen', EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
data = telnet.read_until('not seen', self.block_short)
self.assertEqual(data, want[0])
self.assertEqual(telnet.read_all(), 'not seen')
def test_read_until_with_poll(self):
"""Use select.poll() to implement telnet.read_until()."""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
if not telnet._has_poll:
raise unittest.SkipTest('select.poll() is required')
telnet._has_poll = True
self.dataq.join()
data = telnet.read_until('match')
self.assertEqual(data, ''.join(want[:-2]))
def test_read_until_with_select(self):
"""Use select.select() to implement telnet.read_until()."""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
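# force the select()-based fallback so read_until() exercises that path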
telnet._has_poll = False
self.dataq.join()
data = telnet.read_until('match')
self.assertEqual(data, ''.join(want[:-2]))
def test_read_all_A(self):
"""
read_all()
Read all data until EOF; may block.
"""
want = ['x' * 500, 'y' * 500, 'z' * 500, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
data = telnet.read_all()
self.assertEqual(data, ''.join(want[:-1]))
return
def _test_blocking(self, func):
self.dataq.put([self.block_long, EOF_sigil])
self.dataq.join()
start = time.time()
data = func()
self.assertTrue(self.block_short <= time.time() - start)
def test_read_all_B(self):
self._test_blocking(telnetlib.Telnet(HOST, self.port).read_all)
def test_read_all_C(self):
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
telnet.read_all()
telnet.read_all() # shouldn't raise
def test_read_some_A(self):
"""
read_some()
Read at least one byte or EOF; may block.
"""
# test 'at least one byte'
want = ['x' * 500, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
data = telnet.read_some()
self.assertTrue(len(data) >= 1)
def test_read_some_B(self):
# test EOF
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
self.assertEqual('', telnet.read_some())
def test_read_some_C(self):
self._test_blocking(telnetlib.Telnet(HOST, self.port).read_some)
def _test_read_any_eager_A(self, func_name):
"""
read_very_eager()
Read all data available already queued or on the socket,
without blocking.
"""
want = [self.block_long, 'x' * 100, 'y' * 100, EOF_sigil]
expects = want[1] + want[2]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
func = getattr(telnet, func_name)
data = ''
while True:
try:
data += func()
self.assertTrue(expects.startswith(data))
except EOFError:
break
self.assertEqual(expects, data)
def _test_read_any_eager_B(self, func_name):
# test EOF
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
time.sleep(self.block_short)
func = getattr(telnet, func_name)
self.assertRaises(EOFError, func)
# read_eager and read_very_eager make the same guarantees
# (they behave differently but we only test the guarantees)
def test_read_very_eager_A(self):
self._test_read_any_eager_A('read_very_eager')
def test_read_very_eager_B(self):
self._test_read_any_eager_B('read_very_eager')
def test_read_eager_A(self):
self._test_read_any_eager_A('read_eager')
def test_read_eager_B(self):
self._test_read_any_eager_B('read_eager')
# NB -- we need to test the IAC block which is mentioned in the docstring
# but not in the module docs
def _test_read_any_lazy_B(self, func_name):
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
func = getattr(telnet, func_name)
telnet.fill_rawq()
self.assertRaises(EOFError, func)
def test_read_lazy_A(self):
want = ['x' * 100, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
time.sleep(self.block_short)
self.assertEqual('', telnet.read_lazy())
data = ''
while True:
try:
read_data = telnet.read_lazy()
data += read_data
if not read_data:
telnet.fill_rawq()
except EOFError:
break
self.assertTrue(want[0].startswith(data))
self.assertEqual(data, want[0])
def test_read_lazy_B(self):
self._test_read_any_lazy_B('read_lazy')
def test_read_very_lazy_A(self):
want = ['x' * 100, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
time.sleep(self.block_short)
self.assertEqual('', telnet.read_very_lazy())
data = ''
while True:
try:
read_data = telnet.read_very_lazy()
except EOFError:
break
data += read_data
if not read_data:
telnet.fill_rawq()
self.assertEqual('', telnet.cookedq)
telnet.process_rawq()
self.assertTrue(want[0].startswith(data))
self.assertEqual(data, want[0])
def test_read_very_lazy_B(self):
self._test_read_any_lazy_B('read_very_lazy')
class nego_collector(object):
def __init__(self, sb_getter=None):
self.seen = ''
self.sb_getter = sb_getter
self.sb_seen = ''
def do_nego(self, sock, cmd, opt):
self.seen += cmd + opt
if cmd == tl.SE and self.sb_getter:
sb_data = self.sb_getter()
self.sb_seen += sb_data
tl = telnetlib
class OptionTests(TestCase):
setUp = _read_setUp
tearDown = _read_tearDown
# RFC 854 commands
cmds = [tl.AO, tl.AYT, tl.BRK, tl.EC, tl.EL, tl.GA, tl.IP, tl.NOP]
def _test_command(self, data):
""" helper for testing IAC + cmd """
self.setUp()
self.dataq.put(data)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
nego = nego_collector()
telnet.set_option_negotiation_callback(nego.do_nego)
txt = telnet.read_all()
cmd = nego.seen
self.assertTrue(len(cmd) > 0) # we expect at least one command
self.assertIn(cmd[0], self.cmds)
self.assertEqual(cmd[1], tl.NOOPT)
self.assertEqual(len(''.join(data[:-1])), len(txt + cmd))
nego.sb_getter = None # break the nego => telnet cycle
self.tearDown()
def test_IAC_commands(self):
# reset our setup
self.dataq.put([EOF_sigil])
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
self.tearDown()
for cmd in self.cmds:
self._test_command(['x' * 100, tl.IAC + cmd, 'y'*100, EOF_sigil])
self._test_command(['x' * 10, tl.IAC + cmd, 'y'*10, EOF_sigil])
self._test_command([tl.IAC + cmd, EOF_sigil])
# all at once
self._test_command([tl.IAC + cmd for cmd in self.cmds] + [EOF_sigil])
self.assertEqual('', telnet.read_sb_data())
def test_SB_commands(self):
# RFC 855, subnegotiations portion
send = [tl.IAC + tl.SB + tl.IAC + tl.SE,
tl.IAC + tl.SB + tl.IAC + tl.IAC + tl.IAC + tl.SE,
tl.IAC + tl.SB + tl.IAC + tl.IAC + 'aa' + tl.IAC + tl.SE,
tl.IAC + tl.SB + 'bb' + tl.IAC + tl.IAC + tl.IAC + tl.SE,
tl.IAC + tl.SB + 'cc' + tl.IAC + tl.IAC + 'dd' + tl.IAC + tl.SE,
EOF_sigil,
]
self.dataq.put(send)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
nego = nego_collector(telnet.read_sb_data)
telnet.set_option_negotiation_callback(nego.do_nego)
txt = telnet.read_all()
self.assertEqual(txt, '')
want_sb_data = tl.IAC + tl.IAC + 'aabb' + tl.IAC + 'cc' + tl.IAC + 'dd'
self.assertEqual(nego.sb_seen, want_sb_data)
self.assertEqual('', telnet.read_sb_data())
nego.sb_getter = None # break the nego => telnet cycle
class ExpectTests(TestCase):
def setUp(self):
self.evt = threading.Event()
self.dataq = Queue.Queue()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(10)
self.port = test_support.bind_port(self.sock)
self.thread = threading.Thread(target=server, args=(self.evt,self.sock,
self.dataq))
self.thread.start()
self.evt.wait()
def tearDown(self):
self.thread.join()
# use an approach to testing timeouts similar to test_timeout.py; these
# tests can never pass 100% of the time, but the timing fuzz is big enough
# that spurious failures should be rare
block_long = 0.6
block_short = 0.3
def test_expect_A(self):
"""
expect(expected, [timeout])
Read until the expected string has been seen, or a timeout is
hit (default is no timeout); may block.
"""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
(_,_,data) = telnet.expect(['match'])
self.assertEqual(data, ''.join(want[:-2]))
def test_expect_B(self):
# test the timeout - it does NOT raise socket.timeout
want = ['hello', self.block_long, 'not seen', EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
self.dataq.join()
(_,_,data) = telnet.expect(['not seen'], self.block_short)
self.assertEqual(data, want[0])
self.assertEqual(telnet.read_all(), 'not seen')
def test_expect_with_poll(self):
"""Use select.poll() to implement telnet.expect()."""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
if not telnet._has_poll:
raise unittest.SkipTest('select.poll() is required')
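# pin the poll()-based implementation so expect() exercises that path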
telnet._has_poll = True
self.dataq.join()
(_,_,data) = telnet.expect(['match'])
self.assertEqual(data, ''.join(want[:-2]))
def test_expect_with_select(self):
"""Use select.select() to implement telnet.expect()."""
want = ['x' * 10, 'match', 'y' * 10, EOF_sigil]
self.dataq.put(want)
telnet = telnetlib.Telnet(HOST, self.port)
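# force the select()-based fallback so expect() exercises that path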
telnet._has_poll = False
self.dataq.join()
(_,_,data) = telnet.expect(['match'])
self.assertEqual(data, ''.join(want[:-2]))
def test_main(verbose=None):
test_support.run_unittest(GeneralTests, ReadTests, OptionTests,
ExpectTests)
if __name__ == '__main__':
test_main()
| mit |
atlassian/dd-agent | checks.d/jenkins.py | 31 | 7720 | # stdlib
from collections import defaultdict
from glob import glob
import os
import time
from xml.etree.ElementTree import ElementTree
# project
from checks import AgentCheck
from util import get_hostname
class Skip(Exception):
"""
Raised by :class:`Jenkins` when it comes across
a build or job that should be excluded from being checked.
"""
def __init__(self, reason, dir_name):
message = 'skipping build or job at %s because %s' % (dir_name, reason)
Exception.__init__(self, message)
class Jenkins(AgentCheck):
datetime_format = '%Y-%m-%d_%H-%M-%S'
def __init__(self, name, init_config, agentConfig):
AgentCheck.__init__(self, name, init_config, agentConfig)
self.high_watermarks = {}
def _timestamp_from_build_file(self, dir_name, tree):
timestamp = tree.find('timestamp')
if timestamp is None or not timestamp.text:
raise Skip('the timestamp cannot be found', dir_name)
else:
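# Jenkins records build timestamps in milliseconds; convert to epoch seconds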
return int(timestamp.text) / 1000.0
def _timestamp_from_dirname(self, dir_name):
if not os.path.isdir(dir_name):
raise Skip("it's not a build directory", dir_name)
try:
# Parse the timestamp from the directory name
date_str = os.path.basename(dir_name)
time_tuple = time.strptime(date_str, self.datetime_format)
return time.mktime(time_tuple)
except ValueError:
return None
def _get_build_metadata(self, dir_name, watermark):
if os.path.exists(os.path.join(dir_name, 'jenkins_build.tar.gz')):
raise Skip('the build has already been archived', dir_name)
timestamp = self._timestamp_from_dirname(dir_name)
# This is not the latest build
if timestamp is not None and timestamp <= watermark:
return None
# Read the build.xml metadata file that Jenkins generates
build_metadata = os.path.join(dir_name, 'build.xml')
if not os.access(build_metadata, os.R_OK):
self.log.debug("Can't read build file at %s" % (build_metadata))
raise Exception("Can't access build.xml at %s" % (build_metadata))
else:
tree = ElementTree()
tree.parse(build_metadata)
if timestamp is None:
try:
timestamp = self._timestamp_from_build_file(dir_name, tree)
# This is not the latest build
if timestamp <= watermark:
return None
except ValueError:
return None
keys = ['result', 'number', 'duration']
kv_pairs = ((k, tree.find(k)) for k in keys)
d = dict([(k, v.text) for k, v in kv_pairs if v is not None])
d['timestamp'] = timestamp
try:
d['branch'] = tree.find('actions')\
.find('hudson.plugins.git.util.BuildData')\
.find('buildsByBranchName')\
.find('entry')\
.find('hudson.plugins.git.util.Build')\
.find('revision')\
.find('branches')\
.find('hudson.plugins.git.Branch')\
.find('name')\
.text
except Exception:
pass
return d
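# A successful parse yields a dict roughly like this (illustrative values):
#   {'result': 'SUCCESS', 'number': '42', 'duration': '4242',
#    'timestamp': 1425758369.0, 'branch': 'origin/master'}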
def _get_build_results(self, instance_key, job_dir):
job_name = os.path.basename(job_dir)
try:
dirs = glob(os.path.join(job_dir, 'builds', '*_*'))
# Before Jenkins v1.597 the build folders were named with a timestamp (eg: 2015-03-10_19-59-29)
# Starting from Jenkins v1.597 they are named after the build ID (1, 2, 3...)
# So we need to try both formats when looking for the latest build and parsing build.xml
if len(dirs) == 0:
dirs = glob(os.path.join(job_dir, 'builds', '[0-9]*'))
if len(dirs) > 0:
# versions of Jenkins > 1.597 need to be sorted by build number (integer)
try:
dirs = sorted(dirs, key=lambda x: int(x.split('/')[-1]), reverse=True)
except ValueError:
dirs = sorted(dirs, reverse=True)
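# e.g. a numeric sort orders '10' before '9' and '2', whereas a reverse
# lexicographic sort would wrongly yield '9', '2', '10'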
# We try to get the last valid build
for dir_name in dirs:
watermark = self.high_watermarks[instance_key][job_name]
try:
build_metadata = self._get_build_metadata(dir_name, watermark)
except Exception:
build_metadata = None
if build_metadata is not None:
build_result = build_metadata.get('result')
if build_result is None:
break
output = {
'job_name': job_name,
'event_type': 'build result'
}
output.update(build_metadata)
if 'number' not in output:
output['number'] = dir_name.split('/')[-1]
self.high_watermarks[instance_key][job_name] = output.get('timestamp')
self.log.debug("Processing %s results '%s'" % (job_name, output))
yield output
# If it is not a new build, stop here
else:
break
except Exception, e:
self.log.error("Error while working on job %s, exception: %s" % (job_name, e))
def check(self, instance, create_event=True):
if self.high_watermarks.get(instance.get('name'), None) is None:
# On the first run of check(), prime the high_watermarks dict
# so that we only send events that occurred after the agent
# started.
# (Setting high_watermarks in the next statement prevents an
# infinite loop on the recursive call below, assuming nothing
# ever resets high_watermarks to None.)
self.high_watermarks[instance.get('name')] = defaultdict(lambda: 0)
self.check(instance, create_event=False)
jenkins_home = instance.get('jenkins_home')
if not jenkins_home:
raise Exception("No jenkins_home directory set in the config file")
jenkins_jobs_dir = os.path.join(jenkins_home, 'jobs', '*')
job_dirs = glob(jenkins_jobs_dir)
if not job_dirs:
raise Exception('No jobs found in `%s`! '
'Check `jenkins_home` in your config' % (jenkins_jobs_dir))
for job_dir in job_dirs:
for output in self._get_build_results(instance.get('name'), job_dir):
output['host'] = get_hostname(self.agentConfig)
if create_event:
self.log.debug("Creating event for job: %s" % output['job_name'])
self.event(output)
tags = [
'job_name:%s' % output['job_name'],
'result:%s' % output['result'],
'build_number:%s' % output['number']
]
if 'branch' in output:
tags.append('branch:%s' % output['branch'])
self.gauge("jenkins.job.duration", float(output['duration'])/1000.0, tags=tags)
if output['result'] == 'SUCCESS':
self.increment('jenkins.job.success', tags=tags)
else:
self.increment('jenkins.job.failure', tags=tags)
| bsd-3-clause |
JioCloud/cinder | cinder/api/contrib/scheduler_hints.py | 28 | 1924 | # Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import webob.exc
from cinder.api import extensions
from cinder.api.openstack import wsgi
from cinder.api.v2 import volumes
from cinder.i18n import _
LOG = logging.getLogger(__name__)
class SchedulerHintsController(wsgi.Controller):
@staticmethod
def _extract_scheduler_hints(body):
hints = {}
attr = '%s:scheduler_hints' % Scheduler_hints.alias
try:
if attr in body:
hints.update(body[attr])
except ValueError:
msg = _("Malformed scheduler_hints attribute")
raise webob.exc.HTTPBadRequest(explanation=msg)
return hints
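# A request body carrying hints looks roughly like this (illustrative):
#   {"volume": {...},
#    "OS-SCH-HNT:scheduler_hints": {"same_host": ["<volume uuid>"]}}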
@wsgi.extends
def create(self, req, body):
hints = self._extract_scheduler_hints(body)
if 'volume' in body:
body['volume']['scheduler_hints'] = hints
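# yielding hands control back to the core create() handler, which now
# runs with the scheduler hints folded into the request body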
yield
class Scheduler_hints(extensions.ExtensionDescriptor):
"""Pass arbitrary key/value pairs to the scheduler."""
name = "SchedulerHints"
alias = "OS-SCH-HNT"
namespace = volumes.SCHEDULER_HINTS_NAMESPACE
updated = "2013-04-18T00:00:00+00:00"
def get_controller_extensions(self):
controller = SchedulerHintsController()
ext = extensions.ControllerExtension(self, 'volumes', controller)
return [ext]
| apache-2.0 |
snakeleon/YouCompleteMe-x86 | python/ycm/diagnostic_interface.py | 2 | 10173 | # Copyright (C) 2013 Google Inc.
#
# This file is part of YouCompleteMe.
#
# YouCompleteMe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# YouCompleteMe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with YouCompleteMe. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from future import standard_library
standard_library.install_aliases()
from builtins import * # noqa
from future.utils import itervalues, iteritems
from collections import defaultdict, namedtuple
from ycm import vimsupport
from ycm.diagnostic_filter import DiagnosticFilter, CompileLevel
import vim
class DiagnosticInterface( object ):
def __init__( self, user_options ):
self._user_options = user_options
self._diag_filter = DiagnosticFilter.CreateFromOptions( user_options )
# Line and column numbers are 1-based
self._buffer_number_to_line_to_diags = defaultdict(
lambda: defaultdict( list ) )
self._next_sign_id = 1
self._previous_line_number = -1
self._diag_message_needs_clearing = False
self._placed_signs = []
def OnCursorMoved( self ):
line, _ = vimsupport.CurrentLineAndColumn()
line += 1 # Convert to 1-based
if line != self._previous_line_number:
self._previous_line_number = line
if self._user_options[ 'echo_current_diagnostic' ]:
self._EchoDiagnosticForLine( line )
def GetErrorCount( self ):
return len( self._FilterDiagnostics( _DiagnosticIsError ) )
def GetWarningCount( self ):
return len( self._FilterDiagnostics( _DiagnosticIsWarning ) )
def PopulateLocationList( self, diags ):
vimsupport.SetLocationList(
vimsupport.ConvertDiagnosticsToQfList(
self._ApplyDiagnosticFilter( diags ) ) )
def UpdateWithNewDiagnostics( self, diags ):
normalized_diags = [ _NormalizeDiagnostic( x ) for x in
self._ApplyDiagnosticFilter( diags ) ]
self._buffer_number_to_line_to_diags = _ConvertDiagListToDict(
normalized_diags )
if self._user_options[ 'enable_diagnostic_signs' ]:
self._placed_signs, self._next_sign_id = _UpdateSigns(
self._placed_signs,
self._buffer_number_to_line_to_diags,
self._next_sign_id )
if self._user_options[ 'enable_diagnostic_highlighting' ]:
_UpdateSquiggles( self._buffer_number_to_line_to_diags )
if self._user_options[ 'always_populate_location_list' ]:
self.PopulateLocationList( normalized_diags )
def _ApplyDiagnosticFilter( self, diags, extra_predicate = None ):
filetypes = vimsupport.CurrentFiletypes()
diag_filter = self._diag_filter.SubsetForTypes( filetypes )
predicate = diag_filter.IsAllowed
if extra_predicate is not None:
def Filter( diag ):
return extra_predicate( diag ) and diag_filter.IsAllowed( diag )
predicate = Filter
return filter( predicate, diags )
def _EchoDiagnosticForLine( self, line_num ):
buffer_num = vim.current.buffer.number
diags = self._buffer_number_to_line_to_diags[ buffer_num ][ line_num ]
if not diags:
if self._diag_message_needs_clearing:
# Clear any previous diag echo
vimsupport.PostVimMessage( '', warning = False )
self._diag_message_needs_clearing = False
return
text = diags[ 0 ][ 'text' ]
if diags[ 0 ].get( 'fixit_available', False ):
text += ' (FixIt)'
vimsupport.PostVimMessage( text, warning = False, truncate = True )
self._diag_message_needs_clearing = True
def _FilterDiagnostics( self, predicate ):
matched_diags = []
line_to_diags = self._buffer_number_to_line_to_diags[
vim.current.buffer.number ]
for diags in itervalues( line_to_diags ):
matched_diags.extend( list(
self._ApplyDiagnosticFilter( diags, predicate ) ) )
return matched_diags
def _UpdateSquiggles( buffer_number_to_line_to_diags ):
vimsupport.ClearYcmSyntaxMatches()
line_to_diags = buffer_number_to_line_to_diags[ vim.current.buffer.number ]
for diags in itervalues( line_to_diags ):
for diag in diags:
location_extent = diag[ 'location_extent' ]
is_error = _DiagnosticIsError( diag )
if location_extent[ 'start' ][ 'line_num' ] < 0:
location = diag[ 'location' ]
vimsupport.AddDiagnosticSyntaxMatch(
location[ 'line_num' ],
location[ 'column_num' ] )
else:
vimsupport.AddDiagnosticSyntaxMatch(
location_extent[ 'start' ][ 'line_num' ],
location_extent[ 'start' ][ 'column_num' ],
location_extent[ 'end' ][ 'line_num' ],
location_extent[ 'end' ][ 'column_num' ],
is_error = is_error )
for diag_range in diag[ 'ranges' ]:
vimsupport.AddDiagnosticSyntaxMatch(
diag_range[ 'start' ][ 'line_num' ],
diag_range[ 'start' ][ 'column_num' ],
diag_range[ 'end' ][ 'line_num' ],
diag_range[ 'end' ][ 'column_num' ],
is_error = is_error )
def _UpdateSigns( placed_signs, buffer_number_to_line_to_diags, next_sign_id ):
new_signs, kept_signs, next_sign_id = _GetKeptAndNewSigns(
placed_signs, buffer_number_to_line_to_diags, next_sign_id
)
# A dummy sign is used to prevent "flickering" in Vim when the last sign
# gets deleted from the buffer. The dummy sign prevents Vim from collapsing
# the sign column in that case.
# There's also a vim bug which causes the whole window to redraw in some
# conditions (vim redraw logic is very complex). But, somehow, if we place a
# dummy sign before placing other "real" signs, it will not redraw the
# buffer (patch to vim pending).
dummy_sign_needed = not kept_signs and new_signs
if dummy_sign_needed:
vimsupport.PlaceDummySign( next_sign_id + 1,
vim.current.buffer.number,
new_signs[ 0 ].line )
# We place only those signs that haven't been placed yet.
new_placed_signs = _PlaceNewSigns( kept_signs, new_signs )
# We use incremental placement: signs already placed on the correct lines
# are not deleted and re-placed, which should improve performance when there
# are many diagnostics. Signs that no longer correspond to a current
# diagnostic are deleted.
_UnplaceObsoleteSigns( kept_signs, placed_signs )
if dummy_sign_needed:
vimsupport.UnPlaceDummySign( next_sign_id + 1, vim.current.buffer.number )
return new_placed_signs, next_sign_id
def _GetKeptAndNewSigns( placed_signs, buffer_number_to_line_to_diags,
next_sign_id ):
new_signs = []
kept_signs = []
for buffer_number, line_to_diags in iteritems(
buffer_number_to_line_to_diags ):
if not vimsupport.BufferIsVisible( buffer_number ):
continue
for line, diags in iteritems( line_to_diags ):
for diag in diags:
sign = _DiagSignPlacement( next_sign_id,
line,
buffer_number,
_DiagnosticIsError( diag ) )
if sign not in placed_signs:
new_signs += [ sign ]
next_sign_id += 1
else:
# We use .index here because `sign` contains a new id, but
# we need the sign with the old id to unplace it later on.
# We won't be placing the new sign.
kept_signs += [ placed_signs[ placed_signs.index( sign ) ] ]
return new_signs, kept_signs, next_sign_id
def _PlaceNewSigns( kept_signs, new_signs ):
placed_signs = kept_signs[:]
for sign in new_signs:
# Do not set two signs on the same line, it will screw up storing sign
# locations.
if sign in placed_signs:
continue
vimsupport.PlaceSign( sign.id, sign.line, sign.buffer, sign.is_error )
placed_signs.append( sign )
return placed_signs
def _UnplaceObsoleteSigns( kept_signs, placed_signs ):
for sign in placed_signs:
if sign not in kept_signs:
vimsupport.UnplaceSignInBuffer( sign.buffer, sign.id )
def _ConvertDiagListToDict( diag_list ):
buffer_to_line_to_diags = defaultdict( lambda: defaultdict( list ) )
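# Maps buffer number -> line number -> list of diags, e.g. (illustrative):
#   { 3: { 12: [ diag_a, diag_b ], 40: [ diag_c ] } }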
for diag in diag_list:
location = diag[ 'location' ]
buffer_number = vimsupport.GetBufferNumberForFilename(
location[ 'filepath' ] )
line_number = location[ 'line_num' ]
buffer_to_line_to_diags[ buffer_number ][ line_number ].append( diag )
for line_to_diags in itervalues( buffer_to_line_to_diags ):
for diags in itervalues( line_to_diags ):
# We also want errors to be listed before warnings so that errors aren't
# hidden by the warnings; Vim won't place a sign over an existing one.
diags.sort( key = lambda diag: ( diag[ 'location' ][ 'column_num' ],
diag[ 'kind' ] ) )
return buffer_to_line_to_diags
_DiagnosticIsError = CompileLevel( 'error' )
_DiagnosticIsWarning = CompileLevel( 'warning' )
def _NormalizeDiagnostic( diag ):
def ClampToOne( value ):
return value if value > 0 else 1
location = diag[ 'location' ]
location[ 'column_num' ] = ClampToOne( location[ 'column_num' ] )
location[ 'line_num' ] = ClampToOne( location[ 'line_num' ] )
return diag
class _DiagSignPlacement(
namedtuple( "_DiagSignPlacement",
[ 'id', 'line', 'buffer', 'is_error' ] ) ):
# We want two signs that have different ids but the same location to compare
# equal. ID doesn't matter.
def __eq__( self, other ):
return ( self.line == other.line and
self.buffer == other.buffer and
self.is_error == other.is_error )
| gpl-3.0 |