repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
EUDAT-B2SHARE/invenio-old | modules/miscutil/lib/pluginutils.py | 3 | 35174 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""
This module implement a generic plugin container facility.
"""
import sys
import os
import glob
import inspect
import imp
from invenio.errorlib import register_exception
from invenio.config import CFG_PYLIBDIR
from invenio.textutils import wrap_text_in_a_box
class InvenioPluginContainerError(Exception):
    """
    Raised when some error happens during plugin loading or signature
    checking.
    """
class PluginContainer(object):
    """
    A I{plugin container}: discovers plugin modules via glob pathnames
    and exposes the correctly loaded ("enabled") plugins through a
    read-only subset of the dict interface.

    Usage::

        >>> websubmit_functions = PluginContainer(
        ...     os.path.join(CFG_PYLIBDIR,
        ...         'invenio', 'websubmit_functions', '*.py'))
        >>> case_eds = websubmit_functions['CaseEDS']

    @param plugin_pathnames: zero or more glob patterns locating plugins.
    @type plugin_pathnames: string/list
    @param plugin_builder: callable C{plugin_builder(plugin_name,
        plugin_code)} extracting the actual plugin from the module stored
        in plugin_code; defaults to L{default_plugin_builder}.
    @type plugin_builder: callable
    @param api_version: when set, plugins declaring a different
        C{__plugin_version__} fail to load; C{None} (the default) turns
        the check off.
    @type api_version: integer
    @param plugin_signature: optional stub used to verify that every
        loaded plugin respects a particular signature.
    @type plugin_signature: class/function
    @param external: whether the plugins may be loaded from outside the
        Invenio standard lib directory. Defaults to False.
    @type external: bool
    @param exception_registration: whether loading failures are reported
        via register_exception(). Defaults to True.
    @type exception_registration: bool
    """

    def __init__(self,
                 plugin_pathnames=None,
                 plugin_builder=None,
                 api_version=None,
                 plugin_signature=None,
                 external=False,
                 exception_registration=True):
        ## plugin_name -> {'plugin', 'error', 'plugin_path', 'enabled',
        ## 'api_version'}
        self._plugin_map = {}
        ## Glob patterns, oldest first (later patterns may override).
        self._plugin_pathnames = []
        self._external = external
        self.api_version = api_version
        self._register_exception = exception_registration
        self._plugin_builder = (self.default_plugin_builder
                                if plugin_builder is None
                                else plugin_builder)
        self._plugin_signature = plugin_signature
        if plugin_pathnames:
            self.add_plugin_pathnames(plugin_pathnames)

    @staticmethod
    def default_plugin_builder(plugin_name, plugin_code):
        """
        Default plugin builder: return the attribute of *plugin_code*
        (the module just read from the filesystem) carrying the same
        name as the plugin.

        @param plugin_name: the name of the plugin.
        @type plugin_name: string
        @param plugin_code: the module just read from filesystem.
        @type plugin_code: module
        @return: the plugin
        """
        return getattr(plugin_code, plugin_name)
def add_plugin_pathnames(self, plugin_pathnames):
    """
    Add one or more plugin pathnames (glob patterns such as
    "bibformat_elements/bfe_*.py") and reload all plugins.

    @note: the pathnames are appended to the current list, so plugins
        matched by later pathnames can override earlier ones.
    @param plugin_pathnames: one or more glob patterns.
    @type plugin_pathnames: string/list
    """
    ## BUGFIX: 'type(...) is str' missed str subclasses (and unicode
    ## paths on Python 2), which would then be iterated character by
    ## character by extend(); isinstance() handles every string input.
    if isinstance(plugin_pathnames, str):
        self._plugin_pathnames.append(plugin_pathnames)
    else:
        self._plugin_pathnames.extend(plugin_pathnames)
    self.reload_plugins()
def enable_plugin(self, plugin_name):
    """
    Mark *plugin_name* as enabled.

    @param plugin_name: the plugin name.
    @type plugin_name: string
    @raise KeyError: if no such plugin is known.
    """
    self._plugin_map[plugin_name]['enabled'] = True


def disable_plugin(self, plugin_name):
    """
    Mark *plugin_name* as disabled.

    @param plugin_name: the plugin name.
    @type plugin_name: string
    @raise KeyError: if no such plugin is known.
    """
    self._plugin_map[plugin_name]['enabled'] = False
def plugin_enabled_p(self, plugin_name):
    """
    Tell whether *plugin_name* is correctly enabled.

    @param plugin_name: the plugin name.
    @type plugin_name: string
    @rtype: bool
    @raise KeyError: if no such plugin is known.
    """
    return self._plugin_map[plugin_name]['enabled']


def get_plugin_filesystem_path(self, plugin_name):
    """
    Return the filesystem path the plugin was loaded from.

    @param plugin_name: the plugin name.
    @type plugin_name: string
    @rtype: string
    @raise KeyError: if no such plugin is known.
    """
    return self._plugin_map[plugin_name]['plugin_path']
def get_plugin(self, plugin_name):
    """
    Return the plugin registered under *plugin_name*.

    @param plugin_name: the plugin name.
    @type plugin_name: string
    @return: the plugin
    @raise KeyError: if the plugin does not exist or is not enabled.
    """
    entry = self._plugin_map[plugin_name]
    if not entry['enabled']:
        raise KeyError('"%s" is not enabled' % plugin_name)
    return entry['plugin']
def get_broken_plugins(self):
    """
    Return a map between plugin names and the C{sys.exc_info()}
    captured when their loading failed.

    @return: plugin_name -> sys.exc_info().
    @rtype: dict
    """
    return dict((name, entry['error'])
                for name, entry in self._plugin_map.items()
                if entry['error'])
def reload_plugins(self, reload=False):
    """
    (Re)load every plugin matched by the registered pathnames.

    @note: a plugin with the same name as an already loaded one
        overrides it (provided their signatures are compatible); a
        plugin that fails to load is recorded as disabled together
        with the captured sys.exc_info().
    @param reload: force re-import of already imported modules. Kept
        for backwards compatibility (older versions never re-imported
        due to a bug).
    """
    for path in self._plugin_pathnames_iterator():
        self._load_plugin(path, reload=reload)
def normalize_plugin_path(self, plugin_path):
    """
    Return the absolute, normalized form of *plugin_path*.

    @param plugin_path: the plugin path.
    @type plugin_path: string
    @return: the normalized plugin path.
    @rtype: string
    @raise ValueError: if the container is not external and the path is
        not located under CFG_PYLIBDIR/invenio.
    """
    plugin_path = os.path.abspath(plugin_path)
    if not self._external:
        ## Only compute the reference directory when the containment
        ## check is actually performed.
        invenio_path = os.path.abspath(os.path.join(CFG_PYLIBDIR, 'invenio'))
        ## BUGFIX(minor): plugin_path was redundantly abspath-ed twice.
        if not plugin_path.startswith(invenio_path):
            raise ValueError('A plugin should be stored under "%s" ("%s" was'
                             ' specified)' % (invenio_path, plugin_path))
    return plugin_path
def _plugin_pathnames_iterator(self):
    """
    Iterate over every normalized plugin path matched by the registered
    glob patterns, oldest pattern first (so that later patterns can
    override plugins of earlier ones).

    @return: iterator over normalized plugin paths.
    @rtype: iterator
    """
    for pattern in self._plugin_pathnames:
        for match in glob.glob(pattern):
            yield self.normalize_plugin_path(match)
@staticmethod
def get_plugin_name(plugin_path):
    """
    Derive the plugin name from its filesystem path: the basename,
    without a trailing ".py" extension.

    @param plugin_path: the filesystem path to the plugin code.
    @type plugin_path: string
    @return: the plugin name.
    @rtype: string
    """
    name = os.path.basename(plugin_path)
    if name.endswith('.py'):
        name = name[:-3]
    return name
def _load_plugin(self, plugin_path, reload=False):
    """
    Load a plugin into the plugin map.

    @note: if the plugin_name calculated from plugin_path corresponds to
        an already existing plugin, the old plugin will be overridden and
        if the old plugin was correctly loaded but disabled, the new
        plugin will be disabled too.
    @param plugin_path: the plugin path.
    @type plugin_path: string
    @param reload: when True, re-import the module even if it is already
        present in sys.modules.
    @type reload: bool
    """
    api_version = None
    try:
        plugin_name = self.get_plugin_name(plugin_path)
        # Let's see if the module is already loaded
        plugin = None
        if plugin_name in sys.modules:
            mod = sys.modules[plugin_name]
            ## Reuse the cached module only when it was imported from the
            ## same file (extension dropped, so .py/.pyc/.pyo all match).
            if os.path.splitext(mod.__file__)[0] == os.path.splitext(plugin_path)[0]:
                plugin = mod
        if not plugin or reload:
            # Let's load the plugin module.
            plugin_fp, plugin_path, plugin_desc = imp.find_module(
                plugin_name, [os.path.dirname(plugin_path)]
            )
            try:
                plugin = imp.load_module(
                    plugin_name, plugin_fp, plugin_path, plugin_desc
                )
            finally:
                ## imp.find_module() may hand back an open file object;
                ## close it even when load_module() raises.
                if plugin_fp:
                    plugin_fp.close()
        ## Let's check for API version.
        api_version = getattr(plugin, '__plugin_version__', None)
        if self.api_version and api_version != self.api_version:
            raise InvenioPluginContainerError("Plugin version mismatch."
                " Expected %s, found %s" % (self.api_version, api_version))
        ## Let's load the actual plugin
        plugin = self._plugin_builder(plugin_name, plugin)
        ## Are we overriding an already loaded plugin?
        enabled = True
        if plugin_name in self._plugin_map:
            old_plugin = self._plugin_map[plugin_name]
            if old_plugin['error'] is None:
                ## Keep the enabled/disabled state chosen for the old
                ## plugin, and require the replacement to be compatible.
                enabled = old_plugin['enabled']
                check_signature(plugin_name, old_plugin['plugin'], plugin)
        ## Let's check the plugin signature.
        if self._plugin_signature:
            check_signature(plugin_name, self._plugin_signature, plugin)
        self._plugin_map[plugin_name] = {
            'plugin': plugin,
            'error': None,
            'plugin_path': plugin_path,
            'enabled': enabled,
            'api_version': api_version,
        }
    except Exception:
        ## NOTE(review): if get_plugin_name() itself raised, plugin_name
        ## would be unbound here and this handler would fail with a
        ## NameError of its own -- confirm.
        if self._register_exception:
            register_exception()
        self._plugin_map[plugin_name] = {
            'plugin': None,
            ## Store the full exception triple so callers can inspect or
            ## re-raise it later (see get_broken_plugins()).
            'error': sys.exc_info(),
            'plugin_path': plugin_path,
            'enabled': False,
            'api_version': api_version,
        }
def __getitem__(self, plugin_name):
    """
    As in C{dict.__getitem__}, after normalizing *plugin_name*; only a
    correctly enabled plugin can be retrieved.

    @param plugin_name: the name of the plugin
    @type plugin_name: string
    @return: the plugin.
    @raise KeyError: if the plugin is unknown, broken or disabled.
    """
    plugin_name = self.get_plugin_name(plugin_name)
    entry = self._plugin_map.get(plugin_name)
    if entry is not None and entry['enabled'] is True:
        return entry['plugin']
    raise KeyError('"%s" does not exists or is not correctly enabled' %
                   plugin_name)
def __contains__(self, plugin_name):
    """
    As in C{dict.__contains__}, after normalizing *plugin_name*; only
    correctly enabled plugins count as contained.

    @param plugin_name: the name of the plugin
    @type plugin_name: string
    @rtype: bool
    """
    plugin_name = self.get_plugin_name(plugin_name)
    entry = self._plugin_map.get(plugin_name)
    return entry is not None and entry['enabled'] is True
def __len__(self):
    """
    As in C{dict.__len__}, counting only correctly enabled plugins.

    @rtype: integer
    """
    return sum(1 for entry in self._plugin_map.values()
               if entry['enabled'])
def get(self, plugin_name, default=None):
    """
    As in C{dict.get}: return the enabled plugin named *plugin_name*,
    or *default* when it is missing, broken or disabled.

    @param plugin_name: the name of the plugin
    @type plugin_name: string
    @param default: value returned when no enabled plugin matches.
    @return: the plugin, or *default*.
    """
    try:
        return self.__getitem__(plugin_name)
    except KeyError:
        return default
def has_key(self, plugin_name):
    """
    Deprecated alias for C{plugin_name in self}: True when the plugin
    exists and is correctly enabled.

    @param plugin_name: the name of the plugin
    @type plugin_name: string
    @rtype: bool
    """
    return self.__contains__(plugin_name)
def items(self):
    """
    As in C{dict.items}, restricted to correctly enabled plugins.

    @return: list of (plugin_name, plugin).
    @rtype: [(plugin_name, plugin), ...]
    """
    return [(name, entry['plugin'])
            for name, entry in self._plugin_map.items()
            if entry['enabled']]
def iteritems(self):
    """
    As in C{dict.iteritems}, restricted to correctly enabled plugins.

    @return: iterator over the (plugin_name, plugin) items.
    """
    for name, entry in self._plugin_map.items():
        if entry['enabled']:
            yield (name, entry['plugin'])
def iterkeys(self):
    """
    As in C{dict.iterkeys}, restricted to correctly enabled plugins.

    @return: iterator over the plugin_names.
    """
    for name, entry in self._plugin_map.items():
        if entry['enabled']:
            yield name


__iter__ = iterkeys
def itervalues(self):
    """
    As in C{dict.itervalues}, restricted to correctly enabled plugins.

    @return: iterator over the plugins.
    """
    for entry in self._plugin_map.values():
        if entry['enabled']:
            yield entry['plugin']
def keys(self):
    """
    As in C{dict.keys}, restricted to correctly enabled plugins.

    @return: the list of enabled plugin_names.
    @rtype: list of strings
    """
    return [name for name, entry in self._plugin_map.items()
            if entry['enabled']]
def values(self):
    """
    As in C{dict.values}, restricted to correctly enabled plugins.

    @return: the list of enabled plugin codes.
    """
    return [entry['plugin'] for entry in self._plugin_map.values()
            if entry['enabled']]
def get_enabled_plugins(self):
    """
    Return a map of the correctly enabled plugins.

    @return: a map plugin_name -> plugin
    @rtype: dict
    """
    return dict((name, entry['plugin'])
                for name, entry in self._plugin_map.items()
                if entry['enabled'])
def check_signature(object_name, reference_object, other_object):
"""
Given a reference class or function check if an other class or function
could be substituted without causing any instantiation/usage issues.
@param object_name: the name of the object being checked.
@type object_name: string
@param reference_object: the reference class or function.
@type reference_object: class/function
@param other_object: the other class or function to be checked.
@type other_object: class/function
@raise InvenioPluginContainerError: in case the other object is not
compatible with the reference object.
"""
try:
if inspect.isclass(reference_object):
## if the reference_object is a class
if inspect.isclass(other_object):
## if the other_object is a class
if issubclass(other_object, reference_object):
## if the other_object is derived from the reference we
## should check for all the method in the former that
## exists in the the latter, wethever they recursively have
## the same signature.
reference_object_map = dict(
inspect.getmembers(reference_object,
inspect.isroutine))
for other_method_name, other_method_code in \
inspect.getmembers(other_object, inspect.isroutine):
if other_method_name in reference_object_map:
check_signature(object_name,
reference_object_map[other_method_name],
other_method_code)
else:
## if the other_object is not derived from the
## reference_object then all the method declared in the
## latter should exist in the former and they should
## recursively have the same signature.
other_object_map = dict(
inspect.getmembers(other_object, inspect.isroutine))
for reference_method_name, reference_method_code in \
inspect.getmembers(
reference_object, inspect.isroutine):
if reference_method_name in other_object_map:
check_signature(
object_name, reference_method_code,
other_method_code)
else:
raise InvenioPluginContainerError('"%s", which'
' exists in the reference class, does not'
' exist in the other class, and the reference'
' class is not an anchestor of the other' %
reference_method_name)
else:
## We are comparing apples and oranges!
raise InvenioPluginContainerError("%s (the reference object)"
" is a class while %s (the other object) is not a class" %
(reference_object, other_object))
elif inspect.isroutine(reference_object):
## if the reference_object is a function
if inspect.isroutine(other_object):
## if the other_object is a function we will compare the
## reference_object and other_object function signautre i.e.
## their parameters.
reference_args, reference_varargs, reference_varkw, \
reference_defaults = inspect.getargspec(reference_object)
other_args, other_varargs, other_varkw, \
other_defaults = inspect.getargspec(other_object)
## We normalize the reference_defaults to be a list
if reference_defaults is not None:
reference_defaults = list(reference_defaults)
else:
reference_defaults = []
## We normalize the other_defaults to be a list
if other_defaults is not None:
other_defaults = list(other_defaults)
else:
other_defaults = []
## Check for presence of missing parameters in other function
if not (other_varargs or other_varkw):
for reference_arg in reference_args:
if reference_arg not in other_args:
raise InvenioPluginContainerError('Argument "%s"'
' in reference function %s does not exist in'
' the other function %s' % (reference_arg,
reference_object, other_object))
## Check for presence of additional parameters in other
## function
if not (reference_varargs or reference_varkw):
for other_arg in other_args:
if other_arg not in reference_args:
raise InvenioPluginContainerError('Argument "%s"'
' in other function %s does not exist in the'
' reference function %s' % (other_arg,
other_object, reference_object))
## Check sorting of arguments
for reference_arg, other_arg in map(
None, reference_args, other_args):
if not((reference_arg == other_arg) or
(reference_arg is None and
(reference_varargs or reference_varkw)) or
(other_arg is None and
(other_args or other_varargs))):
raise InvenioPluginContainerError('Argument "%s" in'
' the other function is in the position of'
' argument "%s" in the reference function, i.e.'
' the order of arguments is not respected' %
(other_arg, reference_arg))
if len(reference_defaults) != len(other_defaults) and \
not (reference_args or reference_varargs
or other_args or other_varargs):
raise InvenioPluginContainerError("Default parameters in"
" the other function are not corresponding to the"
" default of parameters of the reference function")
else:
## We are comparing apples and oranges!
raise InvenioPluginContainerError('%s (the reference object)'
' is a function while %s (the other object) is not a'
' function' % (reference_object, other_object))
except InvenioPluginContainerError, err:
try:
sourcefile = inspect.getsourcefile(other_object)
sourceline = inspect.getsourcelines(other_object)[1]
except IOError:
## other_object is not loaded from a real file
sourcefile = 'N/A'
sourceline = 'N/A'
raise InvenioPluginContainerError('Error in checking signature for'
' "%s" as defined at "%s" (line %s): %s' %
(object_name, sourcefile, sourceline, err))
def create_enhanced_plugin_builder(
    compulsory_objects=None, optional_objects=None, other_data=None):
    """
    Create a plugin_builder function suitable to extract from a plugin
    module some specific objects (either compulsory or optional) and
    other simpler data.

    >>> def dummy_needed_funct1(foo, bar):
    ...     pass
    >>> class dummy_needed_class1:
    ...     def __init__(self, baz):
    ...         pass
    >>> def dummy_optional_funct2(boo):
    ...     pass
    >>> create_enhanced_plugin_builder(
    ...     compulsory_objects={
    ...         'needed_funct1' : dummy_needed_funct1,
    ...         'needed_class1' : dummy_needed_class1
    ...     },
    ...     optional_objects={
    ...         'optional_funct2' : dummy_optional_funct2,
    ...     },
    ...     other_data={
    ...         'CFG_SOME_DATA' : (str, ''),
    ...         'CFG_SOME_INT' : (int, 0),
    ...     })
    <function plugin_builder at 0xb7812064>

    @param compulsory_objects: map of name of an object to look for
        inside the C{plugin_code} and a I{signature} for a class or
        callable. Every name specified in this map B{must exist} in the
        plugin_code, otherwise the plugin will fail to load.
    @type compulsory_objects: dict
    @param optional_objects: map of name of an object to look for inside
        the C{plugin_code} and a I{signature} for a class or callable.
        Names in this map are checked only when present.
    @type optional_objects: dict
    @param other_data: map of other simple data that can be loaded from
        the plugin_code. The map has the same format of the C{content}
        parameter of L{invenio.webinterface_handler.wash_urlargd}.
    @type other_data: dict
    @return: a I{plugin_builder} function usable with the
        C{PluginContainer} constructor. The built plugin is a map whose
        keys are those of the three parameter maps, mapped to the
        corresponding extracted class/callable/simple data.
    """
    from invenio.webinterface_handler import wash_urlargd
    def plugin_builder(plugin_name, plugin_code):
        """
        Enhanced plugin_builder created by
        L{create_enhanced_plugin_builder}.

        @param plugin_name: the name of the plugin.
        @type plugin_name: string
        @param plugin_code: the code of the module as just read from
            filesystem.
        @type plugin_code: module
        @return: the plugin in the form of a map.
        """
        plugin = {}
        ## Every compulsory object must exist and match its signature.
        if compulsory_objects:
            for object_name, object_signature in \
                compulsory_objects.iteritems():
                the_object = getattr(plugin_code, object_name, None)
                if the_object is None:
                    raise InvenioPluginContainerError('Plugin "%s" does not '
                        'contain compulsory object "%s"' % (plugin_name,
                        object_name))
                try:
                    check_signature(object_name, the_object, object_signature)
                except InvenioPluginContainerError, err:
                    raise InvenioPluginContainerError('Plugin "%s" contains '
                        'object "%s" with a wrong signature: %s' %
                        (plugin_name, object_name, err))
                plugin[object_name] = the_object
        ## Optional objects are signature-checked only when present.
        if optional_objects:
            for object_name, object_signature in optional_objects.iteritems():
                the_object = getattr(plugin_code, object_name, None)
                if the_object is not None:
                    try:
                        check_signature(
                            object_name,
                            the_object,
                            object_signature)
                    except InvenioPluginContainerError, err:
                        raise InvenioPluginContainerError('Plugin "%s" '
                            'contains object "%s" with a wrong signature: %s' %
                            (plugin_name, object_name, err))
                    plugin[object_name] = the_object
        ## Remaining simple data are collected (falling back on their
        ## declared defaults) and washed like request arguments.
        if other_data:
            the_other_data = {}
            for data_name, (dummy, data_default) in other_data.iteritems():
                the_other_data[data_name] = getattr(
                    plugin_code, data_name, data_default)
            try:
                the_other_data = wash_urlargd(the_other_data, other_data)
            except Exception, err:
                raise InvenioPluginContainerError('Plugin "%s" contains other '
                    'data with problems: %s' % (plugin_name, err))
            plugin.update(the_other_data)
        return plugin
    return plugin_builder
def get_callable_signature_as_string(the_callable):
    """
    Return a string representing the callable as if it had been declared
    on the prompt.

    >>> def foo(arg1, arg2, arg3='val1', arg4='val2', *args, **argd):
    ...     pass
    >>> get_callable_signature_as_string(foo)
    "def foo(arg1, arg2, arg3='val1', arg4='val2', *args, **argd)"

    @param the_callable: the callable to be analyzed.
    @type the_callable: function/callable.
    @return: the signature.
    @rtype: string
    """
    try:
        args, varargs, varkw, defaults = inspect.getargspec(the_callable)
    except AttributeError:
        ## inspect.getargspec() was removed in Python 3.11.
        spec = inspect.getfullargspec(the_callable)
        args, varargs, varkw, defaults = (spec.args, spec.varargs,
                                          spec.varkw, spec.defaults)
    ## BUGFIX: None was previously used to mark "no default", so an
    ## explicit default of None was rendered as a mandatory argument
    ## (e.g. "def f(a)" for def f(a=None)); use a private sentinel.
    no_default = object()
    tmp_args = list(args)
    args_dict = {}
    defaults = list(defaults or ())
    ## Defaults align with the tail of the argument list.
    while defaults:
        args_dict[tmp_args.pop()] = defaults.pop()
    while tmp_args:
        args_dict[tmp_args.pop()] = no_default
    args_list = []
    for arg in args:
        if args_dict[arg] is not no_default:
            args_list.append("%s=%s" % (arg, repr(args_dict[arg])))
        else:
            args_list.append(arg)
    if varargs:
        args_list.append("*%s" % varargs)
    if varkw:
        args_list.append("**%s" % varkw)
    return "def %s(%s)" % (the_callable.__name__, ', '.join(args_list))
def get_callable_documentation(the_callable):
    """
    Return the callable's signature plus its docstring, rendered inside
    an ASCII double-line box.

    @param the_callable: the callable to be analyzed.
    @type the_callable: function/callable.
    @return: the boxed documentation.
    @rtype: string
    """
    body_text = getattr(the_callable, '__doc__') or 'No documentation'
    return wrap_text_in_a_box(
        title=get_callable_signature_as_string(the_callable),
        body=body_text.replace('\n', '\n\n'),
        style='ascii_double')
def check_arguments_compatibility(the_callable, argd):
    """
    Check whether calling ``the_callable(**argd)`` would bind correctly,
    without actually invoking the callable.

    >>> def foo(arg1, arg2, arg3='val1', arg4='val2', *args, **argd):
    ...     pass
    >>> check_arguments_compatibility(foo, {'arg1': 'bla', 'arg2': 'blo'})
    >>> check_arguments_compatibility(foo, {'arg1': 'bla'})
    Traceback (most recent call last):
        ...
    ValueError: ...

    @param the_callable: the callable to be analyzed.
    @type the_callable: function/callable
    @param argd: the arguments to be passed.
    @type argd: dict
    @raise ValueError: in case of incompatibility
    """
    if not argd:
        argd = {}
    try:
        args, dummy, varkw, defaults = inspect.getargspec(the_callable)
    except AttributeError:
        ## inspect.getargspec() was removed in Python 3.11.
        spec = inspect.getfullargspec(the_callable)
        args, dummy, varkw, defaults = (spec.args, spec.varargs,
                                        spec.varkw, spec.defaults)
    tmp_args = list(args)
    optional_args = []
    args_dict = {}
    defaults = list(defaults or ())
    ## Defaults align with the tail of the argument list.
    while defaults:
        arg = tmp_args.pop()
        optional_args.append(arg)
        args_dict[arg] = defaults.pop()
    while tmp_args:
        args_dict[tmp_args.pop()] = None
    ## Every provided argument must be expected (or absorbed by **kwargs).
    for arg in argd:
        if arg in args_dict:
            del args_dict[arg]
        elif not varkw:
            raise ValueError('Argument %s not expected when calling callable '
                '"%s" with arguments %s' % (
                arg, get_callable_signature_as_string(the_callable), argd))
    ## BUGFIX: iterate over a snapshot of the keys; deleting from a dict
    ## while iterating its live .keys() view raises RuntimeError on
    ## Python 3.
    for arg in list(args_dict):
        if arg in optional_args:
            del args_dict[arg]
    ## Anything left is a mandatory argument that was not supplied.
    if args_dict:
        raise ValueError('Arguments %s not specified when calling callable '
            '"%s" with arguments %s' % (
            ', '.join(args_dict.keys()),
            get_callable_signature_as_string(the_callable),
            argd))
| gpl-2.0 |
kursitet/edx-platform | cms/djangoapps/contentstore/management/commands/tests/test_fix_not_found.py | 87 | 2147 | """
Tests for the fix_not_found management command
"""
from django.core.management import CommandError, call_command
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
class TestFixNotFound(ModuleStoreTestCase):
    """
    Tests for the fix_not_found management command, which removes
    dangling child references from a course's children list.
    """
    def test_no_args(self):
        """
        The command must fail with a usage error when called without a
        course key argument.
        """
        with self.assertRaisesRegexp(CommandError, "Error: too few arguments"):
            call_command('fix_not_found')
    def test_fix_not_found_non_split(self):
        """
        The management command doesn't work on non split courses
        """
        course = CourseFactory.create(default_store=ModuleStoreEnum.Type.mongo)
        with self.assertRaisesRegexp(CommandError, "The owning modulestore does not support this command."):
            call_command("fix_not_found", unicode(course.id))
    def test_fix_not_found(self):
        """
        Running the command on a split course with a dangling child
        pointer must drop only the dangling entry.
        """
        course = CourseFactory.create(default_store=ModuleStoreEnum.Type.split)
        ItemFactory.create(category='chapter', parent_location=course.location)
        # get course again in order to update its children list
        course = self.store.get_course(course.id)
        # create a dangling usage key that we'll add to the course's children list
        dangling_pointer = course.id.make_usage_key('chapter', 'DanglingPointer')
        course.children.append(dangling_pointer)
        self.store.update_item(course, self.user.id)
        # the course block should now point to two children, one of which
        # doesn't actually exist
        self.assertEqual(len(course.children), 2)
        self.assertIn(dangling_pointer, course.children)
        call_command("fix_not_found", unicode(course.id))
        # make sure the dangling pointer was removed from
        # the course block's children
        course = self.store.get_course(course.id)
        self.assertEqual(len(course.children), 1)
        self.assertNotIn(dangling_pointer, course.children)
tszabo-ro/paparazzi | sw/tools/tcp_aircraft_server/phoenix/__init__.py | 86 | 4470 | #Copyright 2014, Antoine Drouin
"""
Phoenix is a Python library for interacting with Paparazzi
"""
import math
"""
Unit convertions
"""
def rad_of_deg(d):
    """Convert an angle from degrees to radians."""
    return d/180.*math.pi


def deg_of_rad(r):
    """Convert an angle from radians to degrees."""
    return r*180./math.pi


def rps_of_rpm(r):
    """Convert a rate from rotations per minute to radians per second."""
    return r*2.*math.pi/60.


def rpm_of_rps(r):
    """Convert a rate from radians per second to rotations per minute."""
    return r/2./math.pi*60.


def m_of_inch(i):
    """Convert a length from inches to meters."""
    return i*0.0254
"""
Plotting
"""
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
#: default text properties applied to axis titles by decorate()
my_title_spec = {'color' : 'k', 'fontsize' : 20 }


def save_if(filename):
    """Write the current matplotlib figure to *filename* (80 dpi) when a
    filename is given; do nothing otherwise."""
    if filename:
        matplotlib.pyplot.savefig(filename, dpi=80)
def prepare_fig(fig=None, window_title=None, figsize=(20.48, 10.24), margins=None):
    """
    Create a matplotlib figure, or decorate an existing one.

    @param fig: an existing figure to reuse; a new one is created when
        None.
    @param window_title: optional window title.
    @param figsize: size (inches) of a newly created figure.
    @param margins: optional (left, bottom, right, top, wspace, hspace)
        tuple passed to subplots_adjust().
    @return: the figure.
    """
    ## BUGFIX(idiom): 'fig == None' invoked __eq__; identity is intended.
    if fig is None:
        fig = plt.figure(figsize=figsize)
    if margins:
        left, bottom, right, top, wspace, hspace = margins
        fig.subplots_adjust(left=left, right=right, bottom=bottom, top=top,
                            hspace=hspace, wspace=wspace)
    if window_title:
        fig.canvas.set_window_title(window_title)
    return fig
def decorate(ax, title=None, xlab=None, ylab=None, legend=None, xlim=None, ylim=None):
    """
    Apply common decorations (grid, labels, title, legend, axis limits)
    to a matplotlib axis.

    @param ax: the axis to decorate.
    @param title: optional axis title (rendered with my_title_spec).
    @param xlab: optional x-axis label.
    @param ylab: optional y-axis label.
    @param legend: optional legend entries (any non-None value is used).
    @param xlim: optional (min, max) x limits.
    @param ylim: optional (min, max) y limits.
    """
    ax.xaxis.grid(color='k', linestyle='-', linewidth=0.2)
    ax.yaxis.grid(color='k', linestyle='-', linewidth=0.2)
    if xlab:
        ax.xaxis.set_label_text(xlab)
    if ylab:
        ax.yaxis.set_label_text(ylab)
    if title:
        ax.set_title(title, my_title_spec)
    ## BUGFIX: '<>' is Python 2-only syntax (a SyntaxError on Python 3);
    ## 'is not None' is the portable, intended comparison.
    if legend is not None:
        ax.legend(legend, loc='best')
    if xlim is not None:
        ax.set_xlim(xlim[0], xlim[1])
    if ylim is not None:
        ax.set_ylim(ylim[0], ylim[1])
"""
Messages
"""
#: dictionary mapping the C type to its length in bytes (e.g char -> 1)
TYPE_TO_LENGTH_MAP = {
    "char" : 1,
    "uint8" : 1,
    "int8" : 1,
    "uint16" : 2,
    "int16" : 2,
    "uint32" : 4,
    "int32" : 4,
    "float" : 4,
    "double" : 8,
}
#: dictionary mapping the C type to correct format string
TYPE_TO_PRINT_MAP = {
    float : "%f",
    str : "%s",
    chr : "%c",
    int : "%d"
}
## Special aircraft-id address values -- presumably broadcast, test rig
## and ground station addresses; confirm against the datalink protocol.
ACID_ALL = 0xFF
ACID_TEST = 0xFE
ACID_GROUNDSTATION = 0xFD
#: dictionary mapping debug types to format characters
DEBUG_MESSAGES = {
    "DEBUG_UINT8" : "%d",
    "DEBUG_INT32" : "%d",
    "DEBUG_FLOAT" : "%#f"
}
"""
Binary logs
See format description in sw/airborne/subsystems/datalink/fms_link.c
"""
import struct
def hex_of_bin(b):
    """Return *b* rendered as space-separated uppercase hex byte values."""
    return ' '.join("%02X" % ord(char) for char in b)
import pdb
def read_binary_log(filename, tick_freq = 2*512.):
    """
    Parse a Paparazzi binary log file into a list of
    [timestamp_in_seconds, [[msg_id, msg_payload], ...]] packets.

    See the format description in
    sw/airborne/subsystems/datalink/fms_link.c.

    @param filename: path of the binary log file.
    @param tick_freq: timer ticks per second used to convert the raw
        packet timestamp into seconds (default assumes 2*512 Hz --
        confirm against the airborne configuration).
    """
    f = open(filename, "rb")
    d = f.read()
    ## Packet header: uint32 timestamp + uint16 payload length (6 bytes).
    packet_header_len = 6
    ## Message header: uint8 length + uint8 id (2 bytes).
    msg_header_len = 2
    def read_packet(d, packet_start):
        ## Decode one packet header, then its payload messages.
        payload_start = packet_start+packet_header_len
        timestamp, payload_len = struct.unpack("IH", d[packet_start:payload_start])
        msgs = read_packet_payload(d, payload_start, payload_len)
        ## +2 skips the two trailing bytes after the payload -- presumably
        ## a checksum; confirm against fms_link.c.
        next_packet = payload_start+payload_len+2
        return timestamp, msgs, next_packet
    def read_packet_payload(d, s, l):
        ## Walk the payload, message by message, until the packet end.
        msgs = []
        packet_end = s+l; msg_start = s
        while msg_start<packet_end:
            payload_start = msg_start+msg_header_len
            msg_len, msg_id = struct.unpack("BB", d[msg_start:payload_start])
            payload_end = payload_start+msg_len
            msg_payload = d[payload_start:payload_end]
            msgs.append([msg_id, msg_payload])
            #print msg_id, msg_len, hex_of_bin(msg_payload)
            msg_start = payload_end
        return msgs
    packets = []
    packet_start=0
    while packet_start<len(d):
        timestamp, msgs, next_packet = read_packet(d, packet_start)
        ## Convert raw ticks to seconds while accumulating.
        packets.append([timestamp/tick_freq, msgs])
        #print timestamp, msgs
        packet_start = next_packet
    f.close()
    return packets
def extract_from_binary_log(protocol, packets, msg_names, t_min=None, t_max=None):
    """Extract per-message time series from parsed log *packets*.

    protocol:  object exposing get_message_by_id('telemetry', id) returning a
               message with a .name and .unpack_scaled_values(payload).
    packets:   list of [time, msgs] entries as produced by read_binary_log.
    msg_names: message names to extract, in the order of the returned list.
    t_min/t_max: optional time window; default to the log's first/last time.

    Returns one {'time': [...], 'data': [...]} dict per entry of msg_names.
    """
    ret = [{'time': [], 'data': []} for _ in msg_names]
    if t_min is None:
        t_min = packets[0][0]
    if t_max is None:
        t_max = packets[-1][0]
    for t, msgs in packets:
        if t < t_min or t > t_max:
            continue
        for msg_id, payload in msgs:
            m = protocol.get_message_by_id('telemetry', msg_id)
            try:
                i = msg_names.index(m.name)
            except ValueError:
                # Message not requested: skip it. The original used
                # try/except/finally, so the `finally` block appended the
                # payload using a stale (or undefined) index `i` whenever
                # the name was not found — corrupting the results.
                continue
            ret[i]['time'].append(t)
            ret[i]['data'].append(m.unpack_scaled_values(payload))
    return ret
| gpl-2.0 |
ericfc/django | tests/postgres_tests/test_unaccent.py | 328 | 1884 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.test import modify_settings
from . import PostgreSQLTestCase
from .models import CharFieldModel, TextFieldModel
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.postgres'})
class UnaccentTest(PostgreSQLTestCase):
    """Tests for the PostgreSQL ``unaccent`` lookup on CharField.

    The fixture mixes accented and unaccented spellings of the same letters
    so the tests can show that accents are ignored while case still matters.
    """
    Model = CharFieldModel
    def setUp(self):
        # Three spellings: accented, same case without accents, lowercase.
        self.Model.objects.bulk_create([
            self.Model(field="àéÖ"),
            self.Model(field="aeO"),
            self.Model(field="aeo"),
        ])
    def test_unaccent(self):
        # Accent-insensitive but case-sensitive: "aeo" must be excluded.
        self.assertQuerysetEqual(
            self.Model.objects.filter(field__unaccent="aeO"),
            ["àéÖ", "aeO"],
            transform=lambda instance: instance.field,
            ordered=False
        )
    def test_unaccent_chained(self):
        """
        Check that unaccent can be used chained with a lookup (which should be
        the case since unaccent implements the Transform API)
        """
        # __iexact on top of __unaccent: case folding now applies too.
        self.assertQuerysetEqual(
            self.Model.objects.filter(field__unaccent__iexact="aeO"),
            ["àéÖ", "aeO", "aeo"],
            transform=lambda instance: instance.field,
            ordered=False
        )
        self.assertQuerysetEqual(
            self.Model.objects.filter(field__unaccent__endswith="éÖ"),
            ["àéÖ", "aeO"],
            transform=lambda instance: instance.field,
            ordered=False
        )
    def test_unaccent_accentuated_needle(self):
        # The search value itself is unaccented as well before comparing.
        self.assertQuerysetEqual(
            self.Model.objects.filter(field__unaccent="aéÖ"),
            ["àéÖ", "aeO"],
            transform=lambda instance: instance.field,
            ordered=False
        )
class UnaccentTextFieldTest(UnaccentTest):
    """
    TextField should have the exact same behavior as CharField
    regarding unaccent lookups, so re-run the whole UnaccentTest suite
    against TextFieldModel by only swapping the model under test.
    """
    Model = TextFieldModel
| bsd-3-clause |
fotonauts/fwissr-python | setup.py | 1 | 1303 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
from fwissr.version import VERSION
# Shortcut: `python setup.py publish` builds an sdist and uploads it to PyPI.
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()
# Build the long description from the README plus the changelog. Context
# managers close the files promptly; the original open().read() calls
# leaked both handles.
with open('README.rst') as readme_file:
    readme = readme_file.read()
with open('HISTORY.rst') as history_file:
    history = history_file.read().replace('.. :changelog:', '')
setup(
    name='fwissr',
    version=VERSION,
    description='fwissr is a registry configuration tool.',
    long_description=readme + '\n\n' + history,
    author='Pierre Baillet',
    author_email='pierre@baillet.name',
    url='https://github.com/fotonauts/fwissr-python',
    packages=[
        'fwissr',
        'fwissr.source'
    ],
    scripts=['scripts/fwissr'],
    package_dir={'fwissr': 'fwissr', 'fwissr.source': 'fwissr/source'},
    include_package_data=True,
    install_requires=[
        'pymongo>=2.5.2',
        'PyYAML>=3.10'
    ],
    license="MIT",
    zip_safe=False,
    keywords='fwissr',
    classifiers=[
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        "Programming Language :: Python :: 2",
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7'
    ],
    test_suite='tests',
)
| mit |
cloudmesh/clientweb | cloudmesh_webclient/web.py | 1 | 1293 | from __future__ import print_function
from flask import render_template, Flask
from cloudmesh_base.util import HEADING
from pprint import pprint
from cloudmesh_client.db import CloudmeshDatabase, VM, FLAVOR, IMAGE, DEFAULT
# Module-level Cloudmesh session: opens the database for a hard-coded user
# and refreshes the vm/flavor/image caches from the "india" cloud at import
# time. NOTE(review): import-time DB/network access slows startup and the
# hard-coded user looks like a development leftover — confirm intent.
cm = CloudmeshDatabase(cm_user="gregor")
cm.update("vm", "india")
cm.update("flavor", "india")
cm.update("images", "india")
app = Flask(__name__)
# The string below is a no-op expression statement kept from development.
"""
d = cm.flavors(clouds="india")
"""
@app.route('/list/')
@app.route('/list/<cloud>/<kind>/')
def list(cloud=None, kind=None):
    """Render the listing page for objects of *kind* on *cloud*.

    Both URL parameters are optional and default to the "india" cloud and
    the "flavor" kind. (The function name shadows the builtin `list` but is
    kept: Flask derives the endpoint name from it.)
    """
    if cloud is None:
        cloud = "india"
    if kind is None:
        # BUG FIX: the original read `kind = flavor`, which raises NameError
        # because no name `flavor` exists; the intended default is the string.
        kind = "flavor"
    print (kind, cloud)
    # d = cm.list("flavor", cloud="india", output=dict)
    d = cm.list(FLAVOR)
    print("-------")
    pprint (d)
    print("-------")
    name = "{:} {:}".format(cloud, kind)
    # Column order used by the template for the flavor table.
    order = {}
    order['flavor'] = [
        'cm_id',
        'cm_user',
        'disk',
        'ephemeral_disk',
        'group',
        'id',
        'label',
        'name',
        'price',
        'ram',
        'vcpus'
    ]
    return render_template('list.html', name=name, data=d, order=order['flavor'])
@app.route('/hello/')
@app.route('/hello/<name>')
def hello(name=None):
    """Render the hello page, greeting *name* when one is supplied."""
    context = {'name': name}
    return render_template('hello.html', **context)
if __name__ == '__main__':
app.run(debug=True)
| apache-2.0 |
AdvancedClimateSystems/python-modbus | tests/system/validators.py | 2 | 2302 | import struct
def validate_transaction_id(request_mbap, response):
    """ Check if Transaction id in request and response is equal. """
    request_tid = struct.unpack('>H', request_mbap[:2])
    response_tid = struct.unpack('>H', response[:2])
    assert request_tid == response_tid
def validate_protocol_id(request_mbap, response):
    """ Check if Protocol id in request and response is equal. """
    request_pid = struct.unpack('>H', request_mbap[2:4])
    response_pid = struct.unpack('>H', response[2:4])
    assert request_pid == response_pid
def validate_length(response):
    """ Check if Length field contains actual length of response. """
    (length_field,) = struct.unpack('>H', response[4:6])
    assert length_field == len(response) - 6
def validate_unit_id(request_mbap, response):
    """ Check if Unit id in request and response is equal. """
    request_uid = struct.unpack('>B', request_mbap[6:7])
    response_uid = struct.unpack('>B', response[6:7])
    assert request_uid == response_uid
def validate_response_mbap(request_mbap, response):
    """ Validate if fields in response MBAP contain correct values. """
    # Run the individual field checks in the same order as before so the
    # first failing assertion is unchanged.
    for check in (validate_transaction_id, validate_protocol_id):
        check(request_mbap, response)
    validate_length(response)
    validate_unit_id(request_mbap, response)
def validate_function_code(request, response):
    """ Validate if Function code in request and response equal. """
    request_fc = struct.unpack('>B', request[7:8])[0]
    response_fc = struct.unpack('>B', response[7:8])[0]
    assert request_fc == response_fc
def validate_single_bit_value_byte_count(request, response):
    """ Check of byte count field contains actual byte count and if byte count
    matches with the amount of requests quantity.
    """
    (byte_count,) = struct.unpack('>B', response[8:9])
    (quantity,) = struct.unpack('>H', request[-2:])
    # 8 single-bit statuses are packed per byte: ceiling division.
    expected_byte_count, remainder = divmod(quantity, 8)
    if remainder:
        expected_byte_count += 1
    assert byte_count == len(response) - 9
    assert byte_count == expected_byte_count
def validate_multi_bit_value_byte_count(request, response):
    """ Check of byte count field contains actual byte count and if byte count
    matches with the amount of requests quantity.
    """
    (byte_count,) = struct.unpack('>B', response[8:9])
    (quantity,) = struct.unpack('>H', request[-2:])
    # Each register value occupies two bytes.
    expected_byte_count = 2 * quantity
    assert byte_count == len(response) - 9
    assert byte_count == expected_byte_count
| mpl-2.0 |
xpansa/pmis | stock_analytic_account/wizard/stock_change_product_qty.py | 2 | 3229 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv, orm
from openerp.tools.translate import _
from openerp import tools
class stock_change_product_qty(orm.TransientModel):
    """Extend the stock.change.product.qty wizard with an analytic account.

    Adds an `analytic_account_id` column and propagates it onto the
    inventory line created when the quantity is changed.
    """
    _inherit = "stock.change.product.qty"
    _columns = {
        # Analytic account to book the inventory adjustment against.
        'analytic_account_id': fields.many2one(
            'account.analytic.account', 'Analytic Account'),
    }
    def change_product_qty(self, cr, uid, ids, context=None):
        """ Changes the Product Quantity by making a Physical Inventory.
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        @return: empty dict (closes the wizard)
        """
        if context is None:
            context = {}
        # NOTE(review): `context and context.get(...)` yields {} (falsy) when
        # the context is empty, so the assert below also fires in that case —
        # presumably intentional, but confirm.
        rec_id = context and context.get('active_id', False)
        assert rec_id, _('Active ID is not set in Context')
        inventry_obj = self.pool.get('stock.inventory')
        inventry_line_obj = self.pool.get('stock.inventory.line')
        prod_obj_pool = self.pool.get('product.product')
        # Product whose quantity is being adjusted (the wizard's active record).
        res_original = prod_obj_pool.browse(cr, uid, rec_id, context=context)
        for data in self.browse(cr, uid, ids, context=context):
            if data.new_quantity < 0:
                raise osv.except_osv(_('Warning!'),
                                     _('Quantity cannot be negative.'))
            # One ad-hoc inventory per wizard record, named after the product.
            inventory_id = inventry_obj.create(
                cr, uid,
                {'name': _('INV: %s') % tools.ustr(res_original.name)},
                context=context)
            line_data = {
                'inventory_id': inventory_id,
                'product_qty': data.new_quantity,
                'location_id': data.location_id.id,
                'product_id': rec_id,
                'product_uom': res_original.uom_id.id,
                'prod_lot_id': data.prodlot_id.id,
                # carried through from the new wizard field
                'analytic_account_id': data.analytic_account_id.id,
            }
            inventry_line_obj.create(cr, uid, line_data, context=context)
            # Confirm and validate immediately so the adjustment takes effect.
            inventry_obj.action_confirm(cr, uid, [inventory_id],
                                        context=context)
            inventry_obj.action_done(cr, uid, [inventory_id], context=context)
        return {}
| agpl-3.0 |
foxdog-studios/pyddp | ddp/messages/client/connect_message_parser.py | 1 | 1143 | # -*- coding: utf-8 -*-
# Copyright 2014 Foxdog Studios
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .client_message_parser import ClientMessageParser
from .connect_message import ConnectMessage
from .constants import MSG_CONNECT
__all__ = ['ConnectMessageParser']
class ConnectMessageParser(ClientMessageParser):
    """Parser turning a decoded DDP 'connect' POD into a ConnectMessage."""
    MESSAGE_TYPE = MSG_CONNECT

    def parse(self, pod):
        """Build a ConnectMessage from *pod*; 'support' and 'session' are optional keys."""
        version = pod['version']
        support = pod.get('support')
        session = pod.get('session')
        return ConnectMessage(version, support=support, session=session)
| apache-2.0 |
orinocoz/shinysdr | shinysdr/modes.py | 4 | 4569 | # Copyright 2013, 2014 Kevin Reid <kpreid@switchb.org>
#
# This file is part of ShinySDR.
#
# ShinySDR is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ShinySDR is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ShinySDR. If not, see <http://www.gnu.org/licenses/>.
# pylint: disable=no-method-argument, no-init
# (pylint is confused by interfaces)
from __future__ import absolute_import, division
from twisted.plugin import IPlugin, getPlugins
from zope.interface import Interface, implements # available via Twisted
from shinysdr import plugins
__all__ = [] # appended later
class IDemodulator(Interface):
    """Interface for demodulator objects (mode-specific signal decoders)."""
    def can_set_mode(mode):
        '''
        Return whether this demodulator can reconfigure itself to demodulate the specified mode.
        If it returns False, it will typically be replaced with a newly created demodulator.
        '''
    def set_mode(mode):
        '''
        Reconfigure to demodulate the specified mode; only called when
        can_set_mode(mode) returned true.
        '''
    def get_half_bandwidth():
        '''
        Return half the bandwidth of the signal being demodulated
        (presumably in Hz — original TODO left this undocumented; confirm).
        '''
    def get_output_type():
        '''
        Return the SignalType of the demodulator's output, which must currently be stereo audio at any sample rate.
        '''
__all__.append('IDemodulator')
class IModulator(Interface):
    """Interface for modulator objects (mono audio in, IQ out)."""
    def can_set_mode(mode):
        '''
        Return whether this modulator can reconfigure itself to modulate the specified mode.
        If it returns False, it will typically be replaced with a newly created modulator.
        '''
    def set_mode(mode):
        '''
        Reconfigure to modulate the specified mode; only called when
        can_set_mode(mode) returned true.
        '''
    def get_input_type():
        '''
        Return the SignalType of the modulator's required input, which must currently be mono audio at any sample rate.
        '''
    def get_output_type():
        '''
        Return the SignalType of the modulator's output, which must currently be IQ at any sample rate.
        '''
class ITunableDemodulator(IDemodulator):
    """A demodulator that can itself shift its reception frequency within the input."""
    def set_rec_freq(freq):
        '''
        Set the nominal (carrier) frequency offset of the signal to be demodulated within the input signal.
        '''
__all__.append('ITunableDemodulator')
class _IModeDef(Interface):
    '''
    Marker interface for ModeDef plugin objects, queried via
    twisted.plugin.getPlugins in get_modes() below.
    '''
    # Only needed to make the plugin system work
    # TODO write interface methods anyway
class ModeDef(object):
    """Plugin record describing one (de)modulation mode: its id, label and classes."""
    implements(IPlugin, _IModeDef)
    # Twisted plugin system caches whether-a-plugin-class-was-found permanently, so we need to avoid _not_ having a ModeDef if the plugin has some sort of dependency it checks -- thus the 'available' flag can be used to hide a mode while still having an _IModeDef
    def __init__(self,
            mode,
            label,
            demod_class,
            mod_class=None,
            shared_objects=None,
            available=True):
        '''
        mode: String uniquely identifying this mode, typically a standard abbreviation written in uppercase letters (e.g. "USB").
        label: String displayed to the user to identify this mode (e.g. "Broadcast FM").
        demod_class: Class to instantiate to create a demodulator for this mode.
        mod_class: Class to instantiate to create a modulator for this mode (None if the mode cannot be transmitted).
        (TODO: cite demodulator and modulator interface docs)
        shared_objects: TODO explain
        available: If false, this mode definition will be ignored.
        '''
        # Fresh dict per instance; the None default avoids the shared
        # mutable-default-argument pitfall.
        if shared_objects is None:
            shared_objects = {}
        self.mode = mode
        self.label = label
        self.demod_class = demod_class
        self.mod_class = mod_class
        self.shared_objects = shared_objects
        self.available = available
__all__.append('ModeDef')
def get_modes():
    """Return all available ModeDef plugins."""
    # TODO caching? prebuilt mode table?
    available = []
    for mode_def in getPlugins(_IModeDef, plugins):
        if mode_def.available:
            available.append(mode_def)
    return available
__all__.append('get_modes')
def lookup_mode(mode):
    """Return the ModeDef whose mode string equals *mode*, or None if unknown."""
    # TODO sensible lookup table (doesn't matter for now because small N)
    return next((d for d in get_modes() if d.mode == mode), None)
| gpl-3.0 |
dhuang/incubator-airflow | airflow/operators/check_operator.py | 1 | 9666 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from builtins import zip
from builtins import str
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class CheckOperator(BaseOperator):
    """
    Performs checks against a db. The ``CheckOperator`` expects
    a sql query that will return a single row. Each value on that
    first row is evaluated using python ``bool`` casting. If any of the
    values return ``False`` the check is failed and errors out.
    Note that Python bool casting evals the following as ``False``:
    * ``False``
    * ``0``
    * Empty string (``""``)
    * Empty list (``[]``)
    * Empty dictionary or set (``{}``)
    Given a query like ``SELECT COUNT(*) FROM foo``, it will fail only if
    the count ``== 0``. You can craft much more complex query that could,
    for instance, check that the table has the same number of rows as
    the source table upstream, or that the count of today's partition is
    greater than yesterday's partition, or that a set of metrics are less
    than 3 standard deviation for the 7 day average.
    This operator can be used as a data quality check in your pipeline, and
    depending on where you put it in your DAG, you have the choice to
    stop the critical path, preventing from
    publishing dubious data, or on the side and receive email alerts
    without stopping the progress of the DAG.
    Note that this is an abstract class and get_db_hook
    needs to be defined. Whereas a get_db_hook is hook that gets a
    single record from an external source.
    :param sql: the sql to be executed
    :type sql: string
    :param conn_id: connection id used by get_db_hook to resolve the hook
    :type conn_id: string
    """
    template_fields = ('sql',)
    template_ext = ('.hql', '.sql',)
    ui_color = '#fff7e6'
    @apply_defaults
    def __init__(
            self, sql,
            conn_id=None,
            *args, **kwargs):
        super(CheckOperator, self).__init__(*args, **kwargs)
        self.conn_id = conn_id
        self.sql = sql
    def execute(self, context=None):
        """Run the query and fail unless every value of the first row is truthy."""
        self.log.info('Executing SQL check: %s', self.sql)
        records = self.get_db_hook().get_first(self.sql)
        self.log.info('Record: %s', records)
        if not records:
            raise AirflowException("The query returned None")
        elif not all([bool(r) for r in records]):
            exceptstr = "Test failed.\nQuery:\n{q}\nResults:\n{r!s}"
            raise AirflowException(exceptstr.format(q=self.sql, r=records))
        self.log.info("Success.")
    def get_db_hook(self):
        # Resolve the hook from the connection id at execution time.
        return BaseHook.get_hook(conn_id=self.conn_id)
def _convert_to_float_if_possible(s):
'''
A small helper function to convert a string to a numeric value
if appropriate
:param s: the string to be converted
:type s: str
'''
try:
ret = float(s)
except (ValueError, TypeError):
ret = s
return ret
class ValueCheckOperator(BaseOperator):
    """
    Performs a simple value check using sql code.
    Every value of the query's first row must equal ``pass_value``; when a
    numeric ``tolerance`` is given, numeric results may deviate by up to
    ``pass_value * tolerance`` in either direction.
    Note that this is an abstract class and get_db_hook
    needs to be defined. Whereas a get_db_hook is hook that gets a
    single record from an external source.
    :param sql: the sql to be executed
    :type sql: string
    :param pass_value: the expected value (compared as string or float)
    :param tolerance: optional relative tolerance for numeric comparisons
    :param conn_id: connection id used by get_db_hook to resolve the hook
    """
    __mapper_args__ = {
        'polymorphic_identity': 'ValueCheckOperator'
    }
    template_fields = ('sql', 'pass_value',)
    template_ext = ('.hql', '.sql',)
    ui_color = '#fff7e6'
    @apply_defaults
    def __init__(
            self, sql, pass_value, tolerance=None,
            conn_id=None,
            *args, **kwargs):
        super(ValueCheckOperator, self).__init__(*args, **kwargs)
        self.sql = sql
        self.conn_id = conn_id
        # Stored as str so it can also be used for string comparisons.
        self.pass_value = str(pass_value)
        tol = _convert_to_float_if_possible(tolerance)
        # Only a value convertible to float counts as a usable tolerance.
        self.tol = tol if isinstance(tol, float) else None
        self.has_tolerance = self.tol is not None
    def execute(self, context=None):
        """Run the query and compare every value of the first row to pass_value."""
        self.log.info('Executing SQL check: %s', self.sql)
        records = self.get_db_hook().get_first(self.sql)
        if not records:
            raise AirflowException("The query returned None")
        pass_value_conv = _convert_to_float_if_possible(self.pass_value)
        is_numeric_value_check = isinstance(pass_value_conv, float)
        tolerance_pct_str = None
        if (self.tol is not None):
            tolerance_pct_str = str(self.tol * 100) + '%'
        # Error template formatted with locals() below so it can reference
        # pass_value_conv / tolerance_pct_str / self / records lazily.
        except_temp = ("Test failed.\nPass value:{pass_value_conv}\n"
                       "Tolerance:{tolerance_pct_str}\n"
                       "Query:\n{self.sql}\nResults:\n{records!s}")
        if not is_numeric_value_check:
            # String comparison: every value must stringify to pass_value.
            tests = [str(r) == pass_value_conv for r in records]
        elif is_numeric_value_check:
            try:
                num_rec = [float(r) for r in records]
            except (ValueError, TypeError) as e:
                cvestr = "Converting a result to float failed.\n"
                raise AirflowException(cvestr + except_temp.format(**locals()))
            if self.has_tolerance:
                # Accept values within pass_value * (1 +/- tol).
                tests = [
                    pass_value_conv * (1 - self.tol) <=
                    r <= pass_value_conv * (1 + self.tol)
                    for r in num_rec]
            else:
                tests = [r == pass_value_conv for r in num_rec]
        if not all(tests):
            raise AirflowException(except_temp.format(**locals()))
    def get_db_hook(self):
        # Resolve the hook from the connection id at execution time.
        return BaseHook.get_hook(conn_id=self.conn_id)
class IntervalCheckOperator(BaseOperator):
    """
    Checks that the values of metrics given as SQL expressions are within
    a certain tolerance of the ones from days_back before.
    Note that this is an abstract class and get_db_hook
    needs to be defined. Whereas a get_db_hook is hook that gets a
    single record from an external source.
    :param table: the table name
    :type table: str
    :param days_back: number of days between ds and the ds we want to check
        against. Defaults to 7 days
    :type days_back: int
    :param metrics_threshold: a dictionary of ratios indexed by metrics
    :type metrics_threshold: dict
    :param date_filter_column: column used to filter on the day partition
    :param conn_id: connection id used by get_db_hook to resolve the hook
    """
    __mapper_args__ = {
        'polymorphic_identity': 'IntervalCheckOperator'
    }
    template_fields = ('sql1', 'sql2')
    template_ext = ('.hql', '.sql',)
    ui_color = '#fff7e6'
    @apply_defaults
    def __init__(
            self, table, metrics_thresholds,
            date_filter_column='ds', days_back=-7,
            conn_id=None,
            *args, **kwargs):
        super(IntervalCheckOperator, self).__init__(*args, **kwargs)
        self.table = table
        self.metrics_thresholds = metrics_thresholds
        # Sorted so row values can be zipped back to metric names reliably.
        self.metrics_sorted = sorted(metrics_thresholds.keys())
        self.date_filter_column = date_filter_column
        # Normalized to a negative offset regardless of the sign passed in.
        self.days_back = -abs(days_back)
        self.conn_id = conn_id
        sqlexp = ', '.join(self.metrics_sorted)
        sqlt = ("SELECT {sqlexp} FROM {table}"
                " WHERE {date_filter_column}=").format(**locals())
        # sql1 = current ds, sql2 = reference ds (days_back earlier).
        self.sql1 = sqlt + "'{{ ds }}'"
        self.sql2 = sqlt + "'{{ macros.ds_add(ds, "+str(self.days_back)+") }}'"
    def execute(self, context=None):
        """Compare today's metrics to the reference day's; fail on any ratio breach."""
        hook = self.get_db_hook()
        self.log.info('Executing SQL check: %s', self.sql2)
        row2 = hook.get_first(self.sql2)
        self.log.info('Executing SQL check: %s', self.sql1)
        row1 = hook.get_first(self.sql1)
        if not row2:
            raise AirflowException("The query {q} returned None".format(q=self.sql2))
        if not row1:
            raise AirflowException("The query {q} returned None".format(q=self.sql1))
        current = dict(zip(self.metrics_sorted, row1))
        reference = dict(zip(self.metrics_sorted, row2))
        ratios = {}
        test_results = {}
        rlog = "Ratio for {0}: {1} \n Ratio threshold : {2}"
        fstr = "'{k}' check failed. {r} is above {tr}"
        estr = "The following tests have failed:\n {0}"
        countstr = "The following {j} tests out of {n} failed:"
        for m in self.metrics_sorted:
            # Ratio is max/min so it is symmetric and always >= 1;
            # left None when either side is zero (division undefined).
            if current[m] == 0 or reference[m] == 0:
                ratio = None
            else:
                ratio = float(max(current[m], reference[m])) / \
                    min(current[m], reference[m])
            self.log.info(rlog.format(m, ratio, self.metrics_thresholds[m]))
            ratios[m] = ratio
            # NOTE(review): a None ratio makes this comparison fail on
            # Python 3 (None < float raises TypeError) — confirm intent.
            test_results[m] = ratio < self.metrics_thresholds[m]
        if not all(test_results.values()):
            failed_tests = [it[0] for it in test_results.items() if not it[1]]
            j = len(failed_tests)
            n = len(self.metrics_sorted)
            self.log.warning(countstr.format(**locals()))
            for k in failed_tests:
                self.log.warning(
                    fstr.format(k=k, r=ratios[k], tr=self.metrics_thresholds[k])
                )
            raise AirflowException(estr.format(", ".join(failed_tests)))
        self.log.info("All tests have passed")
    def get_db_hook(self):
        # Resolve the hook from the connection id at execution time.
        return BaseHook.get_hook(conn_id=self.conn_id)
| apache-2.0 |
Silmathoron/NNGT | testing/test_mpi.py | 1 | 4873 | #!/usr/bin/env python
#-*- coding:utf-8 -*-
#
# test_mpi.py
#
# This file is part of the NNGT project to generate and analyze
# neuronal networks and their activity.
# Copyright (C) 2015-2019 Tanguy Fardet
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test the main methods of the :mod:`~nngt.generation` module.
"""
import os
import unittest
import numpy as np
import nngt
from nngt.analysis import *
from nngt.lib.connect_tools import _compute_connections
from base_test import TestBasis, XmlHandler, network_dir
from test_generation import _distance_rule_theo, _distance_rule_exp
from tools_testing import foreach_graph
if os.environ.get("MPI"):
nngt.set_config("mpi", True)
# -------- #
# Test MPI #
# -------- #
class TestMPI(TestBasis):
    '''
    Class testing the main methods of the :mod:`~nngt.generation` module
    when running under MPI.
    '''
    # theoretical-property generators, keyed by graph type
    theo_prop = {
        "distance_rule": _distance_rule_theo,
    }
    # experimental (measured) property extractors, keyed by graph type
    exp_prop = {
        "distance_rule": _distance_rule_exp,
    }
    tolerance = 0.08
    @property
    def test_name(self):
        return "test_mpi"
    @unittest.skipIf(not nngt.get_config('mpi'), "Not using MPI.")
    def gen_graph(self, graph_name):
        # Generate the graph from the XML-described instructions; only the
        # master process names it.
        di_instructions = self.parser.get_graph_options(graph_name)
        graph = nngt.generate(di_instructions)
        if nngt.on_master_process():
            graph.set_name(graph_name)
        return graph, di_instructions
    @foreach_graph
    def test_model_properties(self, graph, instructions, **kwargs):
        '''
        When generating graphs from one of the preconfigured models, check
        that the expected properties are indeed obtained.
        '''
        # NOTE(review): the two branches are nearly identical; with the
        # "nngt" backend the graph is distributed, so the measured average
        # degree is scaled by the number of MPI processes.
        if nngt.get_config("backend") != "nngt" and nngt.on_master_process():
            graph_type = instructions["graph_type"]
            ref_result = self.theo_prop[graph_type](instructions)
            computed_result = self.exp_prop[graph_type](graph, instructions)
            if graph_type == 'distance_rule':
                # average degree
                self.assertTrue(
                    ref_result[0] == computed_result[0],
                    "Avg. deg. for graph {} failed:\nref = {} vs exp {}\
                    ".format(graph.name, ref_result[0], computed_result[0]))
                # average error on distance distribution
                sqd = np.square(
                    np.subtract(ref_result[1:], computed_result[1:]))
                avg_sqd = sqd / np.square(computed_result[1:])
                err = np.sqrt(avg_sqd).mean()
                tolerance = (self.tolerance if instructions['rule'] == 'lin'
                             else 0.25)
                self.assertTrue(err <= tolerance,
                    "Distance distribution for graph {} failed:\nerr = {} > {}\
                    ".format(graph.name, err, tolerance))
        elif nngt.get_config("backend") == "nngt":
            from mpi4py import MPI
            comm = MPI.COMM_WORLD
            num_proc = comm.Get_size()
            graph_type = instructions["graph_type"]
            ref_result = self.theo_prop[graph_type](instructions)
            computed_result = self.exp_prop[graph_type](graph, instructions)
            if graph_type == 'distance_rule':
                # average degree (per-process count times process count)
                self.assertTrue(
                    ref_result[0] == computed_result[0] * num_proc,
                    "Avg. deg. for graph {} failed:\nref = {} vs exp {}\
                    ".format(graph.name, ref_result[0], computed_result[0]))
                # average error on distance distribution
                sqd = np.square(
                    np.subtract(ref_result[1:], computed_result[1:]))
                avg_sqd = sqd / np.square(computed_result[1:])
                err = np.sqrt(avg_sqd).mean()
                tolerance = (self.tolerance if instructions['rule'] == 'lin'
                             else 0.25)
                self.assertTrue(err <= tolerance,
                    "Distance distribution for graph {} failed:\nerr = {} > {}\
                    ".format(graph.name, err, tolerance))
# ---------- #
# Test suite #
# ---------- #
# Build the suite only when MPI support is enabled; running the file
# directly hands control to unittest's own discovery via main().
if nngt.get_config('mpi'):
    suite = unittest.TestLoader().loadTestsFromTestCase(TestMPI)
if __name__ == "__main__":
    unittest.main()
| gpl-3.0 |
RJVB/audacity | lib-src/lv2/lv2/plugins/eg01-amp.lv2/waflib/Tools/msvc.py | 70 | 27831 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os,sys,re,tempfile
from waflib import Utils,Task,Logs,Options,Errors
from waflib.Logs import debug,warn
from waflib.TaskGen import after_method,feature
from waflib.Configure import conf
from waflib.Tools import ccroot,c,cxx,ar,winres
g_msvc_systemlibs='''
aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
'''.split()
all_msvc_platforms=[('x64','amd64'),('x86','x86'),('ia64','ia64'),('x86_amd64','amd64'),('x86_ia64','ia64'),('x86_arm','arm')]
all_wince_platforms=[('armv4','arm'),('armv4i','arm'),('mipsii','mips'),('mipsii_fp','mips'),('mipsiv','mips'),('mipsiv_fp','mips'),('sh4','sh'),('x86','cex86')]
all_icl_platforms=[('intel64','amd64'),('em64t','amd64'),('ia32','x86'),('Itanium','ia64')]
def options(opt):
	# NOTE: generated waf file (see header warning) — edits here are
	# overwritten when waf is repacked.
	# Command-line options letting the user pin the MSVC version(s) and
	# target architecture(s) instead of relying on auto-detection.
	opt.add_option('--msvc_version',type='string',help='msvc version, eg: "msvc 10.0,msvc 9.0"',default='')
	opt.add_option('--msvc_targets',type='string',help='msvc targets, eg: "x64,arm"',default='')
def setup_msvc(conf,versions,arch=False):
	# Pick the first (compiler version, target platform) pair that is both
	# desired (options/env) and present in the detected `versions` table.
	# Returns (compiler, revision, bindirs, incdirs, libdirs[, arch]).
	platforms=getattr(Options.options,'msvc_targets','').split(',')
	if platforms==['']:
		platforms=Utils.to_list(conf.env['MSVC_TARGETS'])or[i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
	desired_versions=getattr(Options.options,'msvc_version','').split(',')
	if desired_versions==['']:
		desired_versions=conf.env['MSVC_VERSIONS']or[v for v,_ in versions][::-1]
	versiondict=dict(versions)
	for version in desired_versions:
		try:
			targets=dict(versiondict[version])
			for target in platforms:
				try:
					arch,(p1,p2,p3)=targets[target]
					compiler,revision=version.rsplit(' ',1)
					if arch:
						return compiler,revision,p1,p2,p3,arch
					else:
						return compiler,revision,p1,p2,p3
				except KeyError:continue
		except KeyError:continue
	conf.fatal('msvc: Impossible to find a valid architecture for building (in setup_msvc)')
@conf
def get_msvc_version(conf,compiler,version,target,vcvars):
	# Run the vcvars batch file for `target`, capture the PATH/INCLUDE/LIB
	# environment it sets, and sanity-check that the compiler runs.
	# Returns (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR) or calls conf.fatal.
	debug('msvc: get_msvc_version: %r %r %r',compiler,version,target)
	batfile=conf.bldnode.make_node('waf-print-msvc.bat')
	batfile.write("""@echo off
set INCLUDE=
set LIB=
call "%s" %s
echo PATH=%%PATH%%
echo INCLUDE=%%INCLUDE%%
echo LIB=%%LIB%%;%%LIBPATH%%
"""%(vcvars,target))
	sout=conf.cmd_and_log(['cmd','/E:on','/V:on','/C',batfile.abspath()])
	lines=sout.splitlines()
	if not lines[0]:
		lines.pop(0)
	MSVC_PATH=MSVC_INCDIR=MSVC_LIBDIR=None
	for line in lines:
		if line.startswith('PATH='):
			path=line[5:]
			MSVC_PATH=path.split(';')
		elif line.startswith('INCLUDE='):
			MSVC_INCDIR=[i for i in line[8:].split(';')if i]
		elif line.startswith('LIB='):
			MSVC_LIBDIR=[i for i in line[4:].split(';')if i]
	if None in(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR):
		conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
	env=dict(os.environ)
	env.update(PATH=path)
	compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
	cxx=conf.find_program(compiler_name,path_list=MSVC_PATH)
	cxx=conf.cmd_to_list(cxx)
	# A stray CL env var would inject extra flags into the probe below.
	if'CL'in env:
		del(env['CL'])
	try:
		try:
			conf.cmd_and_log(cxx+['/help'],env=env)
		except Exception ,e:
			debug('msvc: get_msvc_version: %r %r %r -> failure'%(compiler,version,target))
			debug(str(e))
			conf.fatal('msvc: cannot run the compiler (in get_msvc_version)')
		else:
			debug('msvc: get_msvc_version: %r %r %r -> OK',compiler,version,target)
	finally:
		conf.env[compiler_name]=''
	return(MSVC_PATH,MSVC_INCDIR,MSVC_LIBDIR)
@conf
def gather_wsdk_versions(conf,versions):
	# Detect installed Windows SDKs through the registry (first the
	# 32-bit Wow6432node view, then the native view) and append an entry
	# ('wsdk <version>',targets) to *versions* for each SDK that ships a
	# usable bin/SetEnv.cmd.
	version_pattern=re.compile('^v..?.?\...?.?')
	try:
		all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
	except WindowsError:
		try:
			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
		except WindowsError:
			return
	index=0
	while 1:
		# Enumerate subkeys until EnumKey raises (end of the key list).
		try:
			version=Utils.winreg.EnumKey(all_versions,index)
		except WindowsError:
			break
		index=index+1
		if not version_pattern.match(version):
			continue
		try:
			msvc_version=Utils.winreg.OpenKey(all_versions,version)
			path,type=Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
		except WindowsError:
			continue
		if os.path.isfile(os.path.join(path,'bin','SetEnv.cmd')):
			targets=[]
			for target,arch in all_msvc_platforms:
				# Probe each architecture; targets that fail to
				# configure are silently skipped.
				try:
					targets.append((target,(arch,conf.get_msvc_version('wsdk',version,'/'+target,os.path.join(path,'bin','SetEnv.cmd')))))
				except conf.errors.ConfigurationError:
					pass
			versions.append(('wsdk '+version[1:],targets))
def gather_wince_supported_platforms():
	# Return [(device_name,[(arch,compiler,include_dir,lib_dir),...]),...]
	# for the Windows CE SDKs recorded in the registry; an empty list
	# when no CE SDK is installed.
	supported_wince_platforms=[]
	try:
		ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
	except WindowsError:
		try:
			ce_sdk=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
		except WindowsError:
			ce_sdk=''
	if not ce_sdk:
		return supported_wince_platforms
	ce_index=0
	while 1:
		try:
			sdk_device=Utils.winreg.EnumKey(ce_sdk,ce_index)
		except WindowsError:
			break
		ce_index=ce_index+1
		sdk=Utils.winreg.OpenKey(ce_sdk,sdk_device)
		try:
			path,type=Utils.winreg.QueryValueEx(sdk,'SDKRootDir')
		except WindowsError:
			# Older SDKs record the path of an XML file instead.
			try:
				path,type=Utils.winreg.QueryValueEx(sdk,'SDKInformation')
				path,xml=os.path.split(path)
			except WindowsError:
				continue
		path=str(path)
		# Derive the device directory name from the SDK root path.
		path,device=os.path.split(path)
		if not device:
			path,device=os.path.split(path)
		for arch,compiler in all_wince_platforms:
			platforms=[]
			if os.path.isdir(os.path.join(path,device,'Lib',arch)):
				platforms.append((arch,compiler,os.path.join(path,device,'Include',arch),os.path.join(path,device,'Lib',arch)))
			if platforms:
				supported_wince_platforms.append((device,platforms))
	return supported_wince_platforms
def gather_msvc_detected_versions():
	# Enumerate Visual Studio and VC Express installations recorded in
	# the registry and return a list of
	# (numeric_version,version_tag,registry_key) tuples sorted by
	# ascending numeric version.
	version_pattern=re.compile('^(\d\d?\.\d\d?)(Exp)?$')
	detected_versions=[]
	for vcver,vcvar in(('VCExpress','Exp'),('VisualStudio','')):
		# Prefer the 32-bit registry view, fall back to the native one.
		prefix='SOFTWARE\\Wow6432node\\Microsoft\\'+vcver
		try:
			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
		except WindowsError:
			prefix='SOFTWARE\\Microsoft\\'+vcver
			try:
				all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,prefix)
			except WindowsError:
				continue
		idx=0
		while True:
			try:
				version=Utils.winreg.EnumKey(all_versions,idx)
			except WindowsError:
				break
			idx+=1
			match=version_pattern.match(version)
			if match:
				detected_versions.append((float(match.group(1)),version+vcvar,prefix+"\\"+version))
	detected_versions.sort(key=lambda tup:tup[0])
	return detected_versions
@conf
def gather_msvc_targets(conf,versions,version,vc_path):
	# Collect the buildable target architectures for one VC install,
	# trying the three batch-file layouts used across VS releases.
	targets=[]
	if os.path.isfile(os.path.join(vc_path,'vcvarsall.bat')):
		# Modern layout: one vcvarsall.bat handling every architecture.
		for target,realtarget in all_msvc_platforms[::-1]:
			try:
				targets.append((target,(realtarget,conf.get_msvc_version('msvc',version,target,os.path.join(vc_path,'vcvarsall.bat')))))
			except conf.errors.ConfigurationError:
				pass
	elif os.path.isfile(os.path.join(vc_path,'Common7','Tools','vsvars32.bat')):
		# Older layout: x86-only vsvars32.bat.
		try:
			targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'x86',os.path.join(vc_path,'Common7','Tools','vsvars32.bat')))))
		except conf.errors.ConfigurationError:
			pass
	elif os.path.isfile(os.path.join(vc_path,'Bin','vcvars32.bat')):
		# Oldest layout: Bin/vcvars32.bat without a target argument.
		try:
			targets.append(('x86',('x86',conf.get_msvc_version('msvc',version,'',os.path.join(vc_path,'Bin','vcvars32.bat')))))
		except conf.errors.ConfigurationError:
			pass
	if targets:
		versions.append(('msvc '+version,targets))
@conf
def gather_wince_targets(conf,versions,version,vc_path,vsvars,supported_platforms):
	# For each Windows CE SDK device, combine the x86 host tool dirs with
	# the device's cross-compiler/include/lib directories and register
	# an entry ('<device> <version>',targets) in *versions*.
	for device,platforms in supported_platforms:
		cetargets=[]
		for platform,compiler,include,lib in platforms:
			winCEpath=os.path.join(vc_path,'ce')
			if not os.path.isdir(winCEpath):
				continue
			try:
				common_bindirs,_1,_2=conf.get_msvc_version('msvc',version,'x86',vsvars)
			except conf.errors.ConfigurationError:
				continue
			if os.path.isdir(os.path.join(winCEpath,'lib',platform)):
				bindirs=[os.path.join(winCEpath,'bin',compiler),os.path.join(winCEpath,'bin','x86_'+compiler)]+common_bindirs
				incdirs=[os.path.join(winCEpath,'include'),os.path.join(winCEpath,'atlmfc','include'),include]
				libdirs=[os.path.join(winCEpath,'lib',platform),os.path.join(winCEpath,'atlmfc','lib',platform),lib]
				cetargets.append((platform,(platform,(bindirs,incdirs,libdirs))))
		if cetargets:
			versions.append((device+' '+version,cetargets))
@conf
def gather_winphone_targets(conf,versions,version,vc_path,vsvars):
	# Register Windows Phone targets configured through the given
	# vcvarsphoneall.bat script.
	targets=[]
	for target,realtarget in all_msvc_platforms[::-1]:
		try:
			targets.append((target,(realtarget,conf.get_msvc_version('winphone',version,target,vsvars))))
		except conf.errors.ConfigurationError ,e:
			pass
	if targets:
		versions.append(('winphone '+version,targets))
@conf
def gather_msvc_versions(conf,versions):
	# Locate full Visual Studio / VC installations via the registry and
	# collect their targets: Windows CE and Windows Phone variants are
	# registered first, then the regular msvc targets.
	vc_paths=[]
	for(v,version,reg)in gather_msvc_detected_versions():
		try:
			try:
				msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\VC")
			except WindowsError:
				msvc_version=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,reg+"\\Setup\\Microsoft Visual C++")
			path,type=Utils.winreg.QueryValueEx(msvc_version,'ProductDir')
			vc_paths.append((version,os.path.abspath(str(path))))
		except WindowsError:
			continue
	wince_supported_platforms=gather_wince_supported_platforms()
	for version,vc_path in vc_paths:
		vs_path=os.path.dirname(vc_path)
		vsvars=os.path.join(vs_path,'Common7','Tools','vsvars32.bat')
		if wince_supported_platforms and os.path.isfile(vsvars):
			conf.gather_wince_targets(versions,version,vc_path,vsvars,wince_supported_platforms)
		# Windows Phone 8.0 SDK ships its own vcvars script.
		vsvars=os.path.join(vs_path,'VC','WPSDK','WP80','vcvarsphoneall.bat')
		if os.path.isfile(vsvars):
			conf.gather_winphone_targets(versions,'8.0',vc_path,vsvars)
	for version,vc_path in vc_paths:
		vs_path=os.path.dirname(vc_path)
		conf.gather_msvc_targets(versions,version,vc_path)
@conf
def gather_icl_versions(conf,versions):
	# Detect classic Intel C++ compiler installations (the
	# "Intel\\Compilers\\C++" registry layout) and append
	# ('intel <major>',targets) entries to *versions*.
	version_pattern=re.compile('^...?.?\....?.?')
	try:
		all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
	except WindowsError:
		try:
			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Compilers\\C++')
		except WindowsError:
			return
	index=0
	while 1:
		try:
			version=Utils.winreg.EnumKey(all_versions,index)
		except WindowsError:
			break
		index=index+1
		if not version_pattern.match(version):
			continue
		targets=[]
		# First layout: the 64-bit target lives in an EM64T_NATIVE subkey.
		for target,arch in all_icl_platforms:
			try:
				if target=='intel64':targetDir='EM64T_NATIVE'
				else:targetDir=target
				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
				icl_version=Utils.winreg.OpenKey(all_versions,version)
				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
				batch_file=os.path.join(path,'bin','iclvars.bat')
				if os.path.isfile(batch_file):
					try:
						targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
					except conf.errors.ConfigurationError:
						pass
			except WindowsError:
				pass
		# Second, older layout: the target name itself is the subkey.
		for target,arch in all_icl_platforms:
			try:
				icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+target)
				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
				batch_file=os.path.join(path,'bin','iclvars.bat')
				if os.path.isfile(batch_file):
					try:
						targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
					except conf.errors.ConfigurationError:
						pass
			except WindowsError:
				continue
		major=version[0:2]
		versions.append(('intel '+major,targets))
@conf
def gather_intel_composer_versions(conf,versions):
	# Detect Intel Composer installations registered under
	# "Intel\\Suites" and append ('intel <major>',targets) entries.
	version_pattern=re.compile('^...?.?\...?.?.?')
	try:
		all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Wow6432node\\Intel\\Suites')
	except WindowsError:
		try:
			all_versions=Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE,'SOFTWARE\\Intel\\Suites')
		except WindowsError:
			return
	index=0
	while 1:
		try:
			version=Utils.winreg.EnumKey(all_versions,index)
		except WindowsError:
			break
		index=index+1
		if not version_pattern.match(version):
			continue
		targets=[]
		for target,arch in all_icl_platforms:
			try:
				if target=='intel64':targetDir='EM64T_NATIVE'
				else:targetDir=target
				try:
					defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
				except WindowsError:
					# Some releases register the 64-bit default as EM64T.
					if targetDir=='EM64T_NATIVE':
						defaults=Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
					else:
						raise WindowsError
				uid,type=Utils.winreg.QueryValueEx(defaults,'SubKey')
				Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
				icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
				path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
				batch_file=os.path.join(path,'bin','iclvars.bat')
				if os.path.isfile(batch_file):
					try:
						targets.append((target,(arch,conf.get_msvc_version('intel',version,target,batch_file))))
					except conf.errors.ConfigurationError ,e:
						pass
				# Warn (once) about the Intel 13 compilervars_arch.bat
				# issue: it only checks for VSWinExpress.exe (see the
				# patch_url forum thread).
				compilervars_warning_attr='_compilervars_warning_key'
				if version[0:2]=='13'and getattr(conf,compilervars_warning_attr,True):
					setattr(conf,compilervars_warning_attr,False)
					patch_url='http://software.intel.com/en-us/forums/topic/328487'
					compilervars_arch=os.path.join(path,'bin','compilervars_arch.bat')
					for vscomntools in['VS110COMNTOOLS','VS100COMNTOOLS']:
						if os.environ.has_key(vscomntools):
							vs_express_path=os.environ[vscomntools]+r'..\IDE\VSWinExpress.exe'
							dev_env_path=os.environ[vscomntools]+r'..\IDE\devenv.exe'
							if(r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"'in Utils.readf(compilervars_arch)and not os.path.exists(vs_express_path)and not os.path.exists(dev_env_path)):
								Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU ''(VSWinExpress.exe) but it does not seem to be installed at %r. ''The intel command line set up will fail to configure unless the file %r''is patched. See: %s')%(vs_express_path,compilervars_arch,patch_url))
			except WindowsError:
				pass
		major=version[0:2]
		versions.append(('intel '+major,targets))
@conf
def get_msvc_versions(conf):
	# Return the cached list of detected compiler versions, probing all
	# known toolchains (intel, intel composer, wsdk, msvc) on first use.
	if not conf.env['MSVC_INSTALLED_VERSIONS']:
		lst=[]
		conf.gather_icl_versions(lst)
		conf.gather_intel_composer_versions(lst)
		conf.gather_wsdk_versions(lst)
		conf.gather_msvc_versions(lst)
		conf.env['MSVC_INSTALLED_VERSIONS']=lst
	return conf.env['MSVC_INSTALLED_VERSIONS']
@conf
def print_all_msvc_detected(conf):
	# Log every detected compiler version with its target architectures.
	for version,targets in conf.env['MSVC_INSTALLED_VERSIONS']:
		Logs.info(version)
		for target,l in targets:
			Logs.info("\t"+target)
@conf
def detect_msvc(conf,arch=False):
	# Detect installed compilers and select one according to the
	# MSVC_VERSIONS / MSVC_TARGETS preferences (see setup_msvc).
	versions=get_msvc_versions(conf)
	return setup_msvc(conf,versions,arch)
@conf
def find_lt_names_msvc(self,libname,is_static=False):
	# Search LIBPATH for a libtool archive (.la file) describing
	# *libname* and return (directory,library_name,is_static_flag);
	# (None,None,None) when no .la file is found.
	lt_names=['lib%s.la'%libname,'%s.la'%libname,]
	for path in self.env['LIBPATH']:
		for la in lt_names:
			laf=os.path.join(path,la)
			dll=None
			if os.path.exists(laf):
				ltdict=Utils.read_la_file(laf)
				lt_libdir=None
				if ltdict.get('libdir',''):
					lt_libdir=ltdict['libdir']
				if not is_static and ltdict.get('library_names',''):
					dllnames=ltdict['library_names'].split()
					dll=dllnames[0].lower()
					dll=re.sub('\.dll$','',dll)
					return(lt_libdir,dll,False)
				elif ltdict.get('old_library',''):
					olib=ltdict['old_library']
					if os.path.exists(os.path.join(path,olib)):
						return(path,olib,True)
					# NOTE(review): lt_libdir may still be None here and
					# None!='' is true, so os.path.join would raise --
					# presumably .la files always carry libdir; confirm.
					elif lt_libdir!=''and os.path.exists(os.path.join(lt_libdir,olib)):
						return(lt_libdir,olib,True)
					else:
						return(None,olib,True)
				else:
					raise self.errors.WafError('invalid libtool object file: %s'%laf)
	return(None,None,None)
@conf
def libname_msvc(self,libname,is_static=False):
	# Translate a unix-style library name into the msvc .lib name found
	# on LIBPATH; calls self.fatal when nothing matches.
	lib=libname.lower()
	lib=re.sub('\.lib$','',lib)
	if lib in g_msvc_systemlibs:
		return lib
	lib=re.sub('^lib','',lib)
	if lib=='m':
		# No separate math library with msvc.
		return None
	(lt_path,lt_libname,lt_static)=self.find_lt_names_msvc(lib,is_static)
	if lt_path!=None and lt_libname!=None:
		if lt_static==True:
			return os.path.join(lt_path,lt_libname)
	if lt_path!=None:
		_libpaths=[lt_path]+self.env['LIBPATH']
	else:
		_libpaths=self.env['LIBPATH']
	static_libs=['lib%ss.lib'%lib,'lib%s.lib'%lib,'%ss.lib'%lib,'%s.lib'%lib,]
	dynamic_libs=['lib%s.dll.lib'%lib,'lib%s.dll.a'%lib,'%s.dll.lib'%lib,'%s.dll.a'%lib,'lib%s_d.lib'%lib,'%s_d.lib'%lib,'%s.lib'%lib,]
	libnames=static_libs
	if not is_static:
		# Prefer import libraries over static archives for dynamic links.
		libnames=dynamic_libs+static_libs
	for path in _libpaths:
		for libn in libnames:
			if os.path.exists(os.path.join(path,libn)):
				debug('msvc: lib found: %s'%os.path.join(path,libn))
				return re.sub('\.lib$','',libn)
	self.fatal("The library %r could not be found"%libname)
	# Unreachable in practice since fatal raises; kept as a safety net.
	return re.sub('\.lib$','',libname)
@conf
def check_lib_msvc(self,libname,is_static=False,uselib_store=None):
	# Resolve *libname* and record it in the environment under
	# LIB_<uselib_store> (uselib_store defaults to the upper-cased name).
	libn=self.libname_msvc(libname,is_static)
	if not uselib_store:
		uselib_store=libname.upper()
	# The STLIB_ branch is deliberately disabled ("False and"): static
	# libraries are recorded in LIB_* as well.
	if False and is_static:
		self.env['STLIB_'+uselib_store]=[libn]
	else:
		self.env['LIB_'+uselib_store]=[libn]
@conf
def check_libs_msvc(self,libnames,is_static=False):
	# Convenience wrapper: check several library names (list or
	# space-separated string) one by one.
	for libname in Utils.to_list(libnames):
		self.check_lib_msvc(libname,is_static)
def configure(conf):
	# Waf tool entry point: detect msvc (including the architecture) and
	# set up the common C/C++ compilation and link flags.
	conf.autodetect(True)
	conf.find_msvc()
	conf.msvc_common_flags()
	conf.cc_load_tools()
	conf.cxx_load_tools()
	conf.cc_add_flags()
	conf.cxx_add_flags()
	conf.link_add_flags()
	conf.visual_studio_add_flags()
@conf
def no_autodetect(conf):
	# Configure msvc from the pre-set environment instead of probing
	# the registry (sets NO_MSVC_DETECT before running configure).
	conf.env.NO_MSVC_DETECT=1
	configure(conf)
@conf
def autodetect(conf,arch=False):
	# Populate PATH/INCLUDES/LIBPATH and the MSVC_* variables from the
	# detected compiler; with arch=True also record DEST_CPU.
	v=conf.env
	if v.NO_MSVC_DETECT:
		return
	if arch:
		compiler,version,path,includes,libdirs,arch=conf.detect_msvc(True)
		v['DEST_CPU']=arch
	else:
		compiler,version,path,includes,libdirs=conf.detect_msvc()
	v['PATH']=path
	v['INCLUDES']=includes
	v['LIBPATH']=libdirs
	v['MSVC_COMPILER']=compiler
	try:
		v['MSVC_VERSION']=float(version)
	except Exception:
		# Strip a trailing three-character marker such as 'Exp'.
		v['MSVC_VERSION']=float(version[:-3])
def _get_prog_names(conf,compiler):
if compiler=='intel':
compiler_name='ICL'
linker_name='XILINK'
lib_name='XILIB'
else:
compiler_name='CL'
linker_name='LINK'
lib_name='LIB'
return compiler_name,linker_name,lib_name
@conf
def find_msvc(conf):
	# Locate the compiler, linker and archiver matching the detected
	# MSVC_COMPILER / MSVC_VERSION and record them in the environment.
	if sys.platform=='cygwin':
		conf.fatal('MSVC module does not work under cygwin Python!')
	v=conf.env
	path=v['PATH']
	compiler=v['MSVC_COMPILER']
	version=v['MSVC_VERSION']
	compiler_name,linker_name,lib_name=_get_prog_names(conf,compiler)
	# Manifest embedding applies from msvc 8 / wsdk 6 / intel 11 onwards.
	v.MSVC_MANIFEST=(compiler=='msvc'and version>=8)or(compiler=='wsdk'and version>=6)or(compiler=='intel'and version>=11)
	cxx=None
	if v['CXX']:cxx=v['CXX']
	elif'CXX'in conf.environ:cxx=conf.environ['CXX']
	cxx=conf.find_program(compiler_name,var='CXX',path_list=path)
	cxx=conf.cmd_to_list(cxx)
	env=dict(conf.environ)
	if path:env.update(PATH=';'.join(path))
	# Empty /help output means the tool could not be identified as cl.
	if not conf.cmd_and_log(cxx+['/nologo','/help'],env=env):
		conf.fatal('the msvc compiler could not be identified')
	v['CC']=v['CXX']=cxx
	v['CC_NAME']=v['CXX_NAME']='msvc'
	if not v['LINK_CXX']:
		link=conf.find_program(linker_name,path_list=path)
		if link:v['LINK_CXX']=link
		else:conf.fatal('%s was not found (linker)'%linker_name)
		v['LINK']=link
	if not v['LINK_CC']:
		v['LINK_CC']=v['LINK_CXX']
	if not v['AR']:
		stliblink=conf.find_program(lib_name,path_list=path,var='AR')
		if not stliblink:return
		v['ARFLAGS']=['/NOLOGO']
	if v.MSVC_MANIFEST:
		conf.find_program('MT',path_list=path,var='MT')
		v['MTFLAGS']=['/NOLOGO']
	try:
		conf.load('winres')
	except Errors.WafError:
		warn('Resource compiler not found. Compiling resource file is disabled')
@conf
def visual_studio_add_flags(self):
	# Import INCLUDE/LIB from the calling process environment (as set by
	# a Visual Studio command prompt) into the build configuration.
	v=self.env
	try:v.prepend_value('INCLUDES',[x for x in self.environ['INCLUDE'].split(';')if x])
	except Exception:pass
	try:v.prepend_value('LIBPATH',[x for x in self.environ['LIB'].split(';')if x])
	except Exception:pass
@conf
def msvc_common_flags(conf):
	# Populate the flag templates and file-name patterns shared by all
	# msvc-like compilers (subsystems, CRT variants, lib/dll patterns).
	v=conf.env
	v['DEST_BINFMT']='pe'
	v.append_value('CFLAGS',['/nologo'])
	v.append_value('CXXFLAGS',['/nologo'])
	v['DEFINES_ST']='/D%s'
	v['CC_SRC_F']=''
	v['CC_TGT_F']=['/c','/Fo']
	if v['MSVC_VERSION']>=8:
		# /FC (full diagnostic paths) exists from VS2005 on.
		v['CC_TGT_F']=['/FC']+v['CC_TGT_F']
	v['CXX_SRC_F']=''
	v['CXX_TGT_F']=['/c','/Fo']
	if v['MSVC_VERSION']>=8:
		v['CXX_TGT_F']=['/FC']+v['CXX_TGT_F']
	v['CPPPATH_ST']='/I%s'
	v['AR_TGT_F']=v['CCLNK_TGT_F']=v['CXXLNK_TGT_F']='/OUT:'
	v['CFLAGS_CONSOLE']=v['CXXFLAGS_CONSOLE']=['/SUBSYSTEM:CONSOLE']
	v['CFLAGS_NATIVE']=v['CXXFLAGS_NATIVE']=['/SUBSYSTEM:NATIVE']
	v['CFLAGS_POSIX']=v['CXXFLAGS_POSIX']=['/SUBSYSTEM:POSIX']
	v['CFLAGS_WINDOWS']=v['CXXFLAGS_WINDOWS']=['/SUBSYSTEM:WINDOWS']
	v['CFLAGS_WINDOWSCE']=v['CXXFLAGS_WINDOWSCE']=['/SUBSYSTEM:WINDOWSCE']
	# CRT selection flags (static/dll x release/debug).
	v['CFLAGS_CRT_MULTITHREADED']=v['CXXFLAGS_CRT_MULTITHREADED']=['/MT']
	v['CFLAGS_CRT_MULTITHREADED_DLL']=v['CXXFLAGS_CRT_MULTITHREADED_DLL']=['/MD']
	v['CFLAGS_CRT_MULTITHREADED_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DBG']=['/MTd']
	v['CFLAGS_CRT_MULTITHREADED_DLL_DBG']=v['CXXFLAGS_CRT_MULTITHREADED_DLL_DBG']=['/MDd']
	v['LIB_ST']='%s.lib'
	v['LIBPATH_ST']='/LIBPATH:%s'
	v['STLIB_ST']='%s.lib'
	v['STLIBPATH_ST']='/LIBPATH:%s'
	v.append_value('LINKFLAGS',['/NOLOGO'])
	if v['MSVC_MANIFEST']:
		v.append_value('LINKFLAGS',['/MANIFEST'])
	v['CFLAGS_cshlib']=[]
	v['CXXFLAGS_cxxshlib']=[]
	v['LINKFLAGS_cshlib']=v['LINKFLAGS_cxxshlib']=['/DLL']
	v['cshlib_PATTERN']=v['cxxshlib_PATTERN']='%s.dll'
	v['implib_PATTERN']='%s.lib'
	v['IMPLIB_ST']='/IMPLIB:%s'
	v['LINKFLAGS_cstlib']=[]
	v['cstlib_PATTERN']=v['cxxstlib_PATTERN']='%s.lib'
	v['cprogram_PATTERN']=v['cxxprogram_PATTERN']='%s.exe'
@after_method('apply_link')
@feature('c','cxx')
def apply_flags_msvc(self):
	# Add the /subsystem flag to the link (or archive) command and, when
	# linking with /debug, register the .pdb file as an extra output so
	# it is tracked and installed alongside the binary.
	if self.env.CC_NAME!='msvc'or not getattr(self,'link_task',None):
		return
	is_static=isinstance(self.link_task,ccroot.stlink_task)
	subsystem=getattr(self,'subsystem','')
	if subsystem:
		subsystem='/subsystem:%s'%subsystem
		flags=is_static and'ARFLAGS'or'LINKFLAGS'
		self.env.append_value(flags,subsystem)
	if not is_static:
		for f in self.env.LINKFLAGS:
			d=f.lower()
			if d[1:]=='debug':
				pdbnode=self.link_task.outputs[0].change_ext('.pdb')
				self.link_task.outputs.append(pdbnode)
				try:
					self.install_task.source.append(pdbnode)
				except AttributeError:
					pass
				break
@feature('cprogram','cshlib','cxxprogram','cxxshlib')
@after_method('apply_link')
def apply_manifest(self):
	# Declare the .manifest file as an additional link output so that it
	# is embedded by exec_mf after linking (see MSVC_MANIFEST).
	if self.env.CC_NAME=='msvc'and self.env.MSVC_MANIFEST and getattr(self,'link_task',None):
		out_node=self.link_task.outputs[0]
		man_node=out_node.parent.find_or_declare(out_node.name+'.manifest')
		self.link_task.outputs.append(man_node)
		self.link_task.do_manifest=True
def exec_mf(self):
	# Embed the manifest produced by the linker into the binary with
	# mt.exe; returns the command exit status (0 when mt is missing or
	# no manifest output was declared).
	env=self.env
	mtool=env['MT']
	if not mtool:
		return 0
	self.do_manifest=False
	outfile=self.outputs[0].abspath()
	manifest=None
	for out_node in self.outputs:
		if out_node.name.endswith('.manifest'):
			manifest=out_node.abspath()
			break
	if manifest is None:
		# No manifest among the outputs: nothing to embed.
		return 0
	mode=''
	# Resource id 1 for executables, 2 for shared libraries.
	if'cprogram'in self.generator.features or'cxxprogram'in self.generator.features:
		mode='1'
	elif'cshlib'in self.generator.features or'cxxshlib'in self.generator.features:
		mode='2'
	debug('msvc: embedding manifest in mode %r'%mode)
	lst=[]
	lst.append(env['MT'])
	lst.extend(Utils.to_list(env['MTFLAGS']))
	lst.extend(['-manifest',manifest])
	lst.append('-outputresource:%s;%s'%(outfile,mode))
	lst=[lst]
	return self.exec_command(*lst)
def quote_response_command(self,flag):
	# Quote one command-line flag for use inside an msvc response file.
	# Flags carrying a path argument keep the option prefix outside the
	# quotes; any other flag containing a space is quoted whole.
	if' 'in flag:
		for prefix in('/LIBPATH:','/IMPLIB:','/OUT:','/I'):
			if flag.startswith(prefix):
				flag='%s"%s"'%(prefix,flag[len(prefix):])
				break
		else:
			flag='"%s"'%flag
	return flag
def exec_response_command(self,cmd,**kw):
	# Work around the Windows command-line length limit: when the joined
	# command exceeds 8192 characters, write the arguments to a
	# temporary @response file and pass that to the program instead.
	try:
		tmp=None
		if sys.platform.startswith('win')and isinstance(cmd,list)and len(' '.join(cmd))>=8192:
			program=cmd[0]
			cmd=[self.quote_response_command(x)for x in cmd]
			(fd,tmp)=tempfile.mkstemp()
			# Backslashes must be doubled inside the response file.
			os.write(fd,'\r\n'.join(i.replace('\\','\\\\')for i in cmd[1:]))
			os.close(fd)
			cmd=[program,'@'+tmp]
		ret=self.generator.bld.exec_command(cmd,**kw)
	finally:
		# Always remove the temporary response file, best-effort.
		if tmp:
			try:
				os.remove(tmp)
			except OSError:
				pass
	return ret
def exec_command_msvc(self,*k,**kw):
	# Execute a compile/link command with the msvc quirks applied: glue
	# '/Fo', '/doc' and trailing-colon options to their argument, export
	# the detected PATH, and embed the manifest when requested.
	if isinstance(k[0],list):
		lst=[]
		carry=''
		for a in k[0]:
			if a=='/Fo'or a=='/doc'or a[-1]==':':
				# Option expecting its value in the next list element.
				carry=a
			else:
				lst.append(carry+a)
				carry=''
		k=[lst]
	if self.env['PATH']:
		env=dict(self.env.env or os.environ)
		env.update(PATH=';'.join(self.env['PATH']))
		kw['env']=env
	bld=self.generator.bld
	try:
		if not kw.get('cwd',None):
			kw['cwd']=bld.cwd
	except AttributeError:
		bld.cwd=kw['cwd']=bld.variant_dir
	ret=self.exec_response_command(k[0],**kw)
	if not ret and getattr(self,'do_manifest',None):
		ret=self.exec_mf()
	return ret
def wrap_class(class_name):
	# Derive a subclass of the named task class that routes command
	# execution through the msvc-specific helpers whenever
	# CC_NAME=='msvc'.  Returns None when the task class does not exist.
	cls=Task.classes.get(class_name,None)
	if not cls:
		return None
	derived_class=type(class_name,(cls,),{})
	def exec_command(self,*k,**kw):
		if self.env['CC_NAME']=='msvc':
			return self.exec_command_msvc(*k,**kw)
		else:
			return super(derived_class,self).exec_command(*k,**kw)
	# Attach the msvc helper functions as methods of the derived class.
	derived_class.exec_command=exec_command
	derived_class.exec_response_command=exec_response_command
	derived_class.quote_response_command=quote_response_command
	derived_class.exec_command_msvc=exec_command_msvc
	derived_class.exec_mf=exec_mf
	return derived_class
# Wrap every compile and link task class so the msvc-specific command
# execution is used when building with this compiler.
for k in'c cxx cprogram cxxprogram cshlib cxxshlib cstlib cxxstlib'.split():
	wrap_class(k)
def make_winapp(self,family):
	# Shared helper for the Windows Store / Phone app features: define
	# the WINAPI family, enable /ZW and /TP, and point /AI at every
	# library path for metadata resolution.
	append=self.env.append_unique
	append('DEFINES','WINAPI_FAMILY=%s'%family)
	append('CXXFLAGS','/ZW')
	append('CXXFLAGS','/TP')
	for lib_path in self.env.LIBPATH:
		append('CXXFLAGS','/AI%s'%lib_path)
@feature('winphoneapp')
@after_method('process_use')
@after_method('propagate_uselib_vars')
def make_winphone_app(self):
	# Enable the Windows Phone WINAPI family and add the phone-specific
	# linker inputs.  Bug fix: the original referenced the module-level
	# ``conf`` decorator here (``conf.env``), which has no ``env``
	# attribute and broke at build time; the task generator's own
	# environment (``self.env``) is the correct target.
	make_winapp(self,'WINAPI_FAMILY_PHONE_APP')
	self.env.append_unique('LINKFLAGS','/NODEFAULTLIB:ole32.lib')
	self.env.append_unique('LINKFLAGS','PhoneAppModelHost.lib')
@feature('winapp')
@after_method('process_use')
@after_method('propagate_uselib_vars')
def make_windows_app(self):
	# Configure the task generator for a Windows (desktop) Store app.
	make_winapp(self,'WINAPI_FAMILY_DESKTOP_APP')
| gpl-2.0 |
mandeepdhami/neutron | neutron/db/migration/alembic_migrations/mlnx_init_ops.py | 32 | 1999 | # Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# Initial operations for the Mellanox plugin
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Create the initial schema for the Mellanox (mlnx) plugin.

    Tables: ``segmentation_id_allocation`` (per-physical-network segment
    pool), ``mlnx_network_bindings`` (network to segment mapping) and
    ``port_profile`` (per-port vnic type).
    """
    op.create_table(
        'segmentation_id_allocation',
        sa.Column('physical_network', sa.String(length=64), nullable=False),
        sa.Column('segmentation_id', sa.Integer(), autoincrement=False,
                  nullable=False),
        sa.Column('allocated', sa.Boolean(), nullable=False,
                  server_default=sa.sql.false()),
        sa.PrimaryKeyConstraint('physical_network', 'segmentation_id'))
    op.create_table(
        'mlnx_network_bindings',
        sa.Column('network_id', sa.String(length=36), nullable=False),
        sa.Column('network_type', sa.String(length=32), nullable=False),
        sa.Column('physical_network', sa.String(length=64), nullable=True),
        sa.Column('segmentation_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['network_id'], ['networks.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('network_id'))
    op.create_table(
        'port_profile',
        sa.Column('port_id', sa.String(length=36), nullable=False),
        sa.Column('vnic_type', sa.String(length=32), nullable=False),
        sa.ForeignKeyConstraint(['port_id'], ['ports.id'],
                                ondelete='CASCADE'),
        sa.PrimaryKeyConstraint('port_id'))
| apache-2.0 |
bailey1234/hyeri7846 | lib/werkzeug/contrib/atom.py | 311 | 15281 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.atom
~~~~~~~~~~~~~~~~~~~~~
This module provides a class called :class:`AtomFeed` which can be
used to generate feeds in the Atom syndication format (see :rfc:`4287`).
Example::
def atom_feed(request):
feed = AtomFeed("My Blog", feed_url=request.url,
url=request.host_url,
subtitle="My example blog for a feed test.")
for post in Post.query.limit(10).all():
feed.add(post.title, post.body, content_type='html',
author=post.author, url=post.url, id=post.uid,
updated=post.last_update, published=post.pub_date)
return feed.get_response()
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from datetime import datetime
from werkzeug.utils import escape
from werkzeug.wrappers import BaseResponse
from werkzeug._compat import implements_to_string, string_types
XHTML_NAMESPACE = 'http://www.w3.org/1999/xhtml'
def _make_text_block(name, content, content_type=None):
    """Render an Atom text construct as a single XML element string.

    ``'xhtml'`` content is embedded unescaped inside a div carrying the
    XHTML namespace; content without a type is escaped and emitted as a
    plain element; any other type is escaped and tagged with a ``type``
    attribute.
    """
    if content_type == 'xhtml':
        return u'<%s type="xhtml"><div xmlns="%s">%s</div></%s>\n' % (
            name, XHTML_NAMESPACE, content, name)
    escaped = escape(content)
    if content_type:
        return u'<%s type="%s">%s</%s>\n' % (name, content_type, escaped, name)
    return u'<%s>%s</%s>\n' % (name, escaped, name)
def format_iso8601(obj):
    """Serialize a datetime as an ISO8601 timestamp with a ``Z`` suffix."""
    return '{0:%Y-%m-%dT%H:%M:%SZ}'.format(obj)
@implements_to_string
class AtomFeed(object):
    """A helper class that creates Atom feeds.
    :param title: the title of the feed. Required.
    :param title_type: the type attribute for the title element. One of
                       ``'html'``, ``'text'`` or ``'xhtml'``.
    :param url: the url for the feed (not the url *of* the feed)
    :param id: a globally unique id for the feed. Must be an URI. If
               not present the `feed_url` is used, but one of both is
               required.
    :param updated: the time the feed was modified the last time. Must
                    be a :class:`datetime.datetime` object. If not
                    present the latest entry's `updated` is used.
    :param feed_url: the URL to the feed. Should be the URL that was
                     requested.
    :param author: the author of the feed. Must be either a string (the
                   name) or a dict with name (required) and uri or
                   email (both optional). Can be a list of (may be
                   mixed, too) strings and dicts, too, if there are
                   multiple authors. Required if not every entry has an
                   author element.
    :param icon: an icon for the feed.
    :param logo: a logo for the feed.
    :param rights: copyright information for the feed.
    :param rights_type: the type attribute for the rights element. One of
                        ``'html'``, ``'text'`` or ``'xhtml'``. Default is
                        ``'text'``.
    :param subtitle: a short description of the feed.
    :param subtitle_type: the type attribute for the subtitle element.
                          One of ``'text'``, ``'html'``, ``'text'``
                          or ``'xhtml'``. Default is ``'text'``.
    :param links: additional links. Must be a list of dictionaries with
                  href (required) and rel, type, hreflang, title, length
                  (all optional)
    :param generator: the software that generated this feed. This must be
                      a tuple in the form ``(name, url, version)``. If
                      you don't want to specify one of them, set the item
                      to `None`.
    :param entries: a list with the entries for the feed. Entries can also
                    be added later with :meth:`add`.
    For more information on the elements see
    http://www.atomenabled.org/developers/syndication/
    Everywhere where a list is demanded, any iterable can be used.
    """
    # Used when no explicit generator tuple is supplied.
    default_generator = ('Werkzeug', None, None)
    def __init__(self, title=None, entries=None, **kwargs):
        self.title = title
        self.title_type = kwargs.get('title_type', 'text')
        self.url = kwargs.get('url')
        self.feed_url = kwargs.get('feed_url', self.url)
        self.id = kwargs.get('id', self.feed_url)
        self.updated = kwargs.get('updated')
        self.author = kwargs.get('author', ())
        self.icon = kwargs.get('icon')
        self.logo = kwargs.get('logo')
        self.rights = kwargs.get('rights')
        self.rights_type = kwargs.get('rights_type')
        self.subtitle = kwargs.get('subtitle')
        self.subtitle_type = kwargs.get('subtitle_type', 'text')
        self.generator = kwargs.get('generator')
        if self.generator is None:
            self.generator = self.default_generator
        self.links = kwargs.get('links', [])
        self.entries = entries and list(entries) or []
        # Normalize *author* into a list of dicts with at least 'name'.
        if not hasattr(self.author, '__iter__') \
           or isinstance(self.author, string_types + (dict,)):
            self.author = [self.author]
        for i, author in enumerate(self.author):
            if not isinstance(author, dict):
                self.author[i] = {'name': author}
        if not self.title:
            raise ValueError('title is required')
        if not self.id:
            raise ValueError('id is required')
        for author in self.author:
            if 'name' not in author:
                raise TypeError('author must contain at least a name')
    def add(self, *args, **kwargs):
        """Add a new entry to the feed. This function can either be called
        with a :class:`FeedEntry` or some keyword and positional arguments
        that are forwarded to the :class:`FeedEntry` constructor.
        """
        if len(args) == 1 and not kwargs and isinstance(args[0], FeedEntry):
            self.entries.append(args[0])
        else:
            # Entries inherit the feed URL as their default xml:base.
            kwargs['feed_url'] = self.feed_url
            self.entries.append(FeedEntry(*args, **kwargs))
    def __repr__(self):
        return '<%s %r (%d entries)>' % (
            self.__class__.__name__,
            self.title,
            len(self.entries)
        )
    def generate(self):
        """Return a generator that yields pieces of XML."""
        # atom demands either an author element in every entry or a global one
        if not self.author:
            if False in map(lambda e: bool(e.author), self.entries):
                self.author = ({'name': 'Unknown author'},)
        if not self.updated:
            # Fall back to the newest entry date, or "now" for an
            # empty feed.
            dates = sorted([entry.updated for entry in self.entries])
            self.updated = dates and dates[-1] or datetime.utcnow()
        yield u'<?xml version="1.0" encoding="utf-8"?>\n'
        yield u'<feed xmlns="http://www.w3.org/2005/Atom">\n'
        yield ' ' + _make_text_block('title', self.title, self.title_type)
        yield u' <id>%s</id>\n' % escape(self.id)
        yield u' <updated>%s</updated>\n' % format_iso8601(self.updated)
        if self.url:
            yield u' <link href="%s" />\n' % escape(self.url)
        if self.feed_url:
            yield u' <link href="%s" rel="self" />\n' % \
                escape(self.feed_url)
        for link in self.links:
            yield u' <link %s/>\n' % ''.join('%s="%s" ' % \
                (k, escape(link[k])) for k in link)
        for author in self.author:
            yield u' <author>\n'
            yield u' <name>%s</name>\n' % escape(author['name'])
            if 'uri' in author:
                yield u' <uri>%s</uri>\n' % escape(author['uri'])
            if 'email' in author:
                yield ' <email>%s</email>\n' % escape(author['email'])
            yield ' </author>\n'
        if self.subtitle:
            yield ' ' + _make_text_block('subtitle', self.subtitle,
                                         self.subtitle_type)
        if self.icon:
            yield u' <icon>%s</icon>\n' % escape(self.icon)
        if self.logo:
            yield u' <logo>%s</logo>\n' % escape(self.logo)
        if self.rights:
            yield ' ' + _make_text_block('rights', self.rights,
                                         self.rights_type)
        generator_name, generator_url, generator_version = self.generator
        if generator_name or generator_url or generator_version:
            tmp = [u' <generator']
            if generator_url:
                tmp.append(u' uri="%s"' % escape(generator_url))
            if generator_version:
                tmp.append(u' version="%s"' % escape(generator_version))
            tmp.append(u'>%s</generator>\n' % escape(generator_name))
            yield u''.join(tmp)
        for entry in self.entries:
            for line in entry.generate():
                yield u' ' + line
        yield u'</feed>\n'
    def to_string(self):
        """Convert the feed into a string."""
        return u''.join(self.generate())
    def get_response(self):
        """Return a response object for the feed."""
        return BaseResponse(self.to_string(), mimetype='application/atom+xml')
    def __call__(self, environ, start_response):
        """Use the class as WSGI response object."""
        return self.get_response()(environ, start_response)
    def __str__(self):
        return self.to_string()
@implements_to_string
class FeedEntry(object):
    """Represents a single entry in a feed.
    :param title: the title of the entry. Required.
    :param title_type: the type attribute for the title element.  One of
                       ``'html'``, ``'text'`` or ``'xhtml'``.
    :param content: the content of the entry.
    :param content_type: the type attribute for the content element.  One
                         of ``'html'``, ``'text'`` or ``'xhtml'``.
    :param summary: a summary of the entry's content.
    :param summary_type: the type attribute for the summary element.  One
                         of ``'html'``, ``'text'`` or ``'xhtml'``.
    :param url: the url for the entry.
    :param id: a globally unique id for the entry.  Must be an URI.  If
               not present the URL is used, but one of both is required.
    :param updated: the time the entry was modified the last time.  Must
                    be a :class:`datetime.datetime` object.  Required.
    :param author: the author of the entry.  Must be either a string (the
                   name) or a dict with name (required) and uri or
                   email (both optional).  Can be a list of (may be
                   mixed, too) strings and dicts, too, if there are
                   multiple authors.  Required if the feed does not have an
                   author element.
    :param published: the time the entry was initially published.  Must
                      be a :class:`datetime.datetime` object.
    :param rights: copyright information for the entry.
    :param rights_type: the type attribute for the rights element.  One of
                        ``'html'``, ``'text'`` or ``'xhtml'``.  Default is
                        ``'text'``.
    :param links: additional links.  Must be a list of dictionaries with
                  href (required) and rel, type, hreflang, title, length
                  (all optional)
    :param categories: categories for the entry. Must be a list of dictionaries
                       with term (required), scheme and label (all optional)
    :param xml_base: The xml base (url) for this feed item.  If not provided
                     it will default to the item url.
    For more information on the elements see
    http://www.atomenabled.org/developers/syndication/
    Everywhere where a list is demanded, any iterable can be used.
    """
    def __init__(self, title=None, content=None, feed_url=None, **kwargs):
        self.title = title
        self.title_type = kwargs.get('title_type', 'text')
        self.content = content
        self.content_type = kwargs.get('content_type', 'html')
        self.url = kwargs.get('url')
        # The id falls back to the URL; at least one of the two is required.
        self.id = kwargs.get('id', self.url)
        self.updated = kwargs.get('updated')
        self.summary = kwargs.get('summary')
        self.summary_type = kwargs.get('summary_type', 'html')
        self.author = kwargs.get('author', ())
        self.published = kwargs.get('published')
        # NOTE(review): `rights` is accepted and stored but never emitted by
        # generate() below -- confirm whether that is intentional.
        self.rights = kwargs.get('rights')
        self.links = kwargs.get('links', [])
        self.categories = kwargs.get('categories', [])
        # xml:base defaults to the owning feed's URL when not given explicitly.
        self.xml_base = kwargs.get('xml_base', feed_url)
        # Normalize `author` to a list of dicts: a bare string or a single
        # dict is first wrapped in a list, then plain names are converted
        # to {'name': ...} entries in place.
        if not hasattr(self.author, '__iter__') \
           or isinstance(self.author, string_types + (dict,)):
            self.author = [self.author]
        for i, author in enumerate(self.author):
            if not isinstance(author, dict):
                self.author[i] = {'name': author}
        if not self.title:
            raise ValueError('title is required')
        if not self.id:
            raise ValueError('id is required')
        if not self.updated:
            raise ValueError('updated is required')
    def __repr__(self):
        return '<%s %r>' % (
            self.__class__.__name__,
            self.title
        )
    def generate(self):
        """Yields pieces of ATOM XML."""
        base = ''
        if self.xml_base:
            base = ' xml:base="%s"' % escape(self.xml_base)
        yield u'<entry%s>\n' % base
        yield u'  ' + _make_text_block('title', self.title, self.title_type)
        yield u'  <id>%s</id>\n' % escape(self.id)
        yield u'  <updated>%s</updated>\n' % format_iso8601(self.updated)
        if self.published:
            yield u'  <published>%s</published>\n' % \
                format_iso8601(self.published)
        if self.url:
            yield u'  <link href="%s" />\n' % escape(self.url)
        for author in self.author:
            yield u'  <author>\n'
            yield u'    <name>%s</name>\n' % escape(author['name'])
            if 'uri' in author:
                yield u'    <uri>%s</uri>\n' % escape(author['uri'])
            if 'email' in author:
                yield u'    <email>%s</email>\n' % escape(author['email'])
            yield u'  </author>\n'
        # Emit each link/category dict as attributes on a self-closing tag.
        for link in self.links:
            yield u'  <link %s/>\n' % ''.join('%s="%s" ' % \
                (k, escape(link[k])) for k in link)
        for category in self.categories:
            yield u'  <category %s/>\n' % ''.join('%s="%s" ' % \
                (k, escape(category[k])) for k in category)
        if self.summary:
            yield u'  ' + _make_text_block('summary', self.summary,
                                           self.summary_type)
        if self.content:
            yield u'  ' + _make_text_block('content', self.content,
                                           self.content_type)
        yield u'</entry>\n'
    def to_string(self):
        """Convert the feed item into a unicode object."""
        return u''.join(self.generate())
    def __str__(self):
        return self.to_string()
| apache-2.0 |
pgoeser/gnuradio | gnuradio-examples/python/digital/benchmark_qt_loopback2.py | 5 | 19176 | #!/usr/bin/env python
#
# Copyright 2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gru, modulation_utils2
from gnuradio import eng_notation
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import random, time, struct, sys, os, math
from threading import Thread
# from current dir
from transmit_path import transmit_path
from receive_path import receive_path
try:
from gnuradio.qtgui import qtgui
from PyQt4 import QtGui, QtCore
import sip
except ImportError:
print "Please install gr-qtgui."
sys.exit(1)
try:
from qt_digital_window2 import Ui_DigitalWindow
except ImportError:
print "Error: could not find qt_digital_window2.py:"
print "\t\"pyuic4 qt_digital_window2.ui -o qt_digital_window2.py\""
sys.exit(1)
#print os.getpid()
#raw_input()
# ////////////////////////////////////////////////////////////////////
# Define the QT Interface and Control Dialog
# ////////////////////////////////////////////////////////////////////
class dialog_box(QtGui.QMainWindow):
    """Main control window: hosts the two QT sinks and exposes edit boxes
    that push channel/receiver parameters into the flowgraph ``fg``."""
    def __init__(self, snkTx, snkRx, fg, parent=None):
        QtGui.QWidget.__init__(self, parent)
        self.gui = Ui_DigitalWindow()
        self.gui.setupUi(self)
        self.fg = fg
        # Seed every edit box from the flowgraph's current settings.
        self.set_sample_rate(self.fg.sample_rate())
        self.set_snr(self.fg.snr())
        self.set_frequency(self.fg.frequency_offset())
        self.set_time_offset(self.fg.timing_offset())
        self.set_gain_clock(self.fg.rx_gain_clock())
        self.set_gain_phase(self.fg.rx_gain_phase())
        self.set_gain_freq(self.fg.rx_gain_freq())
        # Add the qtsnk widgets to the hlayout box
        self.gui.sinkLayout.addWidget(snkTx)
        self.gui.sinkLayout.addWidget(snkRx)
        # Connect up some signals
        self.connect(self.gui.pauseButton, QtCore.SIGNAL("clicked()"),
                     self.pauseFg)
        self.connect(self.gui.sampleRateEdit, QtCore.SIGNAL("editingFinished()"),
                     self.sampleRateEditText)
        self.connect(self.gui.snrEdit, QtCore.SIGNAL("editingFinished()"),
                     self.snrEditText)
        self.connect(self.gui.freqEdit, QtCore.SIGNAL("editingFinished()"),
                     self.freqEditText)
        self.connect(self.gui.timeEdit, QtCore.SIGNAL("editingFinished()"),
                     self.timeEditText)
        self.connect(self.gui.gainClockEdit, QtCore.SIGNAL("editingFinished()"),
                     self.gainClockEditText)
        self.connect(self.gui.gainPhaseEdit, QtCore.SIGNAL("editingFinished()"),
                     self.gainPhaseEditText)
        self.connect(self.gui.gainFreqEdit, QtCore.SIGNAL("editingFinished()"),
                     self.gainFreqEditText)
        # Build a timer to update the packet number and PER fields
        self.update_delay = 250  # time between updating packet rate fields
        self.pkt_timer = QtCore.QTimer(self)
        self.connect(self.pkt_timer, QtCore.SIGNAL("timeout()"),
                     self.updatePacketInfo)
        self.pkt_timer.start(self.update_delay)
    def pauseFg(self):
        # Toggle between a stopped and a running flowgraph; the button's
        # label doubles as the state flag.
        if(self.gui.pauseButton.text() == "Pause"):
            self.fg.stop()
            self.fg.wait()
            self.gui.pauseButton.setText("Unpause")
        else:
            self.fg.start()
            self.gui.pauseButton.setText("Pause")
    # Accessor functions for Gui to manipulate system parameters
    def set_sample_rate(self, sr):
        ssr = eng_notation.num_to_str(sr)
        self.gui.sampleRateEdit.setText(QtCore.QString("%1").arg(ssr))
    def sampleRateEditText(self):
        try:
            rate = self.gui.sampleRateEdit.text().toAscii()
            srate = eng_notation.str_to_num(rate)
            #self.fg.set_sample_rate(srate)
        except RuntimeError:
            pass
    # Accessor functions for Gui to manipulate channel model
    def set_snr(self, snr):
        self.gui.snrEdit.setText(QtCore.QString("%1").arg(snr))
    def set_frequency(self, fo):
        self.gui.freqEdit.setText(QtCore.QString("%1").arg(fo))
    def set_time_offset(self, to):
        self.gui.timeEdit.setText(QtCore.QString("%1").arg(to))
    def snrEditText(self):
        # toDouble() returns (value, ok); [0] keeps the value only.
        try:
            snr = self.gui.snrEdit.text().toDouble()[0]
            self.fg.set_snr(snr)
        except RuntimeError:
            pass
    def freqEditText(self):
        try:
            freq = self.gui.freqEdit.text().toDouble()[0]
            self.fg.set_frequency_offset(freq)
        except RuntimeError:
            pass
    def timeEditText(self):
        try:
            to = self.gui.timeEdit.text().toDouble()[0]
            self.fg.set_timing_offset(to)
        except RuntimeError:
            pass
    # Accessor functions for Gui to manipulate receiver parameters
    def set_gain_clock(self, gain):
        self.gui.gainClockEdit.setText(QtCore.QString("%1").arg(gain))
    def set_gain_phase(self, gain_phase):
        self.gui.gainPhaseEdit.setText(QtCore.QString("%1").arg(gain_phase))
    def set_gain_freq(self, gain_freq):
        self.gui.gainFreqEdit.setText(QtCore.QString("%1").arg(gain_freq))
    def set_alpha_time(self, alpha):
        self.gui.alphaTimeEdit.setText(QtCore.QString("%1").arg(alpha))
    def set_beta_time(self, beta):
        self.gui.betaTimeEdit.setText(QtCore.QString("%1").arg(beta))
    def set_alpha_phase(self, alpha):
        self.gui.alphaPhaseEdit.setText(QtCore.QString("%1").arg(alpha))
    def gainPhaseEditText(self):
        try:
            gain_phase = self.gui.gainPhaseEdit.text().toDouble()[0]
            self.fg.set_rx_gain_phase(gain_phase)
        except RuntimeError:
            pass
    def gainClockEditText(self):
        try:
            gain = self.gui.gainClockEdit.text().toDouble()[0]
            self.fg.set_rx_gain_clock(gain)
        except RuntimeError:
            pass
    def gainFreqEditText(self):
        try:
            gain = self.gui.gainFreqEdit.text().toDouble()[0]
            self.fg.set_rx_gain_freq(gain)
        except RuntimeError:
            pass
    # Accessor functions for packet error reporting
    def updatePacketInfo(self):
        # Pull these globals in from the main thread
        global n_rcvd, n_right, pktno
        # Packet error rate = fraction of received packets that were corrupt.
        if(pktno > 0):
            per = float(n_rcvd - n_right)/float(pktno)
        else:
            per = 0
        self.gui.pktsRcvdEdit.setText(QtCore.QString("%1").arg(n_rcvd))
        self.gui.pktsCorrectEdit.setText(QtCore.QString("%1").arg(n_right))
        self.gui.perEdit.setText(QtCore.QString("%1").arg(float(per), 0, 'e', 4))
# ////////////////////////////////////////////////////////////////////
# Define the GNU Radio Top Block
# ////////////////////////////////////////////////////////////////////
class my_top_block(gr.top_block):
    """Loopback flowgraph: transmit path -> throttle -> channel model ->
    receive path, with optional QT GUI sinks tapped off the channel and
    the demodulator's phase recovery block."""
    def __init__(self, mod_class, demod_class, rx_callback, options):
        gr.top_block.__init__(self)
        self._sample_rate = options.sample_rate
        channelon = True;
        self.gui_on = options.gui
        self._frequency_offset = options.frequency_offset
        self._timing_offset = options.timing_offset
        self._tx_amplitude = options.tx_amplitude
        self._snr_dB = options.snr
        self._noise_voltage = self.get_noise_voltage(self._snr_dB)
        # With new interface, sps does not get set by default, but
        # in the loopback, we don't recalculate it; so just force it here
        if(options.samples_per_symbol == None):
            options.samples_per_symbol = 2
        self.txpath = transmit_path(mod_class, options)
        self.throttle = gr.throttle(gr.sizeof_gr_complex, self.sample_rate())
        self.rxpath = receive_path(demod_class, rx_callback, options)
        # FIXME: do better exposure to lower issues for control
        self._gain_clock = self.rxpath.packet_receiver._demodulator._timing_alpha
        self._gain_phase = self.rxpath.packet_receiver._demodulator._phase_alpha
        self._gain_freq = self.rxpath.packet_receiver._demodulator._freq_alpha
        if channelon:
            self.channel = gr.channel_model(self._noise_voltage,
                                            self.frequency_offset(),
                                            self.timing_offset())
            if options.discontinuous:
                # Burst mode: interleave 5-packet bursts with a long run of
                # zeros through a stream mux to simulate gaps on the air.
                z = 20000*[0,]
                self.zeros = gr.vector_source_c(z, True)
                packet_size = 5*((4+8+4+1500+4) * 8)
                self.mux = gr.stream_mux(gr.sizeof_gr_complex, [packet_size-0, int(9e5)])
                # Connect components
                self.connect(self.txpath, self.throttle, (self.mux,0))
                self.connect(self.zeros, (self.mux,1))
                self.connect(self.mux, self.channel, self.rxpath)
            else:
                self.connect(self.txpath, self.throttle, self.channel, self.rxpath)
            if self.gui_on:
                self.qapp = QtGui.QApplication(sys.argv)
                fftsize = 2048
                self.snk_tx = qtgui.sink_c(fftsize, gr.firdes.WIN_BLACKMAN_hARRIS,
                                           0, 1,
                                           "Tx", True, True, False, True, True)
                self.snk_rx = qtgui.sink_c(fftsize, gr.firdes.WIN_BLACKMAN_hARRIS,
                                           0, 1,
                                           "Rx", True, True, False, True, True)
                self.snk_tx.set_frequency_axis(-80, 0)
                self.snk_rx.set_frequency_axis(-60, 20)
                # Keep local handles on the recovery loops so the GUI can
                # retune them while the graph runs.
                self.freq_recov = self.rxpath.packet_receiver._demodulator.freq_recov
                self.phase_recov = self.rxpath.packet_receiver._demodulator.phase_recov
                self.time_recov = self.rxpath.packet_receiver._demodulator.time_recov
                self.freq_recov.set_alpha(self._gain_freq)
                self.freq_recov.set_beta(self._gain_freq/10.0)
                self.phase_recov.set_alpha(self._gain_phase)
                self.phase_recov.set_beta(0.25*self._gain_phase*self._gain_phase)
                self.time_recov.set_alpha(self._gain_clock)
                self.time_recov.set_beta(0.25*self._gain_clock*self._gain_clock)
                # Connect to the QT sinks
                # FIXME: make better exposure to receiver from rxpath
                self.connect(self.channel, self.snk_tx)
                self.connect(self.phase_recov, self.snk_rx)
                #self.connect(self.freq_recov, self.snk_rx)
                pyTxQt = self.snk_tx.pyqwidget()
                pyTx = sip.wrapinstance(pyTxQt, QtGui.QWidget)
                pyRxQt = self.snk_rx.pyqwidget()
                pyRx = sip.wrapinstance(pyRxQt, QtGui.QWidget)
                self.main_box = dialog_box(pyTx, pyRx, self)
                self.main_box.show()
        else:
            # NOTE(review): with channelon hard-coded True this branch is
            # dead; if it ever runs with options.gui set, self.qapp is never
            # created and main() would fail on tb.qapp -- confirm.
            # Connect components
            self.connect(self.txpath, self.throttle, self.rxpath)
    # System Parameters
    def sample_rate(self):
        return self._sample_rate
    def set_sample_rate(self, sr):
        self._sample_rate = sr
        #self.throttle.set_samples_per_second(self._sample_rate)
    # Channel Model Parameters
    def snr(self):
        return self._snr_dB
    def set_snr(self, snr):
        self._snr_dB = snr
        self._noise_voltage = self.get_noise_voltage(self._snr_dB)
        self.channel.set_noise_voltage(self._noise_voltage)
    def get_noise_voltage(self, SNR):
        # Derive the AWGN amplitude needed for the requested SNR (in dB)
        # given the fixed transmit amplitude.
        snr = 10.0**(SNR/10.0)
        power_in_signal = abs(self._tx_amplitude)**2
        noise_power = power_in_signal/snr
        noise_voltage = math.sqrt(noise_power)
        return noise_voltage
    def frequency_offset(self):
        # Stored normalized to the sample rate; reported in Hz.
        return self._frequency_offset * self.sample_rate()
    def set_frequency_offset(self, fo):
        self._frequency_offset = fo / self.sample_rate()
        self.channel.set_frequency_offset(self._frequency_offset)
    def timing_offset(self):
        return self._timing_offset
    def set_timing_offset(self, to):
        self._timing_offset = to
        self.channel.set_timing_offset(self._timing_offset)
    # Receiver Parameters
    def rx_gain_clock(self):
        return self._gain_clock
    def rx_gain_clock_beta(self):
        return self._gain_clock_beta
    def set_rx_gain_clock(self, gain):
        # Critically-damped loop: beta = alpha^2 / 4.
        self._gain_clock = gain
        self._gain_clock_beta = .25 * self._gain_clock * self._gain_clock
        self.rxpath.packet_receiver._demodulator.time_recov.set_alpha(self._gain_clock)
        self.rxpath.packet_receiver._demodulator.time_recov.set_beta(self._gain_clock_beta)
    def rx_gain_phase(self):
        return self._gain_phase
    def rx_gain_phase_beta(self):
        return self._gain_phase_beta
    def set_rx_gain_phase(self, gain_phase):
        self._gain_phase = gain_phase
        self._gain_phase_beta = .25 * self._gain_phase * self._gain_phase
        self.rxpath.packet_receiver._demodulator.phase_recov.set_alpha(self._gain_phase)
        self.rxpath.packet_receiver._demodulator.phase_recov.set_beta(self._gain_phase_beta)
    def rx_gain_freq(self):
        return self._gain_freq
    def set_rx_gain_freq(self, gain_freq):
        self._gain_freq = gain_freq
        #self._gain_freq_beta = .25 * self._gain_freq * self._gain_freq
        self.rxpath.packet_receiver._demodulator.freq_recov.set_alpha(self._gain_freq)
        self.rxpath.packet_receiver._demodulator.freq_recov.set_beta(self._gain_freq/10.0)
        #self.rxpath.packet_receiver._demodulator.freq_recov.set_beta(self._gain_fre_beta)
# /////////////////////////////////////////////////////////////////////////////
# Thread to handle the packet sending procedure
# Operates in parallel with qApp.exec_()
# /////////////////////////////////////////////////////////////////////////////
class th_send(Thread):
    """Background thread that feeds numbered test packets into the flowgraph.

    Each packet is a 2-byte big-endian sequence number followed by filler
    bytes repeating the low byte of that number; an EOF marker is sent once
    the requested number of megabytes has gone out.
    """
    def __init__(self, send_fnc, megs, sz):
        Thread.__init__(self)
        self.send = send_fnc
        self.nbytes = int(1e6 * megs)   # total payload budget in bytes
        self.pkt_size = int(sz)

    def run(self):
        bytes_sent = 0
        pktno = 0
        while bytes_sent < self.nbytes:
            header = struct.pack('!H', pktno & 0xffff)
            filler = chr(pktno & 0xff) * (self.pkt_size - 2)
            self.send(header + filler)
            bytes_sent += self.pkt_size
            pktno += 1
        # Signal end-of-transmission to the transmit path.
        self.send(eof=True)

    def stop(self):
        """Ask run() to finish early by zeroing the remaining byte budget."""
        self.nbytes = 0
# /////////////////////////////////////////////////////////////////////////////
# main
# /////////////////////////////////////////////////////////////////////////////
def main():
    # Shared packet counters, updated from the receiver callback thread and
    # read by the GUI timer / console printout.
    global n_rcvd, n_right, pktno
    n_rcvd = 0
    n_right = 0
    pktno = 0
    def rx_callback(ok, payload):
        # Invoked by the receive path for every decoded packet; `ok` is the
        # CRC result and the first two payload bytes carry the sequence no.
        global n_rcvd, n_right, pktno
        (pktno,) = struct.unpack('!H', payload[0:2])
        n_rcvd += 1
        if ok:
            n_right += 1
        if not options.gui:
            print "ok = %5s  pktno = %4d  n_rcvd = %4d  n_right = %4d" % (
                ok, pktno, n_rcvd, n_right)
    def send_pkt(payload='', eof=False):
        return tb.txpath.send_pkt(payload, eof)
    mods = modulation_utils2.type_1_mods()
    demods = modulation_utils2.type_1_demods()
    # Command-line handling: generic, expert and channel-model option groups,
    # plus whatever each modulator/demodulator wants to register.
    parser = OptionParser(option_class=eng_option, conflict_handler="resolve")
    expert_grp = parser.add_option_group("Expert")
    channel_grp = parser.add_option_group("Channel")
    parser.add_option("-m", "--modulation", type="choice", choices=mods.keys(),
                      default='dbpsk2',
                      help="Select modulation from: %s [default=%%default]"
                            % (', '.join(mods.keys()),))
    parser.add_option("-s", "--size", type="eng_float", default=1500,
                      help="set packet size [default=%default]")
    parser.add_option("-M", "--megabytes", type="eng_float", default=1.0,
                      help="set megabytes to transmit [default=%default]")
    parser.add_option("","--discontinuous", action="store_true", default=False,
                      help="enable discontinous transmission (bursts of 5 packets)")
    parser.add_option("-G", "--gui", action="store_true", default=False,
                      help="Turn on the GUI [default=%default]")
    channel_grp.add_option("", "--sample-rate", type="eng_float", default=1e5,
                           help="set speed of channel/simulation rate to RATE [default=%default]")
    channel_grp.add_option("", "--snr", type="eng_float", default=30,
                           help="set the SNR of the channel in dB [default=%default]")
    channel_grp.add_option("", "--frequency-offset", type="eng_float", default=0,
                           help="set frequency offset introduced by channel [default=%default]")
    channel_grp.add_option("", "--timing-offset", type="eng_float", default=1.0,
                           help="set timing offset between Tx and Rx [default=%default]")
    channel_grp.add_option("", "--seed", action="store_true", default=False,
                           help="use a random seed for AWGN noise [default=%default]")
    transmit_path.add_options(parser, expert_grp)
    receive_path.add_options(parser, expert_grp)
    for mod in mods.values():
        mod.add_options(expert_grp)
    for demod in demods.values():
        demod.add_options(expert_grp)
    (options, args) = parser.parse_args ()
    if len(args) != 0:
        parser.print_help()
        sys.exit(1)
    # Realtime scheduling keeps the throttled simulation from stuttering;
    # failure is non-fatal.
    r = gr.enable_realtime_scheduling()
    if r != gr.RT_OK:
        print "Warning: failed to enable realtime scheduling"
    # Create an instance of a hierarchical block
    tb = my_top_block(mods[options.modulation],
                      demods[options.modulation],
                      rx_callback, options)
    tb.start()
    # Packet generation runs in its own thread so the Qt event loop (or the
    # join loop below) can own the main thread.
    packet_sender = th_send(send_pkt, options.megabytes, options.size)
    packet_sender.start()
    if(options.gui):
        tb.qapp.exec_()
        packet_sender.stop()
    else:
        # Process until done; hack in to the join to stop on an interrupt
        while(packet_sender.isAlive()):
            try:
                packet_sender.join(1)
            except KeyboardInterrupt:
                packet_sender.stop()
if __name__ == '__main__':
    try:
        main()
    except KeyboardInterrupt:
        # Exit quietly on Ctrl-C instead of dumping a traceback.
        pass
| gpl-3.0 |
thaim/ansible | lib/ansible/parsing/utils/addresses.py | 241 | 8167 | # Copyright 2015 Abhijit Menon-Sen <ams@2ndQuadrant.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import re
from ansible.errors import AnsibleParserError, AnsibleError
# Components that match a numeric or alphanumeric begin:end or begin:end:step
# range expression inside square brackets.
numeric_range = r'''
\[
(?:[0-9]+:[0-9]+) # numeric begin:end
(?::[0-9]+)? # numeric :step (optional)
\]
'''
hexadecimal_range = r'''
\[
(?:[0-9a-f]+:[0-9a-f]+) # hexadecimal begin:end
(?::[0-9]+)? # numeric :step (optional)
\]
'''
alphanumeric_range = r'''
\[
(?:
[a-z]:[a-z]| # one-char alphabetic range
[0-9]+:[0-9]+ # ...or a numeric one
)
(?::[0-9]+)? # numeric :step (optional)
\]
'''
# Components that match a 16-bit portion of an IPv6 address in hexadecimal
# notation (0..ffff) or an 8-bit portion of an IPv4 address in decimal notation
# (0..255) or an [x:y(:z)] numeric range.
ipv6_component = r'''
(?:
[0-9a-f]{{1,4}}| # 0..ffff
{range} # or a numeric range
)
'''.format(range=hexadecimal_range)
ipv4_component = r'''
(?:
[01]?[0-9]{{1,2}}| # 0..199
2[0-4][0-9]| # 200..249
25[0-5]| # 250..255
{range} # or a numeric range
)
'''.format(range=numeric_range)
# A hostname label, e.g. 'foo' in 'foo.example.com'. Consists of alphanumeric
# characters plus dashes (and underscores) or valid ranges. The label may not
# start or end with a hyphen or an underscore. This is interpolated into the
# hostname pattern below. We don't try to enforce the 63-char length limit.
label = r'''
(?:[\w]|{range}) # Starts with an alphanumeric or a range
(?:[\w_-]|{range})* # Then zero or more of the same or [_-]
(?<![_-]) # ...as long as it didn't end with [_-]
'''.format(range=alphanumeric_range)
patterns = {
# This matches a square-bracketed expression with a port specification. What
# is inside the square brackets is validated later.
'bracketed_hostport': re.compile(
r'''^
\[(.+)\] # [host identifier]
:([0-9]+) # :port number
$
''', re.X
),
# This matches a bare IPv4 address or hostname (or host pattern including
# [x:y(:z)] ranges) with a port specification.
'hostport': re.compile(
r'''^
((?: # We want to match:
[^:\[\]] # (a non-range character
| # ...or...
\[[^\]]*\] # a complete bracketed expression)
)*) # repeated as many times as possible
:([0-9]+) # followed by a port number
$
''', re.X
),
# This matches an IPv4 address, but also permits range expressions.
'ipv4': re.compile(
r'''^
(?:{i4}\.){{3}}{i4} # Three parts followed by dots plus one
$
'''.format(i4=ipv4_component), re.X | re.I
),
# This matches an IPv6 address, but also permits range expressions.
#
# This expression looks complex, but it really only spells out the various
# combinations in which the basic unit of an IPv6 address (0..ffff) can be
# written, from :: to 1:2:3:4:5:6:7:8, plus the IPv4-in-IPv6 variants such
# as ::ffff:192.0.2.3.
#
# Note that we can't just use ipaddress.ip_address() because we also have to
# accept ranges in place of each component.
'ipv6': re.compile(
r'''^
(?:{0}:){{7}}{0}| # uncompressed: 1:2:3:4:5:6:7:8
(?:{0}:){{1,6}}:| # compressed variants, which are all
(?:{0}:)(?::{0}){{1,6}}| # a::b for various lengths of a,b
(?:{0}:){{2}}(?::{0}){{1,5}}|
(?:{0}:){{3}}(?::{0}){{1,4}}|
(?:{0}:){{4}}(?::{0}){{1,3}}|
(?:{0}:){{5}}(?::{0}){{1,2}}|
(?:{0}:){{6}}(?::{0})| # ...all with 2 <= a+b <= 7
:(?::{0}){{1,6}}| # ::ffff(:ffff...)
{0}?::| # ffff::, ::
# ipv4-in-ipv6 variants
(?:0:){{6}}(?:{0}\.){{3}}{0}|
::(?:ffff:)?(?:{0}\.){{3}}{0}|
(?:0:){{5}}ffff:(?:{0}\.){{3}}{0}
$
'''.format(ipv6_component), re.X | re.I
),
# This matches a hostname or host pattern including [x:y(:z)] ranges.
#
# We roughly follow DNS rules here, but also allow ranges (and underscores).
# In the past, no systematic rules were enforced about inventory hostnames,
# but the parsing context (e.g. shlex.split(), fnmatch.fnmatch()) excluded
# various metacharacters anyway.
#
# We don't enforce DNS length restrictions here (63 characters per label,
# 253 characters total) or make any attempt to process IDNs.
'hostname': re.compile(
r'''^
{label} # We must have at least one label
(?:\.{label})* # Followed by zero or more .labels
$
'''.format(label=label), re.X | re.I | re.UNICODE
),
}
def parse_address(address, allow_ranges=False):
    """
    Takes a string and returns a (host, port) tuple. If the host is None, then
    the string could not be parsed as a host identifier with an optional port
    specification. If the port is None, then no port was specified.

    The host identifier may be a hostname (qualified or not), an IPv4 address,
    or an IPv6 address. If allow_ranges is True, then any of those may contain
    [x:y] range specifications, e.g. foo[1:3] or foo[0:5]-bar[x-z].

    The port number is an optional :NN suffix on an IPv4 address or host name,
    or a mandatory :NN suffix on any square-bracketed expression: IPv6 address,
    IPv4 address, or host name. (This means the only way to specify a port for
    an IPv6 address is to enclose it in square brackets.)

    :raises AnsibleError: if the host identifier matches none of the known
        host patterns.
    :raises AnsibleParserError: if the host contains a range but
        allow_ranges is False.
    """
    # First, we extract the port number if one is specified.
    port = None
    for matching in ['bracketed_hostport', 'hostport']:
        m = patterns[matching].match(address)
        if m:
            (address, port) = m.groups()
            port = int(port)
            # Bug fix: this used `continue`, which let the plain 'hostport'
            # pattern run again on the already-unbracketed address and could
            # steal a trailing :NN out of e.g. '[foo:8080]:22'. Stop at the
            # first successful match instead.
            break
    # What we're left with now must be an IPv4 or IPv6 address, possibly with
    # numeric ranges, or a hostname with alphanumeric ranges.
    host = None
    for matching in ['ipv4', 'ipv6', 'hostname']:
        m = patterns[matching].match(address)
        if m:
            host = address
            # One successful validation is enough; trying the remaining
            # patterns could only redundantly reassign the same value.
            break
    # If it isn't any of the above, we don't understand it.
    if not host:
        raise AnsibleError("Not a valid network hostname: %s" % address)
    # If we get to this point, we know that any included ranges are valid.
    # If the caller is prepared to handle them, all is well.
    # Otherwise we treat it as a parse failure.
    if not allow_ranges and '[' in host:
        raise AnsibleParserError("Detected range in host but was asked to ignore ranges")
    return (host, port)
| mit |
b-dollery/testing | lib/ansible/module_utils/a10.py | 322 | 4194 | # This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Copyright (c), Michael DeHaan <michael.dehaan@gmail.com>, 2012-2013
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Numeric protocol codes the A10 aXAPI expects for real-server ports.
AXAPI_PORT_PROTOCOLS = {
    'tcp': 2,
    'udp': 3,
}
# Numeric protocol codes for virtual ports; extends the base set with the
# HTTP(S) service types the aXAPI understands.
AXAPI_VPORT_PROTOCOLS = {
    'tcp': 2,
    'udp': 3,
    'fast-http': 9,
    'http': 11,
    'https': 12,
}
def a10_argument_spec():
    """Return the module argument spec fields shared by all A10 modules."""
    return {
        'host': dict(type='str', required=True),
        'username': dict(type='str', aliases=['user', 'admin'], required=True),
        'password': dict(type='str', aliases=['pass', 'pwd'], required=True, no_log=True),
        'write_config': dict(type='bool', default=False),
    }
def axapi_failure(result):
    """Return True when an aXAPI result payload reports a 'fail' status."""
    if 'response' not in result:
        return False
    return result['response'].get('status') == 'fail'
def axapi_call(module, url, post=None):
    '''
    Returns a datastructure based on the result of the API call.

    Aborts the module via module.fail_json() when the HTTP request itself
    fails or when the response body cannot be read at all.
    '''
    rsp, info = fetch_url(module, url, data=post)
    if not rsp or info['status'] >= 400:
        module.fail_json(msg="failed to connect (status code %s), error was %s" % (info['status'], info.get('msg', 'no error given')))
    try:
        raw_data = rsp.read()
        data = json.loads(raw_data)
    except ValueError:
        # at least one API call (system.action.write_config) returns
        # XML even when JSON is requested, so do some minimal handling
        # here to prevent failing even when the call succeeded
        if 'status="ok"' in raw_data.lower():
            data = {"response": {"status": "OK"}}
        else:
            data = {"response": {"status": "fail", "err": {"msg": raw_data}}}
    except Exception:
        # Bug fix: this was a bare `except:`, which would also swallow
        # SystemExit/KeyboardInterrupt; only genuine read errors should be
        # reported as a failure to the caller.
        module.fail_json(msg="could not read the result from the host")
    finally:
        rsp.close()
    return data
def axapi_authenticate(module, base_url, username, password):
    """Open an aXAPI session and return base_url with the session id appended.

    Fails the module when the device rejects the credentials.
    """
    url = '%s&method=authenticate&username=%s&password=%s' % (
        base_url, username, password)
    result = axapi_call(module, url)
    if axapi_failure(result):
        return module.fail_json(msg=result['response']['err']['msg'])
    return base_url + '&session_id=' + result['session_id']
def axapi_enabled_disabled(flag):
    '''
    The axapi uses 0/1 integer values for flags, rather than strings
    or booleans, so convert the given flag to a 0 or 1. For now, params
    are specified as strings only so thats what we check.
    '''
    return 1 if flag == 'enabled' else 0
def axapi_get_port_protocol(protocol):
    """Map a port protocol name (case-insensitive) to its aXAPI code, or None."""
    key = protocol.lower()
    return AXAPI_PORT_PROTOCOLS.get(key)
def axapi_get_vport_protocol(protocol):
    """Map a virtual-port protocol name (case-insensitive) to its aXAPI code, or None."""
    key = protocol.lower()
    return AXAPI_VPORT_PROTOCOLS.get(key)
| gpl-3.0 |
angad/libjingle-mac | scons-2.2.0/build/lib/SCons/Tool/sgicc.py | 14 | 1906 | """SCons.Tool.sgicc
Tool-specific initialization for MIPSPro cc on SGI.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/sgicc.py issue-2856:2676:d23b7a2f45e8 2012/08/05 15:38:28 garyo"
import cc
def generate(env):
    """Add Builders and construction variables for MIPSPro cc to an Environment."""
    cc.generate(env)
    # Override/extend the generic cc settings with the SGI-specific ones.
    for key, value in (('CXX', 'CC'),
                       ('SHOBJSUFFIX', '.o'),
                       ('STATIC_AND_SHARED_OBJECTS_ARE_THE_SAME', 1)):
        env[key] = value
def exists(env):
    """Report whether the MIPSPro 'cc' compiler can be detected."""
    found = env.Detect('cc')
    return found
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| bsd-3-clause |
elba7r/builder | frappe/utils/doctor.py | 10 | 3522 | from __future__ import unicode_literals
import frappe.utils
from collections import defaultdict
from rq import Worker, Connection
from frappe.utils.background_jobs import get_redis_conn, get_queue, get_queue_list
from frappe.utils.scheduler import is_scheduler_disabled
def get_workers():
	"""Return every RQ worker registered on the shared redis connection."""
	with Connection(get_redis_conn()):
		return Worker.all()
def purge_pending_jobs(event=None, site=None, queue=None):
	"""
	Purge pending background jobs, optionally restricted to a site, an
	event type and/or a queue, and return how many jobs were removed.

	Note: passing event='all' purges jobs of the literal 'all' event type
	(the ones that are enqueued every five minutes), not every job --
	daily, hourly and weekly tasks are left alone.
	"""
	purged_task_count = 0
	# Don't shadow the `queue` argument: it selects which queues to scan.
	for queue_name in get_queue_list(queue):
		q = get_queue(queue_name)
		if site or event:
			# Selective purge: inspect each job and delete the matches.
			for job in q.jobs:
				if site and job.kwargs['site'] != site:
					continue
				if event and job.kwargs['event'] != event:
					continue
				job.delete()
				purged_task_count += 1
		else:
			# Unfiltered purge: empty the whole queue in one shot.
			# (Previously this branch ran once per job, re-reading q.count
			# and re-emptying an already empty queue on every iteration.)
			purged_task_count += q.count
			q.empty()
	return purged_task_count
def get_jobs_by_queue(site=None):
	"""Return two dicts keyed by queue name: per-method pending-job counts,
	and the total number of pending jobs, optionally filtered by site."""
	jobs_per_queue = defaultdict(list)
	# Bug fix: previously `job_count = consolidated_methods = {}` bound both
	# names to the SAME dict object; keep them independent.
	job_count = {}
	for queue in get_queue_list():
		q = get_queue(queue)
		for job in q.jobs:
			# Record the method name (falling back to the job description),
			# either for every job or only those enqueued for `site`.
			if not site or job.kwargs['site'] == site:
				jobs_per_queue[queue].append(job.kwargs.get('method') or job.description)
		# Consolidate the flat method list into {method: occurrence count}.
		consolidated_methods = {}
		for method in jobs_per_queue[queue]:
			consolidated_methods[method] = consolidated_methods.get(method, 0) + 1
		job_count[queue] = len(jobs_per_queue[queue])
		jobs_per_queue[queue] = consolidated_methods
	return jobs_per_queue, job_count
def get_pending_jobs(site=None):
	"""Map each queue name to a list of 'method kwargs' strings for the
	pending jobs belonging to *site*."""
	pending = defaultdict(list)
	for queue_name in get_queue_list():
		for job in get_queue(queue_name).jobs:
			if job.kwargs['site'] != site:
				continue
			kwargs_repr = job.kwargs['kwargs'] if job.kwargs['kwargs'] else ""
			pending[queue_name].append(
				"{0} {1}".format(job.kwargs['method'], kwargs_repr))
	return pending
def check_number_of_workers():
	"""Number of RQ workers currently registered."""
	workers = get_workers()
	return len(workers)
def get_running_tasks():
	# NOTE(review): the unconditional 'return' inside the loop means only
	# the *first* worker is ever inspected -- despite the plural name this
	# yields a single job (or None), never a collection.  Fixing it would
	# change the return shape for callers, so it is only flagged here.
	for worker in get_workers():
		return worker.get_current_job()
def doctor(site=None):
	"""
	Prints diagnostic information for the scheduler
	"""
	# Gather worker/queue statistics once, under the given site context.
	with frappe.init_site(site):
		workers_online = check_number_of_workers()
		jobs_per_queue, job_count = get_jobs_by_queue(site)
	print "-----Checking scheduler status-----"
	# Without an explicit site, report the scheduler flag for every site
	# on this bench.
	if site:
		sites = [site]
	else:
		sites = frappe.utils.get_sites()
	for s in sites:
		frappe.init(s)
		frappe.connect()
		if is_scheduler_disabled():
			print "Scheduler disabled for", s
		frappe.destroy()
	# TODO improve this
	print "Workers online:", workers_online
	print "-----{0} Jobs-----".format(site)
	# Per queue: total job count plus per-method occurrence counts.
	for queue in get_queue_list():
		if jobs_per_queue[queue]:
			print "Queue:", queue
			print "Number of Jobs: ", job_count[queue]
			print "Methods:"
			for method, count in jobs_per_queue[queue].iteritems():
				print "{0} : {1}".format(method, count)
	print "------------"
	return True
def pending_jobs(site=None):
	# Print every pending job of *site*, grouped by queue.
	print "-----Pending Jobs-----"
	pending_jobs = get_pending_jobs(site)
	for queue in get_queue_list():
		if(pending_jobs[queue]):
			print "-----Queue :{0}-----".format(queue)
			print "\n".join(pending_jobs[queue])
| mit |
ondergetekende/python-panavatar | panavatar/patterns.py | 1 | 5456 | import math
# Geometry constants shared by the tiling patterns below.
SQ3 = math.sqrt(3.0)
# BUG FIX: SQ2 previously duplicated sqrt(3.0) (copy-paste error); it
# must be the square root of two, as its name states.
SQ2 = math.sqrt(2.0)
INV3 = 1.0 / 3
SIN60 = SQ3 * .5  # sin(60 degrees) == sqrt(3)/2
def frange(start, end, step):
    """A range implementation which can handle floats.

    Note: values are only yielded while current < end, so a descending
    request (start > end) yields nothing, matching the original.
    """
    delta = abs(step) if start <= end else -abs(step)
    current = start
    while current < end:
        yield current
        current += delta
def gridrange(start, end, step):
    """Generate a grid of complex numbers spanning [start, end) with the
    real/imaginary parts of *step* as the grid pitch."""
    for re_part in frange(start.real, end.real, step.real):
        for im_part in frange(start.imag, end.imag, step.imag):
            yield complex(re_part, im_part)
def offset_shape(shape, offset):
    """Translate every vertex of *shape* by the complex *offset*."""
    return [vertex + offset for vertex in shape]
def get_tiles(params):
    """Pick a tiling pattern class at random (weights below) and yield
    its tiles for the given parameter set."""
    weighted_patterns = [
        (20, Triangles),
        (10, Squares),
        (10, BarsSquares),
        (15, Beehive),
        (15, Blocks),
        (10, Corner),
        (10, Brick),
        (5, RoadBrick),
        (5, SparseSquares),
    ]
    # Weights are percentages and must total exactly 100.
    assert sum(weight for weight, _ in weighted_patterns) == 100
    chosen = params.weighted_choice(weighted_patterns, "pattern")
    return chosen(params).generate_tiles()
class TilingPattern(object):
    """Base class for plane tilings.

    Subclasses define:
      stride    -- complex grid pitch between repeated unit cells
      pattern   -- list of shapes (lists of complex vertices) per cell
      colors    -- color index per shape in ``pattern`` (cycled)
      is_sparse -- when True, a full-canvas background tile is emitted first
    """
    stride = 1 + 1j
    colors = [0]
    is_sparse = False

    def __init__(self, params):
        # params supplies .detail (scale factor), .size (canvas as a
        # complex number) and randomness helpers -- provided by caller.
        self.params = params

    def generate_tiles(self, overscan=.5):
        """Yield (shape, color) pairs covering the (overscanned) canvas."""
        # Later deformations may cause areas outside the main viewport to become
        # visible, so we need to overscan to make sure there is something to see
        # there.
        scale = self.params.detail
        stride = self.stride * scale

        if self.is_sparse:
            # Sparse patterns leave gaps; emit one background rectangle
            # covering the whole overscanned canvas first.
            top_left = -overscan * self.params.size
            bottom_right = (1 + overscan) * self.params.size
            top_right = bottom_right.real + 1j * top_left.imag
            bottom_left = top_left.real + 1j * bottom_right.imag
            yield [top_left, top_right, bottom_right, bottom_left], 0

        # Repeat every shape of the unit cell across the grid.
        for idx, shape in enumerate(self.pattern):
            color = self.colors[idx % len(self.colors)]
            for pos in gridrange(-overscan * self.params.size,
                                 (1 + overscan) * self.params.size,
                                 stride):
                yield [pos + point * scale for point in shape], color
class Squares(TilingPattern):
    """Plain unit-square grid."""
    stride = 1 + 1j
    pattern = [[0 + 0j, 0 + 1j, 1 + 1j, 1 + 0j]]


class SparseSquares(TilingPattern):
    """Small squares with gaps between them (background tile emitted)."""
    is_sparse = True
    stride = 1 + 1j
    pattern = [[.33 + .33j, .33 + .66j, .66 + .66j, .66 + .33j]]


class BarsSquares(TilingPattern):
    """Unit cell split into four rectangles at a random ratio."""
    stride = 1 + 1j
    colors = [0, 2, 2, 1]

    def __init__(self, params):
        super(BarsSquares, self).__init__(params)
        # Split position is randomized per image but deterministic for a
        # given seed ("bar_ratio" key).
        split = self.params.uniform("bar_ratio", .5, .95)
        splitj = split * 1j
        self.pattern = [[0 + 0j, 0 + splitj, split + splitj, split + 0j],
                        [split + 0j, split + splitj, 1 + splitj, 1 + 0j],
                        [0 + splitj, 0 + 1j, split + 1j, split + splitj],
                        [split + splitj, split + 1j, 1 + 1j, 1 + splitj]]


class Triangles(TilingPattern):
    """Equilateral-triangle tiling (two up, two down per cell)."""
    stride = 1.0 + SQ3 * 1j
    # colors = [0, 1]
    pattern = [
        [0.0 + 0.0j * SQ3, 1.0 + 0.0j * SQ3, 0.5 + 0.5j * SQ3],
        [0.5 + 0.5j * SQ3, 1.0 + 0.0j * SQ3, 1.5 + 0.5j * SQ3],
        [0.0 + 1.0j * SQ3, 1.0 + 1.0j * SQ3, 0.5 + 0.5j * SQ3],
        [0.5 + 0.5j * SQ3, 1.0 + 1.0j * SQ3, 1.5 + 0.5j * SQ3],
    ]


class Corner(TilingPattern):
    # Corners: three L-shaped pieces stacked diagonally per 3x3 cell.
    corner = [0 + 1j, 1 + 1j, 1 + 0j, 2 + 0j,
              2 + 1j, 2 + 2j, 1 + 2j, 0 + 2j]
    pattern = [
        corner,
        offset_shape(corner, 1 + 1j),
        offset_shape(corner, 2 + 2j),
    ]
    stride = 3 + 3j


class Beehive(TilingPattern):
    """Regular hexagon (honeycomb) tiling; two hexagons per unit cell."""
    hexagon = [0 + 0j,  # top left
               SIN60 - .5j,  # top
               2 * SIN60 + 0j,  # top right
               2 * SIN60 + SIN60 * 1j,  # bottom right
               SIN60 + SIN60 * 1j + .5j,  # bottom
               0 + SIN60 * 1j]  # bottom left
    pattern = [
        hexagon,
        offset_shape(hexagon, -SIN60 + SIN60 * 1j + .5j)
    ]
    stride = 2 * SIN60 + 1j + 2j * SIN60


class Blocks(TilingPattern):
    """Isometric cubes: a rhombic top plus left/right faces, shaded with
    three colors to fake 3D."""
    top = [0 + -.5j, SIN60 + 0j,
           0 + .5j, -SIN60 + 0j]
    left = [-SIN60 + 0j, 0 + .5j,
            0 + (SIN60 + .5) * 1j, -SIN60 + (SIN60) * 1j]
    right = [SIN60 + 0j, 0 + .5j,
             0 + (SIN60 + .5) * 1j, SIN60 + (SIN60) * 1j]
    pattern = [
        top, left, right,
        offset_shape(top, -SIN60 + SIN60 * 1j + .5j),
        offset_shape(left, -SIN60 + SIN60 * 1j + .5j),
        offset_shape(right, -SIN60 + SIN60 * 1j + .5j),
    ]
    colors = [1, 0, 2]
    stride = 2 * SIN60 + 1j + 2j * SIN60


class Brick(TilingPattern):
    # Stackered bricks
    brick = [0, .5, 1, 1 + .5j, .5 + .5j, .5j]
    pattern = [
        brick,
        offset_shape(brick, -.5 + .5j),
    ]
    stride = 1 + 1j


class RoadBrick(TilingPattern):
    # roadwork bricks: herringbone-like layout of horizontal (brickh)
    # and vertical (brickv) 2x1 bricks; letters map to the ASCII sketch.
    brickh = [0, 1, 2, 2 + 1j, 1 + 1j, 1j]
    brickv = [0, 1, 1 + 1j, 1 + 2j, 2j, 1j]
    # aaEF
    # HbbF
    # HIcc
    # dIEd
    pattern = [
        offset_shape(brickh, 0.0 + 0.0j),  # a
        offset_shape(brickh, 1 + 1j),  # b
        offset_shape(brickh, 2.0 + 2.0j),  # c
        offset_shape(brickh, -1 - 1j),  # d
        offset_shape(brickv, 0.0 + 1j),  # E
        offset_shape(brickv, 1 - 2.0j),  # F
        offset_shape(brickv, 2.0 - 1j),  # G
        offset_shape(brickv, -1 + 0.0j),  # H
    ]
    stride = 4 + 4j
| mit |
Pedram26/Humans-vs-Aliens | HumansAliens.app/Contents/Resources/lib/python2.7/pygame/examples/headless_no_windows_needed.py | 32 | 1333 | #!/usr/bin/env python
"""How to use pygame with no windowing system, like on headless servers.
Thumbnail generation with scaling is an example of what you can do with pygame.
NOTE: the pygame scale function uses mmx/sse if available, and can be run
in multiple threads.
"""
# NOTE(review): "useage" [sic] is both the variable name and printed help
# text; renaming would have to touch the __main__ block too.
useage = """-scale inputimage outputimage new_width new_height
eg. -scale in.png out.png 50 50
"""

import os, sys

# set SDL to use the dummy NULL video driver,
# so it doesn't need a windowing system.
os.environ["SDL_VIDEODRIVER"] = "dummy"

import pygame.transform

if 1:
    # some platforms need to init the display for some parts of pygame.
    import pygame.display
    pygame.display.init()
    screen = pygame.display.set_mode((1,1))
def scaleit(fin, fout, w, h):
    """Load image *fin*, resize it to (w, h) and save it as *fout*.

    Uses smoothscale (area-averaging, MMX/SSE accelerated) when this
    pygame build provides it, plain scale otherwise.
    """
    image = pygame.image.load(fin)
    scaler = getattr(pygame.transform, "smoothscale", pygame.transform.scale)
    pygame.image.save(scaler(image, (w, h)), fout)
def main(fin, fout, w, h):
    """smoothscale image file named fin as fout with new size (w,h)"""
    return scaleit(fin, fout, w, h)
if __name__ == "__main__":
    # Expected invocation: script -scale <in> <out> <width> <height>
    if "-scale" in sys.argv:
        fin, fout, w, h = sys.argv[2:]
        w, h = map(int, [w,h])
        main(fin, fout, w,h)
    else:
        print (useage)
| apache-2.0 |
bluestar-solutions/account-invoicing | account_invoice_period_usability/__openerp__.py | 5 | 1562 | # -*- coding: utf-8 -*-
##############################################################################
#
# This file is part of account_invoice_period_usability,
# an Odoo module.
#
# Authors: ACSONE SA/NV (<http://acsone.eu>)
#
# account_invoice_period_usability is free software:
# you can redistribute it and/or modify it under the terms of the GNU
# Affero General Public License as published by the Free Software
# Foundation,either version 3 of the License, or (at your option) any
# later version.
#
# account_invoice_period_usability is distributed
# in the hope that it will be useful, but WITHOUT ANY WARRANTY; without
# even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with account_invoice_period_usability.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Account Invoice Period Usability',
'summary': """
Display in the supplier invoice form the fiscal period
next to the invoice date""",
'author': 'ACSONE SA/NV,Odoo Community Association (OCA)',
'website': 'http://www.acsone.eu',
'category': 'Accounting & Finance',
'version': '8.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'account',
],
'data': [
'views/account_invoice_view.xml',
],
'installable': False,
'auto_install': False,
'application': False,
}
| agpl-3.0 |
LYZhelloworld/Courses | 50.021/03/csp/jobs.py | 1 | 8904 | from csp import *
# jobs is a list of jobs
# a job is a list of operations
# an operation is [name, list of alternative resources that could be
# used, a release time (earliest start time (usually 0), and a duration.
# This example is from "Variable And Value Ordering Heuristics For The
# Job Shop Scheduling Constraint Satisfaction Problem", Norman
# M. Sadeh and Mark S. Fox. A deadline of 15 should work.
# Benchmark instances.  jobs4 uses the verbose operation format directly;
# the others use the compact pair format decoded by parse_jobs() below.
jobs4 = [ [['o_1_1', [1], 0, 3], ['o_1_2', [2], 0, 3], ['o_1_3', [3], 0, 3]],
          [['o_2_1', [1], 0, 3], ['o_2_2', [2], 0, 3]],
          [['o_3_1', [3], 0, 3], ['o_3_2', [1], 0, 3], ['o_3_3', [2], 0, 3]],
          [['o_4_1', [4], 0, 3], ['o_4_2', [2], 0, 3]] ]

# A more compact specification, assuming a single resource per
# operation and start times of 0
# Each row specifies a job by X pairs of consecutive numbers.  Each
# pair of numbers defines one task of the job, which represents the
# processing of a job on a machine.  For each pair, the first number
# identifies the machine it executes on, and the second number is the
# duration.  The order of the X pairs defines the sequence of the
# tasks for a job.

# Simple example from www.columbia.edu/~cs2035/courses/ieor4405.S03/jobshop.doc
# A deadline of 31 should work.
j3 = [
    [1, 10, 2, 8, 3, 4],
    [2, 8, 1, 3, 4, 5, 3, 6],
    [1, 4, 2, 7, 4, 3]
]

# http://yetanothermathprogrammingconsultant.blogspot.sg/2012_04_01_archive.html
#           job1 job2 job3 job4 job5 job6 job7 job8 job9 job10
# machine1    4    2    1    5    4    3    5    2    1    8
# machine2    2    5    8    6    7    4    7    3    6    2
# machine3    2    8    6    2    2    4    2    7    1    8
# A deadline of 52 should work.
m3x10 = [
    [ 4, 2, 1, 5, 4, 3, 5, 2, 1, 8 ],
    [ 2, 5, 8, 6, 7, 4, 7, 3, 6, 2 ],
    [ 2, 8, 6, 2, 2, 4, 2, 7, 1, 8 ]]

# This is the transpose of what we want: m3x10 is machines x jobs, but the
# compact job format is jobs x (machine, duration) pairs, so fill j10x3
# by transposing (machine index mi becomes the pair at position 2*mi).
j10x3 = [[1, 0, 2, 0, 3, 0] for i in range(10)]
for mi, m in enumerate(m3x10):
    for ji, duration in enumerate(m):
        j10x3[ji][mi*2+1] = duration
#print j10x3

# This example is a (very hard) job shop scheduling problem from Lawrence
# (1984).  This test is also known as LA19 in the literature, and its
# optimal makespan is known to be 842 (Applegate and Cook,
# 1991).  There are 10 jobs (J1-J10) and 10 machines (M0-M9).  Every job
# must be processed on each of the 10 machines in a predefined
# sequence.  The objective is to minimize the completion time of the
# last job to be processed, known as the makespan.
j10x10 = [
    [2, 44, 3, 5, 5, 58, 4, 97, 0, 9, 7, 84, 8, 77, 9, 96, 1, 58, 6, 89],
    [4, 15, 7, 31, 1, 87, 8, 57, 0, 77, 3, 85, 2, 81, 5, 39, 9, 73, 6, 21],
    [9, 82, 6, 22, 4, 10, 3, 70, 1, 49, 0, 40, 8, 34, 2, 48, 7, 80, 5, 71],
    [1, 91, 2, 17, 7, 62, 5, 75, 8, 47, 4, 11, 3, 7, 6, 72, 9, 35, 0, 55],
    [6, 71, 1, 90, 3, 75, 0, 64, 2, 94, 8, 15, 4, 12, 7, 67, 9, 20, 5, 50],
    [7, 70, 5, 93, 8, 77, 2, 29, 4, 58, 6, 93, 3, 68, 1, 57, 9, 7, 0, 52],
    [6, 87, 1, 63, 4, 26, 5, 6, 2, 82, 3, 27, 7, 56, 8, 48, 9, 36, 0, 95],
    [0, 36, 5, 15, 8, 41, 9, 78, 3, 76, 6, 84, 4, 30, 7, 76, 2, 36, 1, 8],
    [5, 88, 2, 81, 3, 13, 6, 82, 4, 54, 7, 13, 8, 29, 9, 40, 1, 78, 0, 75],
    [9, 88, 4, 54, 6, 64, 7, 32, 0, 52, 2, 6, 8, 54, 5, 82, 3, 6, 1, 26],
]
def parse_jobs(jobs):
    """Expand the compact pair format into the verbose operation format.

    Each input row is a flat list of (machine, duration) pairs; each pair
    becomes an operation ['o_<job>_<op>', [machine], 0, duration].
    """
    parsed = []
    for job_index, pairs in enumerate(jobs):
        job = []
        # BUG FIX: use integer division -- each operation is two numbers,
        # and '/' would produce a float (TypeError in range) on Python 3.
        for op_index in range(len(pairs) // 2):
            machine = pairs[op_index * 2]
            duration = pairs[op_index * 2 + 1]
            job.append(['o_%d_%d' % (job_index, op_index),
                        [machine], 0, duration])
        parsed.append(job)
    return parsed
#####################################################
# The example jobs defined above are
# jobs4 - defined at the top of the file
# Decode the compact instances into the verbose format used by the CSP.
jobs3 = parse_jobs(j3)
jobs10x10 = parse_jobs(j10x10)
jobs10x3 = parse_jobs(j10x3)
#####################################################
class Jobs:
    """Container wrapping a list of raw job descriptions as Job objects."""

    def __init__(self, jobs):
        self.jobs = [Job(raw) for raw in jobs]

    def __iter__(self):
        return iter(self.jobs)

    def __repr__(self):
        parts = []
        for number, job in enumerate(self.jobs, 1):
            parts.append('Job ' + str(number))
            parts.append(str(job))
        # Each job block ends with a newline; empty container prints ''.
        return '\n'.join(parts) + '\n' if parts else ''
class Job:
    """One job: an ordered sequence of Operation steps."""

    def __init__(self, job):
        self.operations = [Operation(name, resources, start, duration)
                           for name, resources, start, duration in job]

    def __contains__(self, op):
        return any(existing == op for existing in self.operations)

    def __iter__(self):
        return iter(self.operations)

    def __repr__(self):
        return '\n'.join(str(operation) for operation in self.operations)
class Operation:
    """A single operation of a job.

    name      -- unique identifier; also the identity used by ==/hash
    resources -- tuple of machine ids that may run this operation
    start     -- release time (earliest allowed start)
    duration  -- processing time
    """

    def __init__(self, name, resources, start, duration):
        self.name = name
        self.resources = tuple(resources)
        self.start = start
        self.duration = duration

    def __eq__(self, other):
        return self.name == other.name

    def __ne__(self, other):
        # BUG FIX: Python 2 does not derive __ne__ from __eq__, so
        # 'a != b' previously compared object identity and could
        # disagree with 'a == b'.
        return not self.__eq__(other)

    def __repr__(self):
        return '<%s,%s,%d,%d>' % (self.name, str(list(self.resources)),
                                  self.start, self.duration)

    def __hash__(self):
        # Must stay consistent with __eq__ (both key off .name).
        return self.name.__hash__()
def generate_CSP(jobs, deadline):
    """Build a job-shop CSP.

    From CSP class in csp.py:
        vars        A list of variables; each is atomic (e.g. int or string).
        domains     A dict of {var:[possible_value, ...]} entries.
        neighbors   A dict of {var:[var,...]} that for each variable lists
                    the other variables that participate in constraints.
        constraints A function f(A, a, B, b) that returns true if neighbors
                    A, B satisfy the constraint when they have values A=a, B=b

    Variables are Operation objects; a value is the operation's start time
    in 0..deadline.
    """
    csp_vars = []
    csp_domains = {}
    csp_neighbors = {}

    # Operations of the same job constrain each other (precedence).
    for job in jobs:
        seen_in_job = []
        for operation in job:
            csp_vars.append(operation)
            csp_neighbors.setdefault(operation, [])
            for earlier in seen_in_job:
                csp_neighbors.setdefault(earlier, [])
                csp_neighbors[earlier].append(operation)
                csp_neighbors[operation].append(earlier)
            seen_in_job.append(operation)

    # Operations sharing a machine constrain each other (no overlap).
    for job1 in jobs:
        for operation1 in job1:
            for job2 in jobs:
                for operation2 in job2:
                    if operation1 == operation2:
                        continue
                    if set(operation1.resources) & set(operation2.resources):
                        csp_neighbors[operation1].append(operation2)
                        csp_neighbors[operation2].append(operation1)

    for operation in csp_vars:
        csp_domains[operation] = range(deadline + 1)

    def csp_constraints(A, a, B, b):
        # Deadline: both operations must finish by the deadline.
        if a + A.duration > deadline or b + B.duration > deadline:
            return False
        # Precedence within a job: the earlier operation must finish
        # before the later one starts.
        for job in jobs:
            pos_A = None
            pos_B = None
            for operation in job:
                if operation == A:
                    pos_A = job.operations.index(operation)
                if operation == B:
                    pos_B = job.operations.index(operation)
            if pos_A is None or pos_B is None:
                continue
            # BUG FIX: compare the operations' positions within the job
            # (pos_A < pos_B).  The original 'A < B' compared Operation
            # objects, which falls back to arbitrary object ordering and
            # could enforce the precedence in the wrong direction.
            if pos_A < pos_B:
                if a + A.duration > b:
                    return False
            else:
                if b + B.duration > a:
                    return False
            break
        # Mutual exclusion on shared machines: time windows must not overlap.
        if set(A.resources) & set(B.resources):
            if set(range(a, a + A.duration)) & set(range(b, b + B.duration)):
                return False
        return True

    return CSP(csp_vars, csp_domains, csp_neighbors, csp_constraints)
def print_search_result(result):
    """Pretty-print a search assignment grouped by start time.

    result is a dict mapping Operation -> start time, or None when the
    search failed.
    """
    if result == None:
        print 'No results found'
        print ''
        return
    assert(type(result) == dict)
    # Collect the distinct start times in ascending order.
    time_list = set()
    for time in result.values():
        time_list.add(time)
    time_list = list(time_list)
    time_list.sort()
    # For each start time, list every operation scheduled at it.
    for time in time_list:
        print 'Time %d' % time
        for item in result:
            if result[item] == time:
                print item
        print ''
if __name__ == '__main__':
    # (instance, deadline) pairs; deadlines chosen so a feasible schedule
    # exists (see the comments next to each instance above).
    jobs_to_test = [(jobs4, 15), (jobs3, 31), (jobs10x3, 58)]
    for jobs in jobs_to_test:
        j = Jobs(jobs[0])
        print j
        print_search_result(backtracking_search(generate_CSP(j, jobs[1]), select_unassigned_variable = mrv, order_domain_values = lcv, inference = forward_checking))
| mit |
ashray/VTK-EVM | Filters/Texture/Testing/Python/textureThreshold.py | 20 | 4042 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()

# Create the RenderWindow, Renderer and both Actors
#
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)

# read data: PLOT3D blunt-fin CFD dataset (structured grid + solution)
#
pl3d = vtk.vtkMultiBlockPLOT3DReader()
pl3d.SetXYZFileName("" + str(VTK_DATA_ROOT) + "/Data/bluntfinxyz.bin")
pl3d.SetQFileName("" + str(VTK_DATA_ROOT) + "/Data/bluntfinq.bin")
pl3d.SetScalarFunctionNumber(100)
pl3d.SetVectorFunctionNumber(202)
pl3d.Update()
output = pl3d.GetOutput().GetBlock(0)

# wall: extract the wall surface of the structured grid
#
wall = vtk.vtkStructuredGridGeometryFilter()
wall.SetInputData(output)
wall.SetExtent(0,100,0,0,0,100)
wallMap = vtk.vtkPolyDataMapper()
wallMap.SetInputConnection(wall.GetOutputPort())
wallMap.ScalarVisibilityOff()
wallActor = vtk.vtkActor()
wallActor.SetMapper(wallMap)
wallActor.GetProperty().SetColor(0.8,0.8,0.8)

# fin: extract the fin surface
#
fin = vtk.vtkStructuredGridGeometryFilter()
fin.SetInputData(output)
fin.SetExtent(0,100,0,100,0,0)
finMap = vtk.vtkPolyDataMapper()
finMap.SetInputConnection(fin.GetOutputPort())
finMap.ScalarVisibilityOff()
finActor = vtk.vtkActor()
finActor.SetMapper(finMap)
finActor.GetProperty().SetColor(0.8,0.8,0.8)

# planes to threshold: texture used by the three threshold filters below
tmap = vtk.vtkStructuredPointsReader()
tmap.SetFileName("" + str(VTK_DATA_ROOT) + "/Data/texThres2.vtk")
texture = vtk.vtkTexture()
texture.SetInputConnection(tmap.GetOutputPort())
texture.InterpolateOff()
texture.RepeatOff()

# plane 1: keep texture coords above 1.5
plane1 = vtk.vtkStructuredGridGeometryFilter()
plane1.SetInputData(output)
plane1.SetExtent(10,10,0,100,0,100)
thresh1 = vtk.vtkThresholdTextureCoords()
thresh1.SetInputConnection(plane1.GetOutputPort())
thresh1.ThresholdByUpper(1.5)
plane1Map = vtk.vtkDataSetMapper()
plane1Map.SetInputConnection(thresh1.GetOutputPort())
plane1Map.SetScalarRange(output.GetScalarRange())
plane1Actor = vtk.vtkActor()
plane1Actor.SetMapper(plane1Map)
plane1Actor.SetTexture(texture)
plane1Actor.GetProperty().SetOpacity(0.999)

# plane 2: keep texture coords below 1.5
plane2 = vtk.vtkStructuredGridGeometryFilter()
plane2.SetInputData(output)
plane2.SetExtent(30,30,0,100,0,100)
thresh2 = vtk.vtkThresholdTextureCoords()
thresh2.SetInputConnection(plane2.GetOutputPort())
thresh2.ThresholdByLower(1.5)
plane2Map = vtk.vtkDataSetMapper()
plane2Map.SetInputConnection(thresh2.GetOutputPort())
plane2Map.SetScalarRange(output.GetScalarRange())
plane2Actor = vtk.vtkActor()
plane2Actor.SetMapper(plane2Map)
plane2Actor.SetTexture(texture)
plane2Actor.GetProperty().SetOpacity(0.999)

# plane 3: keep texture coords between 1.5 and 1.8
plane3 = vtk.vtkStructuredGridGeometryFilter()
plane3.SetInputData(output)
plane3.SetExtent(35,35,0,100,0,100)
thresh3 = vtk.vtkThresholdTextureCoords()
thresh3.SetInputConnection(plane3.GetOutputPort())
thresh3.ThresholdBetween(1.5,1.8)
plane3Map = vtk.vtkDataSetMapper()
plane3Map.SetInputConnection(thresh3.GetOutputPort())
plane3Map.SetScalarRange(output.GetScalarRange())
plane3Actor = vtk.vtkActor()
plane3Actor.SetMapper(plane3Map)
plane3Actor.SetTexture(texture)
plane3Actor.GetProperty().SetOpacity(0.999)

# outline of the whole structured grid
outline = vtk.vtkStructuredGridOutlineFilter()
outline.SetInputData(output)
outlineMapper = vtk.vtkPolyDataMapper()
outlineMapper.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(outlineMapper)
outlineProp = outlineActor.GetProperty()
outlineProp.SetColor(0,0,0)

# Add the actors to the renderer, set the background and size
#
ren1.AddActor(outlineActor)
ren1.AddActor(wallActor)
ren1.AddActor(finActor)
ren1.AddActor(plane1Actor)
ren1.AddActor(plane2Actor)
ren1.AddActor(plane3Actor)
ren1.SetBackground(1,1,1)
renWin.SetSize(256,256)
cam1 = vtk.vtkCamera()
cam1.SetClippingRange(1.51176,75.5879)
cam1.SetFocalPoint(2.33749,2.96739,3.61023)
cam1.SetPosition(10.8787,5.27346,15.8687)
cam1.SetViewAngle(30)
cam1.SetViewUp(-0.0610856,0.987798,-0.143262)
ren1.SetActiveCamera(cam1)
iren.Initialize()
# render the image
#
# prevent the tk window from showing up then start the event loop
# --- end of script --
| bsd-3-clause |
Nexenta/cinder | cinder/volume/volume_types.py | 5 | 12667 | # Copyright (c) 2011 Zadara Storage Inc.
# Copyright (c) 2011 OpenStack Foundation
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright 2011 Ken Pepple
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Built-in volume type properties."""
from oslo_config import cfg
from oslo_db import exception as db_exc
from oslo_log import log as logging
from oslo_utils import uuidutils
from cinder import context
from cinder import db
from cinder import exception
from cinder.i18n import _, _LE
from cinder import quota
CONF = cfg.CONF
LOG = logging.getLogger(__name__)
QUOTAS = quota.QUOTAS

# DB bookkeeping columns that must not take part in encryption-spec
# comparisons (see volume_types_diff / volume_types_encryption_changed).
ENCRYPTION_IGNORED_FIELDS = ['volume_type_id', 'created_at', 'updated_at',
                             'deleted_at']
def create(context,
           name,
           extra_specs=None,
           is_public=True,
           projects=None,
           description=None):
    """Create a new volume type.

    A non-admin context is elevated first.  Raises VolumeTypeCreateFailed
    when the database insert fails.
    """
    if extra_specs is None:
        extra_specs = {}
    if projects is None:
        projects = []
    elevated = context.elevated() if not context.is_admin else context
    values = dict(name=name,
                  extra_specs=extra_specs,
                  is_public=is_public,
                  description=description)
    try:
        return db.volume_type_create(elevated, values, projects=projects)
    except db_exc.DBError:
        LOG.exception(_LE('DB error:'))
        raise exception.VolumeTypeCreateFailed(name=name,
                                               extra_specs=extra_specs)
def update(context, id, name, description, is_public=None):
    """Update volume type by id."""
    if id is None:
        msg = _("id cannot be None")
        raise exception.InvalidVolumeType(reason=msg)
    elevated = context if context.is_admin else context.elevated()
    # Fetch the pre-update record first: the old name is needed below to
    # rename the per-type quota resource if the name changes.
    old_volume_type = get_volume_type(elevated, id)
    try:
        type_updated = db.volume_type_update(elevated,
                                             id,
                                             dict(name=name,
                                                  description=description,
                                                  is_public=is_public))
        # Rename resource in quota if volume type name is changed.
        # Kept inside the same try-block so a DB failure here also
        # surfaces as VolumeTypeUpdateFailed.
        if name:
            old_type_name = old_volume_type.get('name')
            if old_type_name != name:
                QUOTAS.update_quota_resource(elevated,
                                             old_type_name,
                                             name)
    except db_exc.DBError:
        LOG.exception(_LE('DB error:'))
        raise exception.VolumeTypeUpdateFailed(id=id)
    return type_updated
def destroy(context, id):
    """Marks volume types as deleted."""
    if id is None:
        raise exception.InvalidVolumeType(reason=_("id cannot be None"))
    elevated = context.elevated() if not context.is_admin else context
    return db.volume_type_destroy(elevated, id)
def get_all_types(context, inactive=0, filters=None, marker=None,
                  limit=None, sort_keys=None, sort_dirs=None,
                  offset=None, list_result=False):
    """Get all non-deleted volume_types.

    Pass true as argument if you want deleted volume types returned also.
    """
    return db.volume_type_get_all(context, inactive, filters=filters,
                                  marker=marker, limit=limit,
                                  sort_keys=sort_keys,
                                  sort_dirs=sort_dirs, offset=offset,
                                  list_result=list_result)
def get_all_types_by_group(context, group_id):
    """Get all volume_types in a group."""
    return db.volume_type_get_all_by_group(context, group_id)
def get_volume_type(ctxt, id, expected_fields=None):
    """Retrieves single volume type by id."""
    if id is None:
        raise exception.InvalidVolumeType(reason=_("id cannot be None"))
    # Fall back to an admin context when the caller supplied none.
    context_to_use = ctxt if ctxt is not None else context.get_admin_context()
    return db.volume_type_get(context_to_use, id,
                              expected_fields=expected_fields)
def get_by_name_or_id(context, identity):
    """Retrieves volume type by id or name"""
    if uuidutils.is_uuid_like(identity):
        return get_volume_type(context, identity)
    return get_volume_type_by_name(context, identity)
def get_volume_type_by_name(context, name):
    """Retrieves single volume type by name."""
    if name is None:
        raise exception.InvalidVolumeType(reason=_("name cannot be None"))
    return db.volume_type_get_by_name(context, name)
def get_default_volume_type():
    """Get the default volume type; {} when unset or missing."""
    name = CONF.default_volume_type
    if name is None:
        return {}
    ctxt = context.get_admin_context()
    try:
        return get_volume_type_by_name(ctxt, name)
    except exception.VolumeTypeNotFoundByName:
        # Couldn't find volume type with the name in default_volume_type
        # flag, record this issue and move on
        # TODO(zhiteng) consider add notification to warn admin
        LOG.exception(_LE('Default volume type is not found. '
                          'Please check default_volume_type config:'))
    return {}
def get_volume_type_extra_specs(volume_type_id, key=False):
    """Return a type's extra_specs dict, or a single spec value.

    With the default key=False the whole extra_specs dict is returned.
    With a key, the value is returned -- or False when the key is missing
    or its value is falsy (quirk preserved for compatibility).
    """
    volume_type = get_volume_type(context.get_admin_context(),
                                  volume_type_id)
    extra_specs = volume_type['extra_specs']
    if not key:
        return extra_specs
    return extra_specs.get(key) or False
def is_public_volume_type(context, volume_type_id):
    """Return is_public boolean value of volume type"""
    return db.volume_type_get(context, volume_type_id)['is_public']
def _validated_access_context(context, volume_type_id):
    """Shared validation for access add/remove; returns elevated context.

    Raises InvalidVolumeType for a missing id or for a public type, since
    per-project access lists only apply to private volume types.
    (Extracted to remove the duplicated logic in the two functions below.)
    """
    if volume_type_id is None:
        msg = _("volume_type_id cannot be None")
        raise exception.InvalidVolumeType(reason=msg)
    elevated = context if context.is_admin else context.elevated()
    if is_public_volume_type(elevated, volume_type_id):
        msg = _("Type access modification is not applicable to public volume "
                "type.")
        raise exception.InvalidVolumeType(reason=msg)
    return elevated


def add_volume_type_access(context, volume_type_id, project_id):
    """Add access to volume type for project_id."""
    elevated = _validated_access_context(context, volume_type_id)
    return db.volume_type_access_add(elevated, volume_type_id, project_id)


def remove_volume_type_access(context, volume_type_id, project_id):
    """Remove access to volume type for project_id."""
    elevated = _validated_access_context(context, volume_type_id)
    return db.volume_type_access_remove(elevated, volume_type_id, project_id)
def is_encrypted(context, volume_type_id):
    """True when the volume type has an encryption spec."""
    encryption = get_volume_type_encryption(context, volume_type_id)
    return encryption is not None
def get_volume_type_encryption(context, volume_type_id):
    """Return the encryption spec for a type, or None when id is None."""
    if volume_type_id is None:
        return None
    return db.volume_type_encryption_get(context, volume_type_id)
def get_volume_type_qos_specs(volume_type_id):
    """Get all qos specs for given volume type."""
    admin_ctxt = context.get_admin_context()
    return db.volume_type_qos_specs_get(admin_ctxt, volume_type_id)
def volume_types_diff(context, vol_type_id1, vol_type_id2):
    """Returns a 'diff' of two volume types and whether they are equal.

    Returns a tuple of (diff, equal), where 'equal' is a boolean indicating
    whether there is any difference, and 'diff' is a dictionary with the
    following format:

    .. code-block:: json

        {
            'extra_specs': {'key1': (value_in_1st_vol_type,
                                     value_in_2nd_vol_type),
                            'key2': (value_in_1st_vol_type,
                                     value_in_2nd_vol_type),
                            {...}}
            'qos_specs': {'key1': (value_in_1st_vol_type,
                                   value_in_2nd_vol_type),
                          'key2': (value_in_1st_vol_type,
                                   value_in_2nd_vol_type),
                          {...}}
            'encryption': {'cipher': (value_in_1st_vol_type,
                                      value_in_2nd_vol_type),
                          {'key_size': (value_in_1st_vol_type,
                                        value_in_2nd_vol_type),
                           {...}}
        }
    """
    def _fix_qos_specs(qos_specs):
        # Flatten the qos record in place: drop identity fields and lift
        # the nested 'specs' dict to the top level for key-wise diffing.
        if qos_specs:
            qos_specs.pop('id', None)
            qos_specs.pop('name', None)
            qos_specs.update(qos_specs.pop('specs', {}))

    def _fix_encryption_specs(encryption):
        # Copy and strip DB bookkeeping columns so only meaningful
        # encryption fields are compared.
        if encryption:
            encryption = dict(encryption)
            for param in ENCRYPTION_IGNORED_FIELDS:
                encryption.pop(param, None)
        return encryption

    def _dict_diff(dict1, dict2):
        # Key-wise diff of two (possibly None) dicts; every key of either
        # side appears in the result as (value1, value2).
        res = {}
        equal = True
        if dict1 is None:
            dict1 = {}
        if dict2 is None:
            dict2 = {}
        for k, v in dict1.items():
            res[k] = (v, dict2.get(k))
            if k not in dict2 or res[k][0] != res[k][1]:
                equal = False
        for k, v in dict2.items():
            res[k] = (dict1.get(k), v)
            if k not in dict1 or res[k][0] != res[k][1]:
                equal = False
        return (res, equal)

    all_equal = True
    diff = {}
    vol_type_data = []
    # Collect extra_specs / qos_specs / encryption for both types; a None
    # id stands for "no type" and contributes all-None specs.
    for vol_type_id in (vol_type_id1, vol_type_id2):
        if vol_type_id is None:
            specs = {'extra_specs': None,
                     'qos_specs': None,
                     'encryption': None}
        else:
            specs = {}
            vol_type = get_volume_type(context, vol_type_id)
            specs['extra_specs'] = vol_type.get('extra_specs')
            qos_specs = get_volume_type_qos_specs(vol_type_id)
            specs['qos_specs'] = qos_specs.get('qos_specs')
            _fix_qos_specs(specs['qos_specs'])
            specs['encryption'] = get_volume_type_encryption(context,
                                                            vol_type_id)
            specs['encryption'] = _fix_encryption_specs(specs['encryption'])
        vol_type_data.append(specs)

    diff['extra_specs'], equal = _dict_diff(vol_type_data[0]['extra_specs'],
                                            vol_type_data[1]['extra_specs'])
    if not equal:
        all_equal = False
    diff['qos_specs'], equal = _dict_diff(vol_type_data[0]['qos_specs'],
                                          vol_type_data[1]['qos_specs'])
    if not equal:
        all_equal = False
    diff['encryption'], equal = _dict_diff(vol_type_data[0]['encryption'],
                                           vol_type_data[1]['encryption'])
    if not equal:
        all_equal = False
    return (diff, all_equal)
def volume_types_encryption_changed(context, vol_type_id1, vol_type_id2):
    """Return True when the encryption specs of the two types differ.

    (The original docstring said "are same", but the function returns
    ``enc1 != enc2`` -- i.e. whether they *changed*, matching its name.)
    """
    def _get_encryption(enc):
        # Strip DB bookkeeping columns so only meaningful fields compare.
        enc = dict(enc)
        for param in ENCRYPTION_IGNORED_FIELDS:
            enc.pop(param, None)
        return enc

    enc1 = get_volume_type_encryption(context, vol_type_id1)
    enc2 = get_volume_type_encryption(context, vol_type_id2)

    enc1_filtered = _get_encryption(enc1) if enc1 else None
    enc2_filtered = _get_encryption(enc2) if enc2 else None
    return enc1_filtered != enc2_filtered
| apache-2.0 |
redapple/scrapy | tests/test_utils_misc/__init__.py | 151 | 2976 | import sys
import os
import unittest
from scrapy.item import Item, Field
from scrapy.utils.misc import load_object, arg_to_iter, walk_modules
__doctests__ = ['scrapy.utils.misc']
class UtilsMiscTestCase(unittest.TestCase):
    """Tests for scrapy.utils.misc helpers."""

    def test_load_object(self):
        obj = load_object('scrapy.utils.misc.load_object')
        assert obj is load_object
        self.assertRaises(ImportError, load_object, 'nomodule999.mod.function')
        self.assertRaises(NameError, load_object, 'scrapy.utils.misc.load_object999')

    def test_walk_modules(self):
        # FIX: assertEquals is a deprecated alias -- use assertEqual; the
        # set([...]) wrappers are replaced with set comprehensions.
        mods = walk_modules('tests.test_utils_misc.test_walk_modules')
        expected = [
            'tests.test_utils_misc.test_walk_modules',
            'tests.test_utils_misc.test_walk_modules.mod',
            'tests.test_utils_misc.test_walk_modules.mod.mod0',
            'tests.test_utils_misc.test_walk_modules.mod1',
        ]
        self.assertEqual({m.__name__ for m in mods}, set(expected))
        mods = walk_modules('tests.test_utils_misc.test_walk_modules.mod')
        expected = [
            'tests.test_utils_misc.test_walk_modules.mod',
            'tests.test_utils_misc.test_walk_modules.mod.mod0',
        ]
        self.assertEqual({m.__name__ for m in mods}, set(expected))
        mods = walk_modules('tests.test_utils_misc.test_walk_modules.mod1')
        expected = [
            'tests.test_utils_misc.test_walk_modules.mod1',
        ]
        self.assertEqual({m.__name__ for m in mods}, set(expected))
        self.assertRaises(ImportError, walk_modules, 'nomodule999')

    def test_walk_modules_egg(self):
        egg = os.path.join(os.path.dirname(__file__), 'test.egg')
        sys.path.append(egg)
        try:
            mods = walk_modules('testegg')
            expected = [
                'testegg.spiders',
                'testegg.spiders.a',
                'testegg.spiders.b',
                'testegg'
            ]
            self.assertEqual({m.__name__ for m in mods}, set(expected))
        finally:
            # Always restore sys.path, even when the walk fails.
            sys.path.remove(egg)

    def test_arg_to_iter(self):
        class TestItem(Item):
            name = Field()

        assert hasattr(arg_to_iter(None), '__iter__')
        assert hasattr(arg_to_iter(100), '__iter__')
        assert hasattr(arg_to_iter('lala'), '__iter__')
        assert hasattr(arg_to_iter([1, 2, 3]), '__iter__')
        assert hasattr(arg_to_iter(c for c in 'abcd'), '__iter__')

        self.assertEqual(list(arg_to_iter(None)), [])
        self.assertEqual(list(arg_to_iter('lala')), ['lala'])
        self.assertEqual(list(arg_to_iter(100)), [100])
        self.assertEqual(list(arg_to_iter(c for c in 'abc')), ['a', 'b', 'c'])
        self.assertEqual(list(arg_to_iter([1, 2, 3])), [1, 2, 3])
        self.assertEqual(list(arg_to_iter({'a': 1})), [{'a': 1}])
        self.assertEqual(list(arg_to_iter(TestItem(name="john"))),
                         [TestItem(name="john")])
if __name__ == "__main__":
    # Allow executing this test module directly, outside a test runner.
    unittest.main()
| bsd-3-clause |
park-bench/confighelper | setup.py | 1 | 1088 | #!/usr/bin/env python2
# Copyright 2015-2018 Joel Allen Luellwitz and Emily Frost
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import setuptools

# NOTE(review): the metadata below names 'parkbenchcommon' while the
# repository path says 'confighelper' -- confirm which package this
# setup script is meant to ship.
setuptools.setup(name='parkbenchcommon',
                 description='Common Parkbench libraries',
                 author='Joel Allen Luellwitz and Emily Frost',
                 author_email='eviljoel@linux.com, emfrost@posteo.net',
                 license='GPLv3',
                 packages=['parkbenchcommon'],
                 # Fixed: the original passed 'zipsafe', which setuptools does
                 # not recognize (the keyword is 'zip_safe'), so the option
                 # was silently ignored.
                 zip_safe=False)
| gpl-3.0 |
paol/powerline-shell | lib/colortrans.py | 32 | 8246 | #! /usr/bin/env python
"""
Code is modified (fairly heavily) by hryanjones@gmail.com from
https://gist.github.com/MicahElliott/719710
Convert values between RGB tuples and xterm-256 color codes.
Nice long listing of all 256 colors and their codes. Useful for
developing console color themes, or even script output schemes.
Resources:
* http://en.wikipedia.org/wiki/8-bit_color
* http://en.wikipedia.org/wiki/ANSI_escape_code
* /usr/share/X11/rgb.txt
I'm not sure where this script was inspired from. I think I must have
written it from scratch, though it's been several years now.
"""
__author__ = 'Micah Elliott http://MicahElliott.com'
__version__ = '0.1'
__copyright__ = 'Copyright (C) 2011 Micah Elliott. All rights reserved.'
__license__ = 'WTFPL http://sam.zoy.org/wtfpl/'
#---------------------------------------------------------------------
def hexstr2num(hexstr):
    """Parse *hexstr* as a base-16 integer (e.g. 'ff' -> 255)."""
    value = int(hexstr, 16)
    return value
def rgbstring2tuple(s):
    """Convert a 6-digit hex color string to an ``(r, g, b)`` tuple of ints.

    Generalized to tolerate an optional CSS-style leading ``#``:
    both ``'ff8000'`` and ``'#ff8000'`` yield ``(255, 128, 0)``.

    @param s: string of six hexadecimal digits, optionally prefixed with '#'
    @returns: tuple of three ints in the range 0-255
    """
    s = s.lstrip('#')
    # Parse the three two-digit channel fields directly.
    return tuple(int(s[i:i + 2], 16) for i in (0, 2, 4))
# Map an exact (r, g, b) tuple to its xterm-256 color code.
#
# The xterm-256 palette is almost entirely formulaic: a 6x6x6 color cube
# (codes 16-231) over the channel levels 0x00/0x5f/0x87/0xaf/0xd7/0xff, plus a
# 24-step grayscale ramp (codes 232-255).  The original module spelled the
# whole mapping out as a 247-entry literal; building it programmatically
# removes the transcription risk while producing the identical dict.
_CUBE_LEVELS = (0x00, 0x5F, 0x87, 0xAF, 0xD7, 0xFF)

def _build_rgb2short_dict():
    """Return the full RGB -> xterm-256 code mapping (247 entries)."""
    mapping = {}
    # 6x6x6 color cube: codes 16..231.
    for ri, r in enumerate(_CUBE_LEVELS):
        for gi, g in enumerate(_CUBE_LEVELS):
            for bi, b in enumerate(_CUBE_LEVELS):
                mapping[(r, g, b)] = 16 + 36 * ri + 6 * gi + bi
    # Grayscale ramp: codes 232..255 at levels 8, 18, ..., 238.
    for i in range(24):
        gray = 8 + 10 * i
        mapping[(gray, gray, gray)] = 232 + i
    # Classic ANSI colors.  Four of them shadow cube coordinates (the low
    # code wins, exactly as in the original literal); the others use levels
    # (128/192) that never occur in the cube.
    mapping.update({
        (128, 0, 0): 1,
        (0, 128, 0): 2,
        (128, 128, 0): 3,
        (0, 0, 128): 4,
        (128, 0, 128): 5,
        (0, 128, 128): 6,
        (192, 192, 192): 7,
        (255, 255, 0): 11,   # shadows cube code 226
        (0, 0, 255): 12,     # shadows cube code 21
        (255, 0, 255): 13,   # shadows cube code 201
        (0, 255, 255): 14,   # shadows cube code 51
    })
    return mapping

RGB2SHORT_DICT = _build_rgb2short_dict()
# NOTE(review): this is an exact duplicate of hexstr2num defined earlier in
# this module; one of the two identical definitions should be removed.
def hexstr2num(hexstr):
    return int(hexstr, 16)
def rgb2short(r, g, b):
    """ Find the closest xterm-256 approximation to the given RGB value.
    @param r,g,b: each is a number between 0-255 for the Red, Green, and Blue values
    @returns: integer between 0 and 255, compatible with xterm.
    >>> rgb2short(18, 52, 86)
    23
    >>> rgb2short(255, 255, 255)
    231
    >>> rgb2short(13, 173, 214) # vimeo logo
    38
    """
    incs = (0x00, 0x5f, 0x87, 0xaf, 0xd7, 0xff)
    # Snap each channel to the nearest palette increment.  On an exact tie
    # the original loop preferred the *larger* increment, so ties are broken
    # with -level to preserve that behavior.
    # (Fixed: the original reused the name 'b' as a loop temporary, shadowing
    # the blue parameter -- harmless only because the channels had already
    # been copied into a list first, but a bug waiting to happen.)
    snapped = tuple(
        min(incs, key=lambda level: (abs(level - channel), -level))
        for channel in (r, g, b)
    )
    return RGB2SHORT_DICT[snapped]
#---------------------------------------------------------------------
if __name__ == '__main__':
    # Running this module directly executes the doctests embedded above
    # (e.g. in rgb2short).
    import doctest
    doctest.testmod()
| mit |
wreckJ/intellij-community | python/helpers/pydev/pydevd_attach_to_process/winappdbg/win32/peb_teb.py | 102 | 159230 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2009-2014, Mario Vilas
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice,this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""
PEB and TEB structures, constants and data types.
"""
__revision__ = "$Id$"
from winappdbg.win32.defines import *
from winappdbg.win32.version import os
#==============================================================================
# This is used later on to calculate the list of exported symbols.
# '_all' is assigned None first so that the name itself appears in the
# vars() snapshot (presumably so it can be excluded from the exports --
# the consuming code is outside this chunk; confirm there).
_all = None
_all = set(vars().keys())
#==============================================================================
#--- PEB and TEB structures, constants and data types -------------------------
# From http://www.nirsoft.net/kernel_struct/vista/CLIENT_ID.html
#
# typedef struct _CLIENT_ID
# {
# PVOID UniqueProcess;
# PVOID UniqueThread;
# } CLIENT_ID, *PCLIENT_ID;
class CLIENT_ID(Structure):
    """ctypes mirror of the native CLIENT_ID struct: a process/thread
    identifier pair (see the layout reference in the comment above)."""
    _fields_ = [
        ("UniqueProcess", PVOID),
        ("UniqueThread", PVOID),
    ]
# From MSDN:
#
# typedef struct _LDR_DATA_TABLE_ENTRY {
# BYTE Reserved1[2];
# LIST_ENTRY InMemoryOrderLinks;
# PVOID Reserved2[2];
# PVOID DllBase;
# PVOID EntryPoint;
# PVOID Reserved3;
# UNICODE_STRING FullDllName;
# BYTE Reserved4[8];
# PVOID Reserved5[3];
# union {
# ULONG CheckSum;
# PVOID Reserved6;
# };
# ULONG TimeDateStamp;
# } LDR_DATA_TABLE_ENTRY, *PLDR_DATA_TABLE_ENTRY;
##class LDR_DATA_TABLE_ENTRY(Structure):
## _fields_ = [
## ("Reserved1", BYTE * 2),
## ("InMemoryOrderLinks", LIST_ENTRY),
## ("Reserved2", PVOID * 2),
## ("DllBase", PVOID),
## ("EntryPoint", PVOID),
## ("Reserved3", PVOID),
## ("FullDllName", UNICODE_STRING),
## ("Reserved4", BYTE * 8),
## ("Reserved5", PVOID * 3),
## ("CheckSum", ULONG),
## ("TimeDateStamp", ULONG),
##]
# From MSDN:
#
# typedef struct _PEB_LDR_DATA {
# BYTE Reserved1[8];
# PVOID Reserved2[3];
# LIST_ENTRY InMemoryOrderModuleList;
# } PEB_LDR_DATA,
# *PPEB_LDR_DATA;
##class PEB_LDR_DATA(Structure):
## _fields_ = [
## ("Reserved1", BYTE),
## ("Reserved2", PVOID),
## ("InMemoryOrderModuleList", LIST_ENTRY),
##]
# From http://undocumented.ntinternals.net/UserMode/Structures/RTL_USER_PROCESS_PARAMETERS.html
# typedef struct _RTL_USER_PROCESS_PARAMETERS {
# ULONG MaximumLength;
# ULONG Length;
# ULONG Flags;
# ULONG DebugFlags;
# PVOID ConsoleHandle;
# ULONG ConsoleFlags;
# HANDLE StdInputHandle;
# HANDLE StdOutputHandle;
# HANDLE StdErrorHandle;
# UNICODE_STRING CurrentDirectoryPath;
# HANDLE CurrentDirectoryHandle;
# UNICODE_STRING DllPath;
# UNICODE_STRING ImagePathName;
# UNICODE_STRING CommandLine;
# PVOID Environment;
# ULONG StartingPositionLeft;
# ULONG StartingPositionTop;
# ULONG Width;
# ULONG Height;
# ULONG CharWidth;
# ULONG CharHeight;
# ULONG ConsoleTextAttributes;
# ULONG WindowFlags;
# ULONG ShowWindowFlags;
# UNICODE_STRING WindowTitle;
# UNICODE_STRING DesktopName;
# UNICODE_STRING ShellInfo;
# UNICODE_STRING RuntimeData;
# RTL_DRIVE_LETTER_CURDIR DLCurrentDirectory[0x20];
# } RTL_USER_PROCESS_PARAMETERS, *PRTL_USER_PROCESS_PARAMETERS;
# kd> dt _RTL_USER_PROCESS_PARAMETERS
# ntdll!_RTL_USER_PROCESS_PARAMETERS
# +0x000 MaximumLength : Uint4B
# +0x004 Length : Uint4B
# +0x008 Flags : Uint4B
# +0x00c DebugFlags : Uint4B
# +0x010 ConsoleHandle : Ptr32 Void
# +0x014 ConsoleFlags : Uint4B
# +0x018 StandardInput : Ptr32 Void
# +0x01c StandardOutput : Ptr32 Void
# +0x020 StandardError : Ptr32 Void
# +0x024 CurrentDirectory : _CURDIR
# +0x030 DllPath : _UNICODE_STRING
# +0x038 ImagePathName : _UNICODE_STRING
# +0x040 CommandLine : _UNICODE_STRING
# +0x048 Environment : Ptr32 Void
# +0x04c StartingX : Uint4B
# +0x050 StartingY : Uint4B
# +0x054 CountX : Uint4B
# +0x058 CountY : Uint4B
# +0x05c CountCharsX : Uint4B
# +0x060 CountCharsY : Uint4B
# +0x064 FillAttribute : Uint4B
# +0x068 WindowFlags : Uint4B
# +0x06c ShowWindowFlags : Uint4B
# +0x070 WindowTitle : _UNICODE_STRING
# +0x078 DesktopInfo : _UNICODE_STRING
# +0x080 ShellInfo : _UNICODE_STRING
# +0x088 RuntimeData : _UNICODE_STRING
# +0x090 CurrentDirectores : [32] _RTL_DRIVE_LETTER_CURDIR
# +0x290 EnvironmentSize : Uint4B
##class RTL_USER_PROCESS_PARAMETERS(Structure):
## _fields_ = [
## ("MaximumLength", ULONG),
## ("Length", ULONG),
## ("Flags", ULONG),
## ("DebugFlags", ULONG),
## ("ConsoleHandle", PVOID),
## ("ConsoleFlags", ULONG),
## ("StandardInput", HANDLE),
## ("StandardOutput", HANDLE),
## ("StandardError", HANDLE),
## ("CurrentDirectory", CURDIR),
## ("DllPath", UNICODE_STRING),
## ("ImagePathName", UNICODE_STRING),
## ("CommandLine", UNICODE_STRING),
## ("Environment", PVOID),
## ("StartingX", ULONG),
## ("StartingY", ULONG),
## ("CountX", ULONG),
## ("CountY", ULONG),
## ("CountCharsX", ULONG),
## ("CountCharsY", ULONG),
## ("FillAttribute", ULONG),
## ("WindowFlags", ULONG),
## ("ShowWindowFlags", ULONG),
## ("WindowTitle", UNICODE_STRING),
## ("DesktopInfo", UNICODE_STRING),
## ("ShellInfo", UNICODE_STRING),
## ("RuntimeData", UNICODE_STRING),
## ("CurrentDirectores", RTL_DRIVE_LETTER_CURDIR * 32), # typo here?
##
## # Windows 2008 and Vista
## ("EnvironmentSize", ULONG),
##]
## @property
## def CurrentDirectories(self):
## return self.CurrentDirectores
# From MSDN:
#
# typedef struct _RTL_USER_PROCESS_PARAMETERS {
# BYTE Reserved1[16];
# PVOID Reserved2[10];
# UNICODE_STRING ImagePathName;
# UNICODE_STRING CommandLine;
# } RTL_USER_PROCESS_PARAMETERS,
# *PRTL_USER_PROCESS_PARAMETERS;
class RTL_USER_PROCESS_PARAMETERS(Structure):
    """ctypes mirror of the documented (MSDN) RTL_USER_PROCESS_PARAMETERS
    layout, exposing only ImagePathName and CommandLine by name, plus the
    undocumented Environment pointer that follows them."""
    _fields_ = [
        ("Reserved1", BYTE * 16),
        ("Reserved2", PVOID * 10),
        ("ImagePathName", UNICODE_STRING),
        ("CommandLine", UNICODE_STRING),
        ("Environment", PVOID),  # undocumented!
        #
        # XXX TODO
        # This structure should be defined with all undocumented fields for
        # each version of Windows, just like it's being done for PEB and TEB.
        #
    ]
# Opaque pointer to the process post-initialization callback; no prototype
# is needed here, so a plain PVOID stands in for the function pointer type.
PPS_POST_PROCESS_INIT_ROUTINE = PVOID
#from MSDN:
#
# typedef struct _PEB {
# BYTE Reserved1[2];
# BYTE BeingDebugged;
# BYTE Reserved2[21];
# PPEB_LDR_DATA LoaderData;
# PRTL_USER_PROCESS_PARAMETERS ProcessParameters;
# BYTE Reserved3[520];
# PPS_POST_PROCESS_INIT_ROUTINE PostProcessInitRoutine;
# BYTE Reserved4[136];
# ULONG SessionId;
# } PEB;
##class PEB(Structure):
## _fields_ = [
## ("Reserved1", BYTE * 2),
## ("BeingDebugged", BYTE),
## ("Reserved2", BYTE * 21),
## ("LoaderData", PVOID, # PPEB_LDR_DATA
## ("ProcessParameters", PVOID, # PRTL_USER_PROCESS_PARAMETERS
## ("Reserved3", BYTE * 520),
## ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
## ("Reserved4", BYTE),
## ("SessionId", ULONG),
##]
# from MSDN:
#
# typedef struct _TEB {
# BYTE Reserved1[1952];
# PVOID Reserved2[412];
# PVOID TlsSlots[64];
# BYTE Reserved3[8];
# PVOID Reserved4[26];
# PVOID ReservedForOle;
# PVOID Reserved5[4];
# PVOID TlsExpansionSlots;
# } TEB,
# *PTEB;
##class TEB(Structure):
## _fields_ = [
## ("Reserved1", PVOID * 1952),
## ("Reserved2", PVOID * 412),
## ("TlsSlots", PVOID * 64),
## ("Reserved3", BYTE * 8),
## ("Reserved4", PVOID * 26),
## ("ReservedForOle", PVOID),
## ("Reserved5", PVOID * 4),
## ("TlsExpansionSlots", PVOID),
##]
# from http://undocumented.ntinternals.net/UserMode/Structures/LDR_MODULE.html
#
# typedef struct _LDR_MODULE {
# LIST_ENTRY InLoadOrderModuleList;
# LIST_ENTRY InMemoryOrderModuleList;
# LIST_ENTRY InInitializationOrderModuleList;
# PVOID BaseAddress;
# PVOID EntryPoint;
# ULONG SizeOfImage;
# UNICODE_STRING FullDllName;
# UNICODE_STRING BaseDllName;
# ULONG Flags;
# SHORT LoadCount;
# SHORT TlsIndex;
# LIST_ENTRY HashTableEntry;
# ULONG TimeDateStamp;
# } LDR_MODULE, *PLDR_MODULE;
class LDR_MODULE(Structure):
    """ctypes mirror of the undocumented LDR_MODULE struct (loader module
    list entry), per the reference URL in the comment above."""
    _fields_ = [
        ("InLoadOrderModuleList", LIST_ENTRY),
        ("InMemoryOrderModuleList", LIST_ENTRY),
        ("InInitializationOrderModuleList", LIST_ENTRY),
        ("BaseAddress", PVOID),
        ("EntryPoint", PVOID),
        ("SizeOfImage", ULONG),
        ("FullDllName", UNICODE_STRING),
        ("BaseDllName", UNICODE_STRING),
        ("Flags", ULONG),
        ("LoadCount", SHORT),
        ("TlsIndex", SHORT),
        ("HashTableEntry", LIST_ENTRY),
        ("TimeDateStamp", ULONG),
    ]
# from http://undocumented.ntinternals.net/UserMode/Structures/PEB_LDR_DATA.html
#
# typedef struct _PEB_LDR_DATA {
# ULONG Length;
# BOOLEAN Initialized;
# PVOID SsHandle;
# LIST_ENTRY InLoadOrderModuleList;
# LIST_ENTRY InMemoryOrderModuleList;
# LIST_ENTRY InInitializationOrderModuleList;
# } PEB_LDR_DATA, *PPEB_LDR_DATA;
class PEB_LDR_DATA(Structure):
    """ctypes mirror of the undocumented PEB_LDR_DATA struct (the PEB's
    loader data, holding the three module list heads)."""
    _fields_ = [
        ("Length", ULONG),
        ("Initialized", BOOLEAN),
        ("SsHandle", PVOID),
        ("InLoadOrderModuleList", LIST_ENTRY),
        ("InMemoryOrderModuleList", LIST_ENTRY),
        ("InInitializationOrderModuleList", LIST_ENTRY),
    ]
# From http://undocumented.ntinternals.net/UserMode/Undocumented%20Functions/NT%20Objects/Process/PEB_FREE_BLOCK.html
#
# typedef struct _PEB_FREE_BLOCK {
# PEB_FREE_BLOCK *Next;
# ULONG Size;
# } PEB_FREE_BLOCK, *PPEB_FREE_BLOCK;
class PEB_FREE_BLOCK(Structure):
    """Node of the PEB free-block singly linked list; fields are assigned
    after the class body so the struct can reference itself."""
    pass

##PPEB_FREE_BLOCK = POINTER(PEB_FREE_BLOCK)
# A plain PVOID stands in for the self-referential pointer type.
PPEB_FREE_BLOCK = PVOID

PEB_FREE_BLOCK._fields_ = [
    ("Next", PPEB_FREE_BLOCK),
    ("Size", ULONG),
]
# From http://undocumented.ntinternals.net/UserMode/Structures/RTL_DRIVE_LETTER_CURDIR.html
#
# typedef struct _RTL_DRIVE_LETTER_CURDIR {
# USHORT Flags;
# USHORT Length;
# ULONG TimeStamp;
# UNICODE_STRING DosPath;
# } RTL_DRIVE_LETTER_CURDIR, *PRTL_DRIVE_LETTER_CURDIR;
class RTL_DRIVE_LETTER_CURDIR(Structure):
    """ctypes mirror of RTL_DRIVE_LETTER_CURDIR (per-drive current
    directory record), per the reference URL in the comment above."""
    _fields_ = [
        ("Flags", USHORT),
        ("Length", USHORT),
        ("TimeStamp", ULONG),
        ("DosPath", UNICODE_STRING),
    ]
# From http://www.nirsoft.net/kernel_struct/vista/CURDIR.html
#
# typedef struct _CURDIR
# {
# UNICODE_STRING DosPath;
# PVOID Handle;
# } CURDIR, *PCURDIR;
class CURDIR(Structure):
    """ctypes mirror of the CURDIR struct: a DOS path plus its open
    directory handle."""
    _fields_ = [
        ("DosPath", UNICODE_STRING),
        ("Handle", PVOID),
    ]
# From http://www.nirsoft.net/kernel_struct/vista/RTL_CRITICAL_SECTION_DEBUG.html
#
# typedef struct _RTL_CRITICAL_SECTION_DEBUG
# {
# WORD Type;
# WORD CreatorBackTraceIndex;
# PRTL_CRITICAL_SECTION CriticalSection;
# LIST_ENTRY ProcessLocksList;
# ULONG EntryCount;
# ULONG ContentionCount;
# ULONG Flags;
# WORD CreatorBackTraceIndexHigh;
# WORD SpareUSHORT;
# } RTL_CRITICAL_SECTION_DEBUG, *PRTL_CRITICAL_SECTION_DEBUG;
#
# From http://www.nirsoft.net/kernel_struct/vista/RTL_CRITICAL_SECTION.html
#
# typedef struct _RTL_CRITICAL_SECTION
# {
# PRTL_CRITICAL_SECTION_DEBUG DebugInfo;
# LONG LockCount;
# LONG RecursionCount;
# PVOID OwningThread;
# PVOID LockSemaphore;
# ULONG SpinCount;
# } RTL_CRITICAL_SECTION, *PRTL_CRITICAL_SECTION;
#
class RTL_CRITICAL_SECTION(Structure):
    """ctypes mirror of RTL_CRITICAL_SECTION (user-mode critical section),
    per the reference URL in the comment above."""
    _fields_ = [
        ("DebugInfo", PVOID),  # PRTL_CRITICAL_SECTION_DEBUG
        ("LockCount", LONG),
        ("RecursionCount", LONG),
        ("OwningThread", PVOID),
        ("LockSemaphore", PVOID),
        ("SpinCount", ULONG),
    ]
class RTL_CRITICAL_SECTION_DEBUG(Structure):
    """ctypes mirror of RTL_CRITICAL_SECTION_DEBUG (debug companion record
    of a critical section)."""
    _fields_ = [
        ("Type", WORD),
        ("CreatorBackTraceIndex", WORD),
        ("CriticalSection", PVOID),  # PRTL_CRITICAL_SECTION
        ("ProcessLocksList", LIST_ENTRY),
        ("EntryCount", ULONG),
        ("ContentionCount", ULONG),
        ("Flags", ULONG),
        ("CreatorBackTraceIndexHigh", WORD),
        ("SpareUSHORT", WORD),
    ]
# Pointer types for the structures defined above.
PRTL_CRITICAL_SECTION = POINTER(RTL_CRITICAL_SECTION)
PRTL_CRITICAL_SECTION_DEBUG = POINTER(RTL_CRITICAL_SECTION_DEBUG)
PPEB_LDR_DATA = POINTER(PEB_LDR_DATA)
PRTL_USER_PROCESS_PARAMETERS = POINTER(RTL_USER_PROCESS_PARAMETERS)
# The PEB lock routine is an opaque callback, so a plain PVOID is used.
PPEBLOCKROUTINE = PVOID
# PEB.BitField flag bits.
ImageUsesLargePages = 1 << 0
IsProtectedProcess = 1 << 1
IsLegacyProcess = 1 << 2
IsImageDynamicallyRelocated = 1 << 3
SkipPatchingUser32Forwarders = 1 << 4

# PEB.CrossProcessFlags flag bits.
ProcessInJob = 1 << 0
ProcessInitializing = 1 << 1
ProcessUsingVEH = 1 << 2
ProcessUsingVCH = 1 << 3
ProcessUsingFTH = 1 << 4

# PEB.TracingFlags flag bits.
HeapTracingEnabled = 1 << 0
CritSecTracingEnabled = 1 << 1

# NtGlobalFlag values (global heap/debugging flags).
FLG_VALID_BITS = 0x003FFFFF  # not a flag: mask of the defined flag bits
FLG_STOP_ON_EXCEPTION = 0x00000001
FLG_SHOW_LDR_SNAPS = 0x00000002
FLG_DEBUG_INITIAL_COMMAND = 0x00000004
FLG_STOP_ON_HUNG_GUI = 0x00000008
FLG_HEAP_ENABLE_TAIL_CHECK = 0x00000010
FLG_HEAP_ENABLE_FREE_CHECK = 0x00000020
FLG_HEAP_VALIDATE_PARAMETERS = 0x00000040
FLG_HEAP_VALIDATE_ALL = 0x00000080
FLG_POOL_ENABLE_TAIL_CHECK = 0x00000100
FLG_POOL_ENABLE_FREE_CHECK = 0x00000200
FLG_POOL_ENABLE_TAGGING = 0x00000400
FLG_HEAP_ENABLE_TAGGING = 0x00000800
FLG_USER_STACK_TRACE_DB = 0x00001000
FLG_KERNEL_STACK_TRACE_DB = 0x00002000
FLG_MAINTAIN_OBJECT_TYPELIST = 0x00004000
FLG_HEAP_ENABLE_TAG_BY_DLL = 0x00008000
FLG_IGNORE_DEBUG_PRIV = 0x00010000
FLG_ENABLE_CSRDEBUG = 0x00020000
FLG_ENABLE_KDEBUG_SYMBOL_LOAD = 0x00040000
FLG_DISABLE_PAGE_KERNEL_STACKS = 0x00080000
FLG_HEAP_ENABLE_CALL_TRACING = 0x00100000
FLG_HEAP_DISABLE_COALESCING = 0x00200000
FLG_ENABLE_CLOSE_EXCEPTION = 0x00400000
FLG_ENABLE_EXCEPTION_LOGGING = 0x00800000
FLG_ENABLE_HANDLE_TYPE_TAGGING = 0x01000000
FLG_HEAP_PAGE_ALLOCS = 0x02000000
FLG_DEBUG_WINLOGON = 0x04000000
FLG_ENABLE_DBGPRINT_BUFFERING = 0x08000000
FLG_EARLY_CRITICAL_SECTION_EVT = 0x10000000
FLG_DISABLE_DLL_VERIFICATION = 0x80000000
class _PEB_NT(Structure):
    """PEB layout for 32-bit Windows NT.

    Field order and _pack_ are ABI-critical: they must match the in-memory
    layout of the target process exactly, so do not reorder or retype
    members.  Opaque pointers are declared as PVOID with the intended type
    noted in a trailing comment.
    """
    _pack_ = 4
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID),  # PPEB_LDR_DATA
        ("ProcessParameters", PVOID),  # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),
        ("FastPebLockRoutine", PVOID),  # PPEBLOCKROUTINE
        ("FastPebUnlockRoutine", PVOID),  # PPEBLOCKROUTINE
        ("EnvironmentUpdateCount", ULONG),
        ("KernelCallbackTable", PVOID),  # Ptr32 Ptr32 Void
        ("EventLogSection", PVOID),
        ("EventLog", PVOID),
        ("FreeList", PVOID),  # PPEB_FREE_BLOCK
        ("TlsExpansionCounter", ULONG),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", ULONG * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("ReadOnlySharedMemoryHeap", PVOID),
        ("ReadOnlyStaticServerData", PVOID),  # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", ULONG),
        ("NtGlobalFlag", ULONG),
        ("Spare2", BYTE * 4),
        ("CriticalSectionTimeout", LONGLONG),  # LARGE_INTEGER
        ("HeapSegmentReserve", ULONG),
        ("HeapSegmentCommit", ULONG),
        ("HeapDeCommitTotalFreeThreshold", ULONG),
        ("HeapDeCommitFreeBlockThreshold", ULONG),
        ("NumberOfHeaps", ULONG),
        ("MaximumNumberOfHeaps", ULONG),
        ("ProcessHeaps", PVOID),  # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", PVOID),
        ("LoaderLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", ULONG),
        ("OSMinorVersion", ULONG),
        ("OSBuildNumber", ULONG),
        ("OSPlatformId", ULONG),
        ("ImageSubSystem", ULONG),
        ("ImageSubSystemMajorVersion", ULONG),
        ("ImageSubSystemMinorVersion", ULONG),
        ("ImageProcessAffinityMask", ULONG),
        ("GdiHandleBuffer", ULONG * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", ULONG),
        ("TlsExpansionBitmapBits", BYTE * 128),
        ("SessionId", ULONG),
    ]
# not really, but "dt _PEB" in w2k isn't working for me :(
# The Windows 2000 PEB is assumed identical to the NT layout (unverified).
_PEB_2000 = _PEB_NT
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 SpareBool : UChar
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 FastPebLockRoutine : Ptr32 Void
# +0x024 FastPebUnlockRoutine : Ptr32 Void
# +0x028 EnvironmentUpdateCount : Uint4B
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 AtlThunkSListPtr32 : Uint4B
# +0x038 FreeList : Ptr32 _PEB_FREE_BLOCK
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 ReadOnlySharedMemoryHeap : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 Void
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ImageProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 Void
# +0x1fc ProcessAssemblyStorageMap : Ptr32 Void
# +0x200 SystemDefaultActivationContextData : Ptr32 Void
# +0x204 SystemAssemblyStorageMap : Ptr32 Void
# +0x208 MinimumStackCommit : Uint4B
class _PEB_XP(Structure):
    """PEB layout for 32-bit Windows XP (matches the "dt _PEB" dump in the
    comment above).

    Field order and _pack_ are ABI-critical: they must match the in-memory
    layout of the target process exactly, so do not reorder or retype
    members.  Opaque pointers are declared as PVOID with the intended type
    noted in a trailing comment.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("SpareBool", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID),  # PPEB_LDR_DATA
        ("ProcessParameters", PVOID),  # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID),
        ("FastPebLockRoutine", PVOID),
        ("FastPebUnlockRoutine", PVOID),
        ("EnvironmentUpdateCount", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("AtlThunkSListPtr32", DWORD),
        ("FreeList", PVOID),  # PPEB_FREE_BLOCK
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("ReadOnlySharedMemoryHeap", PVOID),
        ("ReadOnlyStaticServerData", PVOID),  # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG),  # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID),  # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID),  # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ImageProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG),  # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG),  # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID),  # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID),  # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", DWORD),
    ]
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 SpareBits : Pos 1, 7 Bits
# +0x008 Mutant : Ptr64 Void
# +0x010 ImageBaseAddress : Ptr64 Void
# +0x018 Ldr : Ptr64 _PEB_LDR_DATA
# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS
# +0x028 SubSystemData : Ptr64 Void
# +0x030 ProcessHeap : Ptr64 Void
# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x040 AtlThunkSListPtr : Ptr64 Void
# +0x048 SparePtr2 : Ptr64 Void
# +0x050 EnvironmentUpdateCount : Uint4B
# +0x058 KernelCallbackTable : Ptr64 Void
# +0x060 SystemReserved : [1] Uint4B
# +0x064 SpareUlong : Uint4B
# +0x068 FreeList : Ptr64 _PEB_FREE_BLOCK
# +0x070 TlsExpansionCounter : Uint4B
# +0x078 TlsBitmap : Ptr64 Void
# +0x080 TlsBitmapBits : [2] Uint4B
# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void
# +0x090 ReadOnlySharedMemoryHeap : Ptr64 Void
# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void
# +0x0a0 AnsiCodePageData : Ptr64 Void
# +0x0a8 OemCodePageData : Ptr64 Void
# +0x0b0 UnicodeCaseTableData : Ptr64 Void
# +0x0b8 NumberOfProcessors : Uint4B
# +0x0bc NtGlobalFlag : Uint4B
# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER
# +0x0c8 HeapSegmentReserve : Uint8B
# +0x0d0 HeapSegmentCommit : Uint8B
# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B
# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B
# +0x0e8 NumberOfHeaps : Uint4B
# +0x0ec MaximumNumberOfHeaps : Uint4B
# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void
# +0x0f8 GdiSharedHandleTable : Ptr64 Void
# +0x100 ProcessStarterHelper : Ptr64 Void
# +0x108 GdiDCAttributeList : Uint4B
# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x118 OSMajorVersion : Uint4B
# +0x11c OSMinorVersion : Uint4B
# +0x120 OSBuildNumber : Uint2B
# +0x122 OSCSDVersion : Uint2B
# +0x124 OSPlatformId : Uint4B
# +0x128 ImageSubsystem : Uint4B
# +0x12c ImageSubsystemMajorVersion : Uint4B
# +0x130 ImageSubsystemMinorVersion : Uint4B
# +0x138 ImageProcessAffinityMask : Uint8B
# +0x140 GdiHandleBuffer : [60] Uint4B
# +0x230 PostProcessInitRoutine : Ptr64 void
# +0x238 TlsExpansionBitmap : Ptr64 Void
# +0x240 TlsExpansionBitmapBits : [32] Uint4B
# +0x2c0 SessionId : Uint4B
# +0x2c8 AppCompatFlags : _ULARGE_INTEGER
# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x2d8 pShimData : Ptr64 Void
# +0x2e0 AppCompatInfo : Ptr64 Void
# +0x2e8 CSDVersion : _UNICODE_STRING
# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x310 SystemAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x318 MinimumStackCommit : Uint8B
# +0x320 FlsCallback : Ptr64 Ptr64 Void
# +0x328 FlsListHead : _LIST_ENTRY
# +0x338 FlsBitmap : Ptr64 Void
# +0x340 FlsBitmapBits : [4] Uint4B
# +0x350 FlsHighIndex : Uint4B
class _PEB_XP_64(Structure):
    """Process Environment Block (PEB) layout for 64-bit Windows XP.

    Field names, order and types mirror the windbg ``dt _PEB`` dump in the
    comment block above; do not reorder or retype fields, the in-memory
    layout depends on them.  Pointer-typed members that reference other
    structures are declared PVOID and annotated with the real type.
    """
    _pack_ = 8  # natural alignment for the 64-bit layout
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("SparePtr2", PVOID),
        ("EnvironmentUpdateCount", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("SpareUlong", DWORD),
        ("FreeList", PVOID), # PPEB_FREE_BLOCK
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("ReadOnlySharedMemoryHeap", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr64 Ptr64 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", QWORD),
        ("HeapSegmentCommit", QWORD),
        ("HeapDeCommitTotalFreeThreshold", QWORD),
        ("HeapDeCommitFreeBlockThreshold", QWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID), # Ptr64 Ptr64 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ImageProcessAffinityMask", QWORD),
        ("GdiHandleBuffer", DWORD * 60),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", QWORD),
        ("FlsCallback", PVOID), # Ptr64 Ptr64 Void
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
    ]
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 SpareBits : Pos 1, 7 Bits
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 AtlThunkSListPtr : Ptr32 Void
# +0x024 SparePtr2 : Ptr32 Void
# +0x028 EnvironmentUpdateCount : Uint4B
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 SpareUlong : Uint4B
# +0x038 FreeList : Ptr32 _PEB_FREE_BLOCK
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 ReadOnlySharedMemoryHeap : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ImageProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x208 MinimumStackCommit : Uint4B
# +0x20c FlsCallback : Ptr32 Ptr32 Void
# +0x210 FlsListHead : _LIST_ENTRY
# +0x218 FlsBitmap : Ptr32 Void
# +0x21c FlsBitmapBits : [4] Uint4B
# +0x22c FlsHighIndex : Uint4B
class _PEB_2003(Structure):
    """Process Environment Block (PEB) layout for 32-bit Windows Server 2003.

    Field names, order and types mirror the windbg ``dt _PEB`` dump in the
    comment block above; do not reorder or retype fields, the in-memory
    layout depends on them.  Pointer-typed members that reference other
    structures are declared PVOID and annotated with the real type.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("SparePtr2", PVOID),
        ("EnvironmentUpdateCount", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("SpareUlong", DWORD),
        ("FreeList", PVOID), # PPEB_FREE_BLOCK
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("ReadOnlySharedMemoryHeap", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ImageProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        # Fixed: was QWORD, but the 32-bit layout dump above gives
        # "+0x208 MinimumStackCommit : Uint4B" and the 32-bit siblings
        # (_PEB_2008, _PEB_2008_R2) use DWORD.  A QWORD here would shift
        # every following Fls* field by 4 bytes under _pack_ = 8.
        ("MinimumStackCommit", DWORD),
        ("FlsCallback", PVOID), # Ptr32 Ptr32 Void
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
    ]
# Windows Server 2003 x64 shares the PEB layout of Windows XP x64,
# and Server 2003 R2 reuses the Server 2003 layouts for both bitnesses.
_PEB_2003_64 = _PEB_XP_64
_PEB_2003_R2 = _PEB_2003
_PEB_2003_R2_64 = _PEB_2003_64
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 AtlThunkSListPtr : Ptr32 Void
# +0x024 IFEOKey : Ptr32 Void
# +0x028 CrossProcessFlags : Uint4B
# +0x028 ProcessInJob : Pos 0, 1 Bit
# +0x028 ProcessInitializing : Pos 1, 1 Bit
# +0x028 ProcessUsingVEH : Pos 2, 1 Bit
# +0x028 ProcessUsingVCH : Pos 3, 1 Bit
# +0x028 ReservedBits0 : Pos 4, 28 Bits
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x02c UserSharedInfoPtr : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 SpareUlong : Uint4B
# +0x038 SparePebPtr0 : Uint4B
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 HotpatchInformation : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ActiveProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x208 MinimumStackCommit : Uint4B
# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO
# +0x210 FlsListHead : _LIST_ENTRY
# +0x218 FlsBitmap : Ptr32 Void
# +0x21c FlsBitmapBits : [4] Uint4B
# +0x22c FlsHighIndex : Uint4B
# +0x230 WerRegistrationData : Ptr32 Void
# +0x234 WerShipAssertPtr : Ptr32 Void
class _PEB_2008(Structure):
    """Process Environment Block (PEB) layout for 32-bit Windows Server 2008.

    Field names, order and types mirror the windbg ``dt _PEB`` dump in the
    comment block above; do not reorder or retype fields, the in-memory
    layout depends on them.  Pointer-typed members that reference other
    structures are declared PVOID and annotated with the real type.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("SpareUlong", DWORD),
        ("SparePebPtr0", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", DWORD),
        ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
    ]
    # Per the dump above, UserSharedInfoPtr (+0x02c) is a union overlay of
    # KernelCallbackTable, so it is exposed as a property over the same field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x008 Mutant : Ptr64 Void
# +0x010 ImageBaseAddress : Ptr64 Void
# +0x018 Ldr : Ptr64 _PEB_LDR_DATA
# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS
# +0x028 SubSystemData : Ptr64 Void
# +0x030 ProcessHeap : Ptr64 Void
# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x040 AtlThunkSListPtr : Ptr64 Void
# +0x048 IFEOKey : Ptr64 Void
# +0x050 CrossProcessFlags : Uint4B
# +0x050 ProcessInJob : Pos 0, 1 Bit
# +0x050 ProcessInitializing : Pos 1, 1 Bit
# +0x050 ProcessUsingVEH : Pos 2, 1 Bit
# +0x050 ProcessUsingVCH : Pos 3, 1 Bit
# +0x050 ReservedBits0 : Pos 4, 28 Bits
# +0x058 KernelCallbackTable : Ptr64 Void
# +0x058 UserSharedInfoPtr : Ptr64 Void
# +0x060 SystemReserved : [1] Uint4B
# +0x064 SpareUlong : Uint4B
# +0x068 SparePebPtr0 : Uint8B
# +0x070 TlsExpansionCounter : Uint4B
# +0x078 TlsBitmap : Ptr64 Void
# +0x080 TlsBitmapBits : [2] Uint4B
# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void
# +0x090 HotpatchInformation : Ptr64 Void
# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void
# +0x0a0 AnsiCodePageData : Ptr64 Void
# +0x0a8 OemCodePageData : Ptr64 Void
# +0x0b0 UnicodeCaseTableData : Ptr64 Void
# +0x0b8 NumberOfProcessors : Uint4B
# +0x0bc NtGlobalFlag : Uint4B
# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER
# +0x0c8 HeapSegmentReserve : Uint8B
# +0x0d0 HeapSegmentCommit : Uint8B
# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B
# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B
# +0x0e8 NumberOfHeaps : Uint4B
# +0x0ec MaximumNumberOfHeaps : Uint4B
# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void
# +0x0f8 GdiSharedHandleTable : Ptr64 Void
# +0x100 ProcessStarterHelper : Ptr64 Void
# +0x108 GdiDCAttributeList : Uint4B
# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x118 OSMajorVersion : Uint4B
# +0x11c OSMinorVersion : Uint4B
# +0x120 OSBuildNumber : Uint2B
# +0x122 OSCSDVersion : Uint2B
# +0x124 OSPlatformId : Uint4B
# +0x128 ImageSubsystem : Uint4B
# +0x12c ImageSubsystemMajorVersion : Uint4B
# +0x130 ImageSubsystemMinorVersion : Uint4B
# +0x138 ActiveProcessAffinityMask : Uint8B
# +0x140 GdiHandleBuffer : [60] Uint4B
# +0x230 PostProcessInitRoutine : Ptr64 void
# +0x238 TlsExpansionBitmap : Ptr64 Void
# +0x240 TlsExpansionBitmapBits : [32] Uint4B
# +0x2c0 SessionId : Uint4B
# +0x2c8 AppCompatFlags : _ULARGE_INTEGER
# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x2d8 pShimData : Ptr64 Void
# +0x2e0 AppCompatInfo : Ptr64 Void
# +0x2e8 CSDVersion : _UNICODE_STRING
# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x310 SystemAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x318 MinimumStackCommit : Uint8B
# +0x320 FlsCallback : Ptr64 _FLS_CALLBACK_INFO
# +0x328 FlsListHead : _LIST_ENTRY
# +0x338 FlsBitmap : Ptr64 Void
# +0x340 FlsBitmapBits : [4] Uint4B
# +0x350 FlsHighIndex : Uint4B
# +0x358 WerRegistrationData : Ptr64 Void
# +0x360 WerShipAssertPtr : Ptr64 Void
class _PEB_2008_64(Structure):
    """Process Environment Block (PEB) layout for 64-bit Windows Server 2008.

    Field names, order and types mirror the windbg ``dt _PEB`` dump in the
    comment block above; do not reorder or retype fields, the in-memory
    layout depends on them.  Pointer-typed members that reference other
    structures are declared PVOID and annotated with the real type.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("SpareUlong", DWORD),
        ("SparePebPtr0", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr64 Ptr64 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", QWORD),
        ("HeapSegmentCommit", QWORD),
        ("HeapDeCommitTotalFreeThreshold", QWORD),
        ("HeapDeCommitFreeBlockThreshold", QWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID), # Ptr64 Ptr64 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", QWORD),
        ("GdiHandleBuffer", DWORD * 60),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", QWORD),
        ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
    ]
    # Per the dump above, UserSharedInfoPtr (+0x058) is a union overlay of
    # KernelCallbackTable, so it is exposed as a property over the same field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 AtlThunkSListPtr : Ptr32 Void
# +0x024 IFEOKey : Ptr32 Void
# +0x028 CrossProcessFlags : Uint4B
# +0x028 ProcessInJob : Pos 0, 1 Bit
# +0x028 ProcessInitializing : Pos 1, 1 Bit
# +0x028 ProcessUsingVEH : Pos 2, 1 Bit
# +0x028 ProcessUsingVCH : Pos 3, 1 Bit
# +0x028 ProcessUsingFTH : Pos 4, 1 Bit
# +0x028 ReservedBits0 : Pos 5, 27 Bits
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x02c UserSharedInfoPtr : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 AtlThunkSListPtr32 : Uint4B
# +0x038 ApiSetMap : Ptr32 Void
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 HotpatchInformation : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ActiveProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x208 MinimumStackCommit : Uint4B
# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO
# +0x210 FlsListHead : _LIST_ENTRY
# +0x218 FlsBitmap : Ptr32 Void
# +0x21c FlsBitmapBits : [4] Uint4B
# +0x22c FlsHighIndex : Uint4B
# +0x230 WerRegistrationData : Ptr32 Void
# +0x234 WerShipAssertPtr : Ptr32 Void
# +0x238 pContextData : Ptr32 Void
# +0x23c pImageHeaderHash : Ptr32 Void
# +0x240 TracingFlags : Uint4B
# +0x240 HeapTracingEnabled : Pos 0, 1 Bit
# +0x240 CritSecTracingEnabled : Pos 1, 1 Bit
# +0x240 SpareTracingBits : Pos 2, 30 Bits
class _PEB_2008_R2(Structure):
    """Process Environment Block (PEB) layout for 32-bit Windows Server 2008 R2.

    Field names, order and types mirror the windbg ``dt _PEB`` dump in the
    comment block above; do not reorder or retype fields, the in-memory
    layout depends on them.  Pointer-typed members that reference other
    structures are declared PVOID and annotated with the real type.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        # NOTE(review): the dump gives "+0x034 AtlThunkSListPtr32 : Uint4B";
        # PVOID only matches that size on a 32-bit interpreter (the 64-bit
        # variant below declares it DWORD) -- confirm before use elsewhere.
        ("AtlThunkSListPtr32", PVOID),
        ("ApiSetMap", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", DWORD),
        ("HeapSegmentCommit", DWORD),
        ("HeapDeCommitTotalFreeThreshold", DWORD),
        ("HeapDeCommitFreeBlockThreshold", DWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID), # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", DWORD),
        ("GdiHandleBuffer", DWORD * 34),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", DWORD),
        ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
        ("pContextData", PVOID),
        ("pImageHeaderHash", PVOID),
        ("TracingFlags", DWORD),
    ]
    # Per the dump above, UserSharedInfoPtr (+0x02c) is a union overlay of
    # KernelCallbackTable, so it is exposed as a property over the same field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x008 Mutant : Ptr64 Void
# +0x010 ImageBaseAddress : Ptr64 Void
# +0x018 Ldr : Ptr64 _PEB_LDR_DATA
# +0x020 ProcessParameters : Ptr64 _RTL_USER_PROCESS_PARAMETERS
# +0x028 SubSystemData : Ptr64 Void
# +0x030 ProcessHeap : Ptr64 Void
# +0x038 FastPebLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x040 AtlThunkSListPtr : Ptr64 Void
# +0x048 IFEOKey : Ptr64 Void
# +0x050 CrossProcessFlags : Uint4B
# +0x050 ProcessInJob : Pos 0, 1 Bit
# +0x050 ProcessInitializing : Pos 1, 1 Bit
# +0x050 ProcessUsingVEH : Pos 2, 1 Bit
# +0x050 ProcessUsingVCH : Pos 3, 1 Bit
# +0x050 ProcessUsingFTH : Pos 4, 1 Bit
# +0x050 ReservedBits0 : Pos 5, 27 Bits
# +0x058 KernelCallbackTable : Ptr64 Void
# +0x058 UserSharedInfoPtr : Ptr64 Void
# +0x060 SystemReserved : [1] Uint4B
# +0x064 AtlThunkSListPtr32 : Uint4B
# +0x068 ApiSetMap : Ptr64 Void
# +0x070 TlsExpansionCounter : Uint4B
# +0x078 TlsBitmap : Ptr64 Void
# +0x080 TlsBitmapBits : [2] Uint4B
# +0x088 ReadOnlySharedMemoryBase : Ptr64 Void
# +0x090 HotpatchInformation : Ptr64 Void
# +0x098 ReadOnlyStaticServerData : Ptr64 Ptr64 Void
# +0x0a0 AnsiCodePageData : Ptr64 Void
# +0x0a8 OemCodePageData : Ptr64 Void
# +0x0b0 UnicodeCaseTableData : Ptr64 Void
# +0x0b8 NumberOfProcessors : Uint4B
# +0x0bc NtGlobalFlag : Uint4B
# +0x0c0 CriticalSectionTimeout : _LARGE_INTEGER
# +0x0c8 HeapSegmentReserve : Uint8B
# +0x0d0 HeapSegmentCommit : Uint8B
# +0x0d8 HeapDeCommitTotalFreeThreshold : Uint8B
# +0x0e0 HeapDeCommitFreeBlockThreshold : Uint8B
# +0x0e8 NumberOfHeaps : Uint4B
# +0x0ec MaximumNumberOfHeaps : Uint4B
# +0x0f0 ProcessHeaps : Ptr64 Ptr64 Void
# +0x0f8 GdiSharedHandleTable : Ptr64 Void
# +0x100 ProcessStarterHelper : Ptr64 Void
# +0x108 GdiDCAttributeList : Uint4B
# +0x110 LoaderLock : Ptr64 _RTL_CRITICAL_SECTION
# +0x118 OSMajorVersion : Uint4B
# +0x11c OSMinorVersion : Uint4B
# +0x120 OSBuildNumber : Uint2B
# +0x122 OSCSDVersion : Uint2B
# +0x124 OSPlatformId : Uint4B
# +0x128 ImageSubsystem : Uint4B
# +0x12c ImageSubsystemMajorVersion : Uint4B
# +0x130 ImageSubsystemMinorVersion : Uint4B
# +0x138 ActiveProcessAffinityMask : Uint8B
# +0x140 GdiHandleBuffer : [60] Uint4B
# +0x230 PostProcessInitRoutine : Ptr64 void
# +0x238 TlsExpansionBitmap : Ptr64 Void
# +0x240 TlsExpansionBitmapBits : [32] Uint4B
# +0x2c0 SessionId : Uint4B
# +0x2c8 AppCompatFlags : _ULARGE_INTEGER
# +0x2d0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x2d8 pShimData : Ptr64 Void
# +0x2e0 AppCompatInfo : Ptr64 Void
# +0x2e8 CSDVersion : _UNICODE_STRING
# +0x2f8 ActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x300 ProcessAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x308 SystemDefaultActivationContextData : Ptr64 _ACTIVATION_CONTEXT_DATA
# +0x310 SystemAssemblyStorageMap : Ptr64 _ASSEMBLY_STORAGE_MAP
# +0x318 MinimumStackCommit : Uint8B
# +0x320 FlsCallback : Ptr64 _FLS_CALLBACK_INFO
# +0x328 FlsListHead : _LIST_ENTRY
# +0x338 FlsBitmap : Ptr64 Void
# +0x340 FlsBitmapBits : [4] Uint4B
# +0x350 FlsHighIndex : Uint4B
# +0x358 WerRegistrationData : Ptr64 Void
# +0x360 WerShipAssertPtr : Ptr64 Void
# +0x368 pContextData : Ptr64 Void
# +0x370 pImageHeaderHash : Ptr64 Void
# +0x378 TracingFlags : Uint4B
# +0x378 HeapTracingEnabled : Pos 0, 1 Bit
# +0x378 CritSecTracingEnabled : Pos 1, 1 Bit
# +0x378 SpareTracingBits : Pos 2, 30 Bits
class _PEB_2008_R2_64(Structure):
    """Process Environment Block (PEB) layout for 64-bit Windows Server 2008 R2.

    Field names, order and types mirror the windbg ``dt _PEB`` dump in the
    comment block above; do not reorder or retype fields, the in-memory
    layout depends on them.  Pointer-typed members that reference other
    structures are declared PVOID and annotated with the real type.
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace", BOOLEAN),
        ("ReadImageFileExecOptions", UCHAR),
        ("BeingDebugged", BOOLEAN),
        ("BitField", UCHAR),
        ("Mutant", HANDLE),
        ("ImageBaseAddress", PVOID),
        ("Ldr", PVOID), # PPEB_LDR_DATA
        ("ProcessParameters", PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData", PVOID),
        ("ProcessHeap", PVOID),
        ("FastPebLock", PVOID), # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr", PVOID),
        ("IFEOKey", PVOID),
        ("CrossProcessFlags", DWORD),
        ("KernelCallbackTable", PVOID),
        ("SystemReserved", DWORD),
        ("AtlThunkSListPtr32", DWORD),
        ("ApiSetMap", PVOID),
        ("TlsExpansionCounter", DWORD),
        ("TlsBitmap", PVOID),
        ("TlsBitmapBits", DWORD * 2),
        ("ReadOnlySharedMemoryBase", PVOID),
        ("HotpatchInformation", PVOID),
        ("ReadOnlyStaticServerData", PVOID), # Ptr32 Ptr32 Void
        ("AnsiCodePageData", PVOID),
        ("OemCodePageData", PVOID),
        ("UnicodeCaseTableData", PVOID),
        ("NumberOfProcessors", DWORD),
        ("NtGlobalFlag", DWORD),
        ("CriticalSectionTimeout", LONGLONG), # LARGE_INTEGER
        ("HeapSegmentReserve", QWORD),
        ("HeapSegmentCommit", QWORD),
        ("HeapDeCommitTotalFreeThreshold", QWORD),
        ("HeapDeCommitFreeBlockThreshold", QWORD),
        ("NumberOfHeaps", DWORD),
        ("MaximumNumberOfHeaps", DWORD),
        ("ProcessHeaps", PVOID), # Ptr64 Ptr64 Void
        ("GdiSharedHandleTable", PVOID),
        ("ProcessStarterHelper", PVOID),
        ("GdiDCAttributeList", DWORD),
        ("LoaderLock", PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion", DWORD),
        ("OSMinorVersion", DWORD),
        ("OSBuildNumber", WORD),
        ("OSCSDVersion", WORD),
        ("OSPlatformId", DWORD),
        ("ImageSubsystem", DWORD),
        ("ImageSubsystemMajorVersion", DWORD),
        ("ImageSubsystemMinorVersion", DWORD),
        ("ActiveProcessAffinityMask", QWORD),
        ("GdiHandleBuffer", DWORD * 60),
        ("PostProcessInitRoutine", PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap", PVOID),
        ("TlsExpansionBitmapBits", DWORD * 32),
        ("SessionId", DWORD),
        ("AppCompatFlags", ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser", ULONGLONG), # ULARGE_INTEGER
        ("pShimData", PVOID),
        ("AppCompatInfo", PVOID),
        ("CSDVersion", UNICODE_STRING),
        ("ActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData", PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap", PVOID), # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit", QWORD),
        ("FlsCallback", PVOID), # PFLS_CALLBACK_INFO
        ("FlsListHead", LIST_ENTRY),
        ("FlsBitmap", PVOID),
        ("FlsBitmapBits", DWORD * 4),
        ("FlsHighIndex", DWORD),
        ("WerRegistrationData", PVOID),
        ("WerShipAssertPtr", PVOID),
        ("pContextData", PVOID),
        ("pImageHeaderHash", PVOID),
        ("TracingFlags", DWORD),
    ]
    # Per the dump above, UserSharedInfoPtr (+0x058) is a union overlay of
    # KernelCallbackTable, so it is exposed as a property over the same field.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# The PEB layout is shared between client and server releases of the same
# kernel: Vista matches Server 2008, and Windows 7 matches Server 2008 R2.
_PEB_Vista      = _PEB_2008
_PEB_Vista_64   = _PEB_2008_64
_PEB_W7         = _PEB_2008_R2
_PEB_W7_64      = _PEB_2008_R2_64
# +0x000 InheritedAddressSpace : UChar
# +0x001 ReadImageFileExecOptions : UChar
# +0x002 BeingDebugged : UChar
# +0x003 BitField : UChar
# +0x003 ImageUsesLargePages : Pos 0, 1 Bit
# +0x003 IsProtectedProcess : Pos 1, 1 Bit
# +0x003 IsLegacyProcess : Pos 2, 1 Bit
# +0x003 IsImageDynamicallyRelocated : Pos 3, 1 Bit
# +0x003 SkipPatchingUser32Forwarders : Pos 4, 1 Bit
# +0x003 SpareBits : Pos 5, 3 Bits
# +0x004 Mutant : Ptr32 Void
# +0x008 ImageBaseAddress : Ptr32 Void
# +0x00c Ldr : Ptr32 _PEB_LDR_DATA
# +0x010 ProcessParameters : Ptr32 _RTL_USER_PROCESS_PARAMETERS
# +0x014 SubSystemData : Ptr32 Void
# +0x018 ProcessHeap : Ptr32 Void
# +0x01c FastPebLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x020 AtlThunkSListPtr : Ptr32 Void
# +0x024 IFEOKey : Ptr32 Void
# +0x028 CrossProcessFlags : Uint4B
# +0x028 ProcessInJob : Pos 0, 1 Bit
# +0x028 ProcessInitializing : Pos 1, 1 Bit
# +0x028 ProcessUsingVEH : Pos 2, 1 Bit
# +0x028 ProcessUsingVCH : Pos 3, 1 Bit
# +0x028 ProcessUsingFTH : Pos 4, 1 Bit
# +0x028 ReservedBits0 : Pos 5, 27 Bits
# +0x02c KernelCallbackTable : Ptr32 Void
# +0x02c UserSharedInfoPtr : Ptr32 Void
# +0x030 SystemReserved : [1] Uint4B
# +0x034 TracingFlags : Uint4B
# +0x034 HeapTracingEnabled : Pos 0, 1 Bit
# +0x034 CritSecTracingEnabled : Pos 1, 1 Bit
# +0x034 SpareTracingBits : Pos 2, 30 Bits
# +0x038 ApiSetMap : Ptr32 Void
# +0x03c TlsExpansionCounter : Uint4B
# +0x040 TlsBitmap : Ptr32 Void
# +0x044 TlsBitmapBits : [2] Uint4B
# +0x04c ReadOnlySharedMemoryBase : Ptr32 Void
# +0x050 HotpatchInformation : Ptr32 Void
# +0x054 ReadOnlyStaticServerData : Ptr32 Ptr32 Void
# +0x058 AnsiCodePageData : Ptr32 Void
# +0x05c OemCodePageData : Ptr32 Void
# +0x060 UnicodeCaseTableData : Ptr32 Void
# +0x064 NumberOfProcessors : Uint4B
# +0x068 NtGlobalFlag : Uint4B
# +0x070 CriticalSectionTimeout : _LARGE_INTEGER
# +0x078 HeapSegmentReserve : Uint4B
# +0x07c HeapSegmentCommit : Uint4B
# +0x080 HeapDeCommitTotalFreeThreshold : Uint4B
# +0x084 HeapDeCommitFreeBlockThreshold : Uint4B
# +0x088 NumberOfHeaps : Uint4B
# +0x08c MaximumNumberOfHeaps : Uint4B
# +0x090 ProcessHeaps : Ptr32 Ptr32 Void
# +0x094 GdiSharedHandleTable : Ptr32 Void
# +0x098 ProcessStarterHelper : Ptr32 Void
# +0x09c GdiDCAttributeList : Uint4B
# +0x0a0 LoaderLock : Ptr32 _RTL_CRITICAL_SECTION
# +0x0a4 OSMajorVersion : Uint4B
# +0x0a8 OSMinorVersion : Uint4B
# +0x0ac OSBuildNumber : Uint2B
# +0x0ae OSCSDVersion : Uint2B
# +0x0b0 OSPlatformId : Uint4B
# +0x0b4 ImageSubsystem : Uint4B
# +0x0b8 ImageSubsystemMajorVersion : Uint4B
# +0x0bc ImageSubsystemMinorVersion : Uint4B
# +0x0c0 ActiveProcessAffinityMask : Uint4B
# +0x0c4 GdiHandleBuffer : [34] Uint4B
# +0x14c PostProcessInitRoutine : Ptr32 void
# +0x150 TlsExpansionBitmap : Ptr32 Void
# +0x154 TlsExpansionBitmapBits : [32] Uint4B
# +0x1d4 SessionId : Uint4B
# +0x1d8 AppCompatFlags : _ULARGE_INTEGER
# +0x1e0 AppCompatFlagsUser : _ULARGE_INTEGER
# +0x1e8 pShimData : Ptr32 Void
# +0x1ec AppCompatInfo : Ptr32 Void
# +0x1f0 CSDVersion : _UNICODE_STRING
# +0x1f8 ActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x1fc ProcessAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x200 SystemDefaultActivationContextData : Ptr32 _ACTIVATION_CONTEXT_DATA
# +0x204 SystemAssemblyStorageMap : Ptr32 _ASSEMBLY_STORAGE_MAP
# +0x208 MinimumStackCommit : Uint4B
# +0x20c FlsCallback : Ptr32 _FLS_CALLBACK_INFO
# +0x210 FlsListHead : _LIST_ENTRY
# +0x218 FlsBitmap : Ptr32 Void
# +0x21c FlsBitmapBits : [4] Uint4B
# +0x22c FlsHighIndex : Uint4B
# +0x230 WerRegistrationData : Ptr32 Void
# +0x234 WerShipAssertPtr : Ptr32 Void
# +0x238 pContextData : Ptr32 Void
# +0x23c pImageHeaderHash : Ptr32 Void
class _PEB_W7_Beta(Structure):
    """
    Process Environment Block (PEB) layout, 32 bits.

    This definition of the PEB structure is only valid for the beta versions
    of Windows 7. For the final version of Windows 7 use L{_PEB_W7} instead.
    This structure is not chosen automatically (the version dispatch in
    L{PEB} below never selects it).
    """
    _pack_ = 8
    _fields_ = [
        ("InheritedAddressSpace",               BOOLEAN),
        ("ReadImageFileExecOptions",            UCHAR),
        ("BeingDebugged",                       BOOLEAN),
        ("BitField",                            UCHAR),
        ("Mutant",                              HANDLE),
        ("ImageBaseAddress",                    PVOID),
        ("Ldr",                                 PVOID), # PPEB_LDR_DATA
        ("ProcessParameters",                   PVOID), # PRTL_USER_PROCESS_PARAMETERS
        ("SubSystemData",                       PVOID),
        ("ProcessHeap",                         PVOID),
        ("FastPebLock",                         PVOID), # PRTL_CRITICAL_SECTION
        ("AtlThunkSListPtr",                    PVOID),
        ("IFEOKey",                             PVOID),
        ("CrossProcessFlags",                   DWORD),
        ("KernelCallbackTable",                 PVOID),
        ("SystemReserved",                      DWORD),
        ("TracingFlags",                        DWORD),
        ("ApiSetMap",                           PVOID),
        ("TlsExpansionCounter",                 DWORD),
        ("TlsBitmap",                           PVOID),
        ("TlsBitmapBits",                       DWORD * 2),
        ("ReadOnlySharedMemoryBase",            PVOID),
        ("HotpatchInformation",                 PVOID),
        ("ReadOnlyStaticServerData",            PVOID), # Ptr32 Ptr32 Void
        ("AnsiCodePageData",                    PVOID),
        ("OemCodePageData",                     PVOID),
        ("UnicodeCaseTableData",                PVOID),
        ("NumberOfProcessors",                  DWORD),
        ("NtGlobalFlag",                        DWORD),
        ("CriticalSectionTimeout",              LONGLONG),  # LARGE_INTEGER
        ("HeapSegmentReserve",                  DWORD),
        ("HeapSegmentCommit",                   DWORD),
        ("HeapDeCommitTotalFreeThreshold",      DWORD),
        ("HeapDeCommitFreeBlockThreshold",      DWORD),
        ("NumberOfHeaps",                       DWORD),
        ("MaximumNumberOfHeaps",                DWORD),
        ("ProcessHeaps",                        PVOID), # Ptr32 Ptr32 Void
        ("GdiSharedHandleTable",                PVOID),
        ("ProcessStarterHelper",                PVOID),
        ("GdiDCAttributeList",                  DWORD),
        ("LoaderLock",                          PVOID), # PRTL_CRITICAL_SECTION
        ("OSMajorVersion",                      DWORD),
        ("OSMinorVersion",                      DWORD),
        ("OSBuildNumber",                       WORD),
        ("OSCSDVersion",                        WORD),
        ("OSPlatformId",                        DWORD),
        ("ImageSubsystem",                      DWORD),
        ("ImageSubsystemMajorVersion",          DWORD),
        ("ImageSubsystemMinorVersion",          DWORD),
        ("ActiveProcessAffinityMask",           DWORD),
        ("GdiHandleBuffer",                     DWORD * 34),
        ("PostProcessInitRoutine",              PPS_POST_PROCESS_INIT_ROUTINE),
        ("TlsExpansionBitmap",                  PVOID),
        ("TlsExpansionBitmapBits",              DWORD * 32),
        ("SessionId",                           DWORD),
        ("AppCompatFlags",                      ULONGLONG), # ULARGE_INTEGER
        ("AppCompatFlagsUser",                  ULONGLONG), # ULARGE_INTEGER
        ("pShimData",                           PVOID),
        ("AppCompatInfo",                       PVOID),
        ("CSDVersion",                          UNICODE_STRING),
        ("ActivationContextData",               PVOID), # ACTIVATION_CONTEXT_DATA
        ("ProcessAssemblyStorageMap",           PVOID), # ASSEMBLY_STORAGE_MAP
        ("SystemDefaultActivationContextData",  PVOID), # ACTIVATION_CONTEXT_DATA
        ("SystemAssemblyStorageMap",            PVOID), # ASSEMBLY_STORAGE_MAP
        ("MinimumStackCommit",                  DWORD),
        ("FlsCallback",                         PVOID), # PFLS_CALLBACK_INFO
        ("FlsListHead",                         LIST_ENTRY),
        ("FlsBitmap",                           PVOID),
        ("FlsBitmapBits",                       DWORD * 4),
        ("FlsHighIndex",                        DWORD),
        ("WerRegistrationData",                 PVOID),
        ("WerShipAssertPtr",                    PVOID),
        ("pContextData",                        PVOID),
        ("pImageHeaderHash",                    PVOID),
    ]

    # UserSharedInfoPtr shares storage with KernelCallbackTable (they form a
    # union in the native PEB), hence this property pair.
    def __get_UserSharedInfoPtr(self):
        return self.KernelCallbackTable
    def __set_UserSharedInfoPtr(self, value):
        self.KernelCallbackTable = value
    UserSharedInfoPtr = property(__get_UserSharedInfoPtr, __set_UserSharedInfoPtr)
# Use the correct PEB structure definition.
# Defaults to the latest Windows version.
class PEB(Structure):
    """
    Process Environment Block (PEB) for the detected target platform.

    The fields (and, for NT/2000, the packing) are copied at class creation
    time from the version-specific ``_PEB_*`` structure matching the ``os``
    version string. NOTE: ``os`` here is a module-level Windows version
    string (e.g. 'Windows 7'), not the standard library ``os`` module —
    presumably set by this module's version detection code; confirm against
    the full file. Unknown/future versions fall back to the Windows 7
    layout, choosing 32 or 64 bit flavors from the size of SIZE_T.
    """
    _pack_ = 8  # default packing; overridden only for NT and 2000 below
    if os == 'Windows NT':
        _pack_   = _PEB_NT._pack_
        _fields_ = _PEB_NT._fields_
    elif os == 'Windows 2000':
        _pack_   = _PEB_2000._pack_
        _fields_ = _PEB_2000._fields_
    elif os == 'Windows XP':
        _fields_ = _PEB_XP._fields_
    elif os == 'Windows XP (64 bits)':
        _fields_ = _PEB_XP_64._fields_
    elif os == 'Windows 2003':
        _fields_ = _PEB_2003._fields_
    elif os == 'Windows 2003 (64 bits)':
        _fields_ = _PEB_2003_64._fields_
    elif os == 'Windows 2003 R2':
        _fields_ = _PEB_2003_R2._fields_
    elif os == 'Windows 2003 R2 (64 bits)':
        _fields_ = _PEB_2003_R2_64._fields_
    elif os == 'Windows 2008':
        _fields_ = _PEB_2008._fields_
    elif os == 'Windows 2008 (64 bits)':
        _fields_ = _PEB_2008_64._fields_
    elif os == 'Windows 2008 R2':
        _fields_ = _PEB_2008_R2._fields_
    elif os == 'Windows 2008 R2 (64 bits)':
        _fields_ = _PEB_2008_R2_64._fields_
    elif os == 'Windows Vista':
        _fields_ = _PEB_Vista._fields_
    elif os == 'Windows Vista (64 bits)':
        _fields_ = _PEB_Vista_64._fields_
    elif os == 'Windows 7':
        _fields_ = _PEB_W7._fields_
    elif os == 'Windows 7 (64 bits)':
        _fields_ = _PEB_W7_64._fields_
    elif sizeof(SIZE_T) == sizeof(DWORD):
        # Unknown version, 32 bit process: assume the Windows 7 layout.
        _fields_ = _PEB_W7._fields_
    else:
        # Unknown version, 64 bit process: assume the Windows 7 layout.
        _fields_ = _PEB_W7_64._fields_

# NOTE: PPEB is redefined as PVOID further below, before the TEB definitions.
PPEB = POINTER(PEB)
# PEB structure for WOW64 processes.
class PEB_32(Structure):
    """
    32-bit Process Environment Block, for use with WOW64 processes.

    Unlike L{PEB}, version selection here uses ``startswith`` so that on a
    64-bit OS the corresponding 32-bit structure layout is still chosen.
    """
    _pack_ = 8  # default packing; overridden only for NT and 2000 below
    if os == 'Windows NT':
        _pack_   = _PEB_NT._pack_
        _fields_ = _PEB_NT._fields_
    elif os == 'Windows 2000':
        _pack_   = _PEB_2000._pack_
        _fields_ = _PEB_2000._fields_
    elif os.startswith('Windows XP'):
        _fields_ = _PEB_XP._fields_
    # 'Windows 2003 R2' must be tested before 'Windows 2003' (prefix match).
    elif os.startswith('Windows 2003 R2'):
        _fields_ = _PEB_2003_R2._fields_
    elif os.startswith('Windows 2003'):
        _fields_ = _PEB_2003._fields_
    # Same ordering constraint for 2008 R2 vs. 2008.
    elif os.startswith('Windows 2008 R2'):
        _fields_ = _PEB_2008_R2._fields_
    elif os.startswith('Windows 2008'):
        _fields_ = _PEB_2008._fields_
    elif os.startswith('Windows Vista'):
        _fields_ = _PEB_Vista._fields_
    else:   # Windows 7 and any unknown future version.
        _fields_ = _PEB_W7._fields_
# from https://vmexplorer.svn.codeplex.com/svn/VMExplorer/src/Win32/Threads.cs
#
# [StructLayout (LayoutKind.Sequential, Size = 0x0C)]
# public struct Wx86ThreadState
# {
# public IntPtr CallBx86Eip; // Ptr32 to Uint4B
# public IntPtr DeallocationCpu; // Ptr32 to Void
# public Byte UseKnownWx86Dll; // UChar
# public Byte OleStubInvoked; // Char
# };
class Wx86ThreadState(Structure):
    """
    Wx86 (x86 emulation) per-thread state, embedded in the XP/2003 TEB.
    Layout taken from the C# declaration quoted in the comment above.
    """
    _fields_ = [
        ("CallBx86Eip",     PVOID),
        ("DeallocationCpu", PVOID),
        ("UseKnownWx86Dll", UCHAR),
        ("OleStubInvoked",  CHAR),
    ]
# ntdll!_RTL_ACTIVATION_CONTEXT_STACK_FRAME
# +0x000 Previous : Ptr64 _RTL_ACTIVATION_CONTEXT_STACK_FRAME
# +0x008 ActivationContext : Ptr64 _ACTIVATION_CONTEXT
# +0x010 Flags : Uint4B
class RTL_ACTIVATION_CONTEXT_STACK_FRAME(Structure):
    """
    One frame of the activation context stack (see the windbg dump above).
    Pointer members are declared as PVOID because the pointed-to structures
    are not defined in this module.
    """
    _fields_ = [
        ("Previous",            PVOID), # RTL_ACTIVATION_CONTEXT_STACK_FRAME
        ("ActivationContext",   PVOID), # ACTIVATION_CONTEXT
        ("Flags",               DWORD),
    ]
# ntdll!_ACTIVATION_CONTEXT_STACK
# +0x000 ActiveFrame : Ptr64 _RTL_ACTIVATION_CONTEXT_STACK_FRAME
# +0x008 FrameListCache : _LIST_ENTRY
# +0x018 Flags : Uint4B
# +0x01c NextCookieSequenceNumber : Uint4B
# +0x020 StackId : Uint4B
class ACTIVATION_CONTEXT_STACK(Structure):
    """
    Per-thread activation context stack (see the windbg dump above).
    """
    _fields_ = [
        ("ActiveFrame",                 PVOID), # RTL_ACTIVATION_CONTEXT_STACK_FRAME
        ("FrameListCache",              LIST_ENTRY),
        ("Flags",                       DWORD),
        ("NextCookieSequenceNumber",    DWORD),
        ("StackId",                     DWORD),
    ]
# typedef struct _PROCESSOR_NUMBER {
# WORD Group;
# BYTE Number;
# BYTE Reserved;
# }PROCESSOR_NUMBER, *PPROCESSOR_NUMBER;
class PROCESSOR_NUMBER(Structure):
    """
    Identifies a logical processor by group and group-relative number,
    matching the Win32 PROCESSOR_NUMBER declaration quoted above.
    """
    _fields_ = [
        ("Group",       WORD),
        ("Number",      BYTE),
        ("Reserved",    BYTE),
    ]
# from http://www.nirsoft.net/kernel_struct/vista/NT_TIB.html
#
# typedef struct _NT_TIB
# {
# PEXCEPTION_REGISTRATION_RECORD ExceptionList;
# PVOID StackBase;
# PVOID StackLimit;
# PVOID SubSystemTib;
# union
# {
# PVOID FiberData;
# ULONG Version;
# };
# PVOID ArbitraryUserPointer;
# PNT_TIB Self;
# } NT_TIB, *PNT_TIB;
class _NT_TIB_UNION(Union):
    # Anonymous union inside NT_TIB: FiberData and Version overlap.
    _fields_ = [
        ("FiberData",   PVOID),
        ("Version",     ULONG),
    ]
class NT_TIB(Structure):
    """
    Thread Information Block (NT_TIB), per the declaration quoted above.

    The native FiberData/Version pair is a union; it is mapped here as the
    nested "u" field and exposed flat through the properties below.
    """
    _fields_ = [
        ("ExceptionList",           PVOID), # PEXCEPTION_REGISTRATION_RECORD
        ("StackBase",               PVOID),
        ("StackLimit",              PVOID),
        ("SubSystemTib",            PVOID),
        ("u",                       _NT_TIB_UNION),
        ("ArbitraryUserPointer",    PVOID),
        ("Self",                    PVOID), # PNTTIB
    ]

    def __get_FiberData(self):
        return self.u.FiberData
    def __set_FiberData(self, value):
        self.u.FiberData = value
    FiberData = property(__get_FiberData, __set_FiberData)

    def __get_Version(self):
        return self.u.Version
    def __set_Version(self, value):
        self.u.Version = value
    Version = property(__get_Version, __set_Version)

PNTTIB = POINTER(NT_TIB)
# From http://www.nirsoft.net/kernel_struct/vista/EXCEPTION_REGISTRATION_RECORD.html
#
# typedef struct _EXCEPTION_REGISTRATION_RECORD
# {
# PEXCEPTION_REGISTRATION_RECORD Next;
# PEXCEPTION_DISPOSITION Handler;
# } EXCEPTION_REGISTRATION_RECORD, *PEXCEPTION_REGISTRATION_RECORD;
# EXCEPTION_REGISTRATION_RECORD is self-referential, so it is declared empty
# first and its _fields_ are assigned after the pointer aliases exist.
class EXCEPTION_REGISTRATION_RECORD(Structure):
    pass

EXCEPTION_DISPOSITION           = DWORD
# The real pointer types (commented out below) are collapsed to PVOID.
##PEXCEPTION_DISPOSITION          = POINTER(EXCEPTION_DISPOSITION)
##PEXCEPTION_REGISTRATION_RECORD  = POINTER(EXCEPTION_REGISTRATION_RECORD)
PEXCEPTION_DISPOSITION          = PVOID
PEXCEPTION_REGISTRATION_RECORD  = PVOID

EXCEPTION_REGISTRATION_RECORD._fields_ = [
    ("Next",    PEXCEPTION_REGISTRATION_RECORD),
    ("Handler", PEXCEPTION_DISPOSITION),
]

# PPEB is likewise redefined as a plain PVOID from here on, so the TEB
# definitions below do not depend on the version-specific PEB class.
##PPEB = POINTER(PEB)
PPEB = PVOID
# From http://www.nirsoft.net/kernel_struct/vista/GDI_TEB_BATCH.html
#
# typedef struct _GDI_TEB_BATCH
# {
# ULONG Offset;
# ULONG HDC;
# ULONG Buffer[310];
# } GDI_TEB_BATCH, *PGDI_TEB_BATCH;
class GDI_TEB_BATCH(Structure):
    """
    GDI batching area embedded in the TEB, per the declaration quoted above.
    """
    _fields_ = [
        ("Offset",  ULONG),
        ("HDC",     ULONG),
        ("Buffer",  ULONG * 310),
    ]
# ntdll!_TEB_ACTIVE_FRAME_CONTEXT
# +0x000 Flags : Uint4B
# +0x008 FrameName : Ptr64 Char
class TEB_ACTIVE_FRAME_CONTEXT(Structure):
    """
    Context descriptor for a TEB active frame (see the windbg dump above).
    """
    _fields_ = [
        ("Flags",       DWORD),
        ("FrameName",   LPVOID),    # LPCHAR
    ]
PTEB_ACTIVE_FRAME_CONTEXT = POINTER(TEB_ACTIVE_FRAME_CONTEXT)
# ntdll!_TEB_ACTIVE_FRAME
# +0x000 Flags : Uint4B
# +0x008 Previous : Ptr64 _TEB_ACTIVE_FRAME
# +0x010 Context : Ptr64 _TEB_ACTIVE_FRAME_CONTEXT
class TEB_ACTIVE_FRAME(Structure):
    """
    One node of the TEB active frame list (see the windbg dump above).
    Link pointers are declared LPVOID to avoid self-referential types.
    """
    _fields_ = [
        ("Flags",       DWORD),
        ("Previous",    LPVOID),    # PTEB_ACTIVE_FRAME
        ("Context",     LPVOID),    # PTEB_ACTIVE_FRAME_CONTEXT
    ]
PTEB_ACTIVE_FRAME = POINTER(TEB_ACTIVE_FRAME)
# SameTebFlags
# Bit masks for the SameTebFlags bitfield of the TEB (bit positions match
# the windbg dumps included later in this file).
DbgSafeThunkCall        = 1 << 0
DbgInDebugPrint         = 1 << 1
DbgHasFiberData         = 1 << 2
DbgSkipThreadAttach     = 1 << 3
DbgWerInShipAssertCode  = 1 << 4
DbgRanProcessInit       = 1 << 5
DbgClonedThread         = 1 << 6
DbgSuppressDebugMsg     = 1 << 7
RtlDisableUserStackWalk = 1 << 8
RtlExceptionAttached    = 1 << 9
RtlInitialThread        = 1 << 10
# XXX This is quite wrong :P
class _TEB_NT(Structure):
    """
    Thread Environment Block (TEB) for Windows NT.

    NOTE(review): the original author flagged this layout as inaccurate
    (see the "XXX This is quite wrong" comment above) — treat field
    offsets with suspicion.
    """
    _pack_ = 4
    _fields_ = [
        ("NtTib",                           NT_TIB),
        ("EnvironmentPointer",              PVOID),
        ("ClientId",                        CLIENT_ID),
        ("ActiveRpcHandle",                 HANDLE),
        ("ThreadLocalStoragePointer",       PVOID),
        ("ProcessEnvironmentBlock",         PPEB),
        ("LastErrorValue",                  ULONG),
        ("CountOfOwnedCriticalSections",    ULONG),
        ("CsrClientThread",                 PVOID),
        ("Win32ThreadInfo",                 PVOID),
        ("User32Reserved",                  ULONG * 26),
        ("UserReserved",                    ULONG * 5),
        ("WOW32Reserved",                   PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale",                   ULONG),
        ("FpSoftwareStatusRegister",        ULONG),
        ("SystemReserved1",                 PVOID * 54),
        ("Spare1",                          PVOID),
        ("ExceptionCode",                   ULONG),
        ("ActivationContextStackPointer",   PVOID), # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1",                     ULONG * 36),
        ("TxFsContext",                     ULONG),
        ("GdiTebBatch",                     GDI_TEB_BATCH),
        ("RealClientId",                    CLIENT_ID),
        ("GdiCachedProcessHandle",          PVOID),
        ("GdiClientPID",                    ULONG),
        ("GdiClientTID",                    ULONG),
        ("GdiThreadLocalInfo",              PVOID),
        ("Win32ClientInfo",                 PVOID * 62),
        ("glDispatchTable",                 PVOID * 233),
        ("glReserved1",                     ULONG * 29),
        ("glReserved2",                     PVOID),
        ("glSectionInfo",                   PVOID),
        ("glSection",                       PVOID),
        ("glTable",                         PVOID),
        ("glCurrentRC",                     PVOID),
        ("glContext",                       PVOID),
        ("LastStatusValue",                 NTSTATUS),
        ("StaticUnicodeString",             UNICODE_STRING),
        ("StaticUnicodeBuffer",             WCHAR * 261),
        ("DeallocationStack",               PVOID),
        ("TlsSlots",                        PVOID * 64),
        ("TlsLinks",                        LIST_ENTRY),
        ("Vdm",                             PVOID),
        ("ReservedForNtRpc",                PVOID),
        ("DbgSsReserved",                   PVOID * 2),
        ("HardErrorDisabled",               ULONG),
        ("Instrumentation",                 PVOID * 9),
        ("ActivityId",                      GUID),
        ("SubProcessTag",                   PVOID),
        ("EtwLocalData",                    PVOID),
        ("EtwTraceData",                    PVOID),
        ("WinSockData",                     PVOID),
        ("GdiBatchCount",                   ULONG),
        ("SpareBool0",                      BOOLEAN),
        ("SpareBool1",                      BOOLEAN),
        ("SpareBool2",                      BOOLEAN),
        ("IdealProcessor",                  UCHAR),
        ("GuaranteedStackBytes",            ULONG),
        ("ReservedForPerf",                 PVOID),
        ("ReservedForOle",                  PVOID),
        ("WaitingOnLoaderLock",             ULONG),
        ("StackCommit",                     PVOID),
        ("StackCommitMax",                  PVOID),
        ("StackReserved",                   PVOID),
    ]
# not really, but "dt _TEB" in w2k isn't working for me :(
_TEB_2000 = _TEB_NT     # placeholder: reuses the NT layout (see comment above)
# +0x000 NtTib : _NT_TIB
# +0x01c EnvironmentPointer : Ptr32 Void
# +0x020 ClientId : _CLIENT_ID
# +0x028 ActiveRpcHandle : Ptr32 Void
# +0x02c ThreadLocalStoragePointer : Ptr32 Void
# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB
# +0x034 LastErrorValue : Uint4B
# +0x038 CountOfOwnedCriticalSections : Uint4B
# +0x03c CsrClientThread : Ptr32 Void
# +0x040 Win32ThreadInfo : Ptr32 Void
# +0x044 User32Reserved : [26] Uint4B
# +0x0ac UserReserved : [5] Uint4B
# +0x0c0 WOW32Reserved : Ptr32 Void
# +0x0c4 CurrentLocale : Uint4B
# +0x0c8 FpSoftwareStatusRegister : Uint4B
# +0x0cc SystemReserved1 : [54] Ptr32 Void
# +0x1a4 ExceptionCode : Int4B
# +0x1a8 ActivationContextStack : _ACTIVATION_CONTEXT_STACK
# +0x1bc SpareBytes1 : [24] UChar
# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH
# +0x6b4 RealClientId : _CLIENT_ID
# +0x6bc GdiCachedProcessHandle : Ptr32 Void
# +0x6c0 GdiClientPID : Uint4B
# +0x6c4 GdiClientTID : Uint4B
# +0x6c8 GdiThreadLocalInfo : Ptr32 Void
# +0x6cc Win32ClientInfo : [62] Uint4B
# +0x7c4 glDispatchTable : [233] Ptr32 Void
# +0xb68 glReserved1 : [29] Uint4B
# +0xbdc glReserved2 : Ptr32 Void
# +0xbe0 glSectionInfo : Ptr32 Void
# +0xbe4 glSection : Ptr32 Void
# +0xbe8 glTable : Ptr32 Void
# +0xbec glCurrentRC : Ptr32 Void
# +0xbf0 glContext : Ptr32 Void
# +0xbf4 LastStatusValue : Uint4B
# +0xbf8 StaticUnicodeString : _UNICODE_STRING
# +0xc00 StaticUnicodeBuffer : [261] Uint2B
# +0xe0c DeallocationStack : Ptr32 Void
# +0xe10 TlsSlots : [64] Ptr32 Void
# +0xf10 TlsLinks : _LIST_ENTRY
# +0xf18 Vdm : Ptr32 Void
# +0xf1c ReservedForNtRpc : Ptr32 Void
# +0xf20 DbgSsReserved : [2] Ptr32 Void
# +0xf28 HardErrorsAreDisabled : Uint4B
# +0xf2c Instrumentation : [16] Ptr32 Void
# +0xf6c WinSockData : Ptr32 Void
# +0xf70 GdiBatchCount : Uint4B
# +0xf74 InDbgPrint : UChar
# +0xf75 FreeStackOnTermination : UChar
# +0xf76 HasFiberData : UChar
# +0xf77 IdealProcessor : UChar
# +0xf78 Spare3 : Uint4B
# +0xf7c ReservedForPerf : Ptr32 Void
# +0xf80 ReservedForOle : Ptr32 Void
# +0xf84 WaitingOnLoaderLock : Uint4B
# +0xf88 Wx86Thread : _Wx86ThreadState
# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void
# +0xf98 ImpersonationLocale : Uint4B
# +0xf9c IsImpersonating : Uint4B
# +0xfa0 NlsCache : Ptr32 Void
# +0xfa4 pShimData : Ptr32 Void
# +0xfa8 HeapVirtualAffinity : Uint4B
# +0xfac CurrentTransactionHandle : Ptr32 Void
# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME
# +0xfb4 SafeThunkCall : UChar
# +0xfb5 BooleanSpare : [3] UChar
class _TEB_XP(Structure):
    """
    Thread Environment Block (TEB) for Windows XP, 32 bits.
    Field layout follows the windbg dump quoted above.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib",                           NT_TIB),
        ("EnvironmentPointer",              PVOID),
        ("ClientId",                        CLIENT_ID),
        ("ActiveRpcHandle",                 HANDLE),
        ("ThreadLocalStoragePointer",       PVOID),
        ("ProcessEnvironmentBlock",         PVOID), # PPEB
        ("LastErrorValue",                  DWORD),
        ("CountOfOwnedCriticalSections",    DWORD),
        ("CsrClientThread",                 PVOID),
        ("Win32ThreadInfo",                 PVOID),
        ("User32Reserved",                  DWORD * 26),
        ("UserReserved",                    DWORD * 5),
        ("WOW32Reserved",                   PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale",                   DWORD),
        ("FpSoftwareStatusRegister",        DWORD),
        ("SystemReserved1",                 PVOID * 54),
        ("ExceptionCode",                   SDWORD),
        ("ActivationContextStackPointer",   PVOID), # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1",                     UCHAR * 24),
        ("TxFsContext",                     DWORD),
        ("GdiTebBatch",                     GDI_TEB_BATCH),
        ("RealClientId",                    CLIENT_ID),
        ("GdiCachedProcessHandle",          HANDLE),
        ("GdiClientPID",                    DWORD),
        ("GdiClientTID",                    DWORD),
        ("GdiThreadLocalInfo",              PVOID),
        ("Win32ClientInfo",                 DWORD * 62),
        ("glDispatchTable",                 PVOID * 233),
        ("glReserved1",                     DWORD * 29),
        ("glReserved2",                     PVOID),
        ("glSectionInfo",                   PVOID),
        ("glSection",                       PVOID),
        ("glTable",                         PVOID),
        ("glCurrentRC",                     PVOID),
        ("glContext",                       PVOID),
        ("LastStatusValue",                 NTSTATUS),
        ("StaticUnicodeString",             UNICODE_STRING),
        ("StaticUnicodeBuffer",             WCHAR * 261),
        ("DeallocationStack",               PVOID),
        ("TlsSlots",                        PVOID * 64),
        ("TlsLinks",                        LIST_ENTRY),
        ("Vdm",                             PVOID),
        ("ReservedForNtRpc",                PVOID),
        ("DbgSsReserved",                   PVOID * 2),
        ("HardErrorsAreDisabled",           DWORD),
        ("Instrumentation",                 PVOID * 16),
        ("WinSockData",                     PVOID),
        ("GdiBatchCount",                   DWORD),
        ("InDbgPrint",                      BOOLEAN),
        ("FreeStackOnTermination",          BOOLEAN),
        ("HasFiberData",                    BOOLEAN),
        ("IdealProcessor",                  UCHAR),
        ("Spare3",                          DWORD),
        ("ReservedForPerf",                 PVOID),
        ("ReservedForOle",                  PVOID),
        ("WaitingOnLoaderLock",             DWORD),
        ("Wx86Thread",                      Wx86ThreadState),
        ("TlsExpansionSlots",               PVOID), # Ptr32 Ptr32 Void
        ("ImpersonationLocale",             DWORD),
        ("IsImpersonating",                 BOOL),
        ("NlsCache",                        PVOID),
        ("pShimData",                       PVOID),
        ("HeapVirtualAffinity",             DWORD),
        ("CurrentTransactionHandle",        HANDLE),
        ("ActiveFrame",                     PVOID), # PTEB_ACTIVE_FRAME
        ("SafeThunkCall",                   BOOLEAN),
        ("BooleanSpare",                    BOOLEAN * 3),
    ]
# +0x000 NtTib : _NT_TIB
# +0x038 EnvironmentPointer : Ptr64 Void
# +0x040 ClientId : _CLIENT_ID
# +0x050 ActiveRpcHandle : Ptr64 Void
# +0x058 ThreadLocalStoragePointer : Ptr64 Void
# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB
# +0x068 LastErrorValue : Uint4B
# +0x06c CountOfOwnedCriticalSections : Uint4B
# +0x070 CsrClientThread : Ptr64 Void
# +0x078 Win32ThreadInfo : Ptr64 Void
# +0x080 User32Reserved : [26] Uint4B
# +0x0e8 UserReserved : [5] Uint4B
# +0x100 WOW32Reserved : Ptr64 Void
# +0x108 CurrentLocale : Uint4B
# +0x10c FpSoftwareStatusRegister : Uint4B
# +0x110 SystemReserved1 : [54] Ptr64 Void
# +0x2c0 ExceptionCode : Int4B
# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK
# +0x2d0 SpareBytes1 : [28] UChar
# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH
# +0x7d8 RealClientId : _CLIENT_ID
# +0x7e8 GdiCachedProcessHandle : Ptr64 Void
# +0x7f0 GdiClientPID : Uint4B
# +0x7f4 GdiClientTID : Uint4B
# +0x7f8 GdiThreadLocalInfo : Ptr64 Void
# +0x800 Win32ClientInfo : [62] Uint8B
# +0x9f0 glDispatchTable : [233] Ptr64 Void
# +0x1138 glReserved1 : [29] Uint8B
# +0x1220 glReserved2 : Ptr64 Void
# +0x1228 glSectionInfo : Ptr64 Void
# +0x1230 glSection : Ptr64 Void
# +0x1238 glTable : Ptr64 Void
# +0x1240 glCurrentRC : Ptr64 Void
# +0x1248 glContext : Ptr64 Void
# +0x1250 LastStatusValue : Uint4B
# +0x1258 StaticUnicodeString : _UNICODE_STRING
# +0x1268 StaticUnicodeBuffer : [261] Uint2B
# +0x1478 DeallocationStack : Ptr64 Void
# +0x1480 TlsSlots : [64] Ptr64 Void
# +0x1680 TlsLinks : _LIST_ENTRY
# +0x1690 Vdm : Ptr64 Void
# +0x1698 ReservedForNtRpc : Ptr64 Void
# +0x16a0 DbgSsReserved : [2] Ptr64 Void
# +0x16b0 HardErrorMode : Uint4B
# +0x16b8 Instrumentation : [14] Ptr64 Void
# +0x1728 SubProcessTag : Ptr64 Void
# +0x1730 EtwTraceData : Ptr64 Void
# +0x1738 WinSockData : Ptr64 Void
# +0x1740 GdiBatchCount : Uint4B
# +0x1744 InDbgPrint : UChar
# +0x1745 FreeStackOnTermination : UChar
# +0x1746 HasFiberData : UChar
# +0x1747 IdealProcessor : UChar
# +0x1748 GuaranteedStackBytes : Uint4B
# +0x1750 ReservedForPerf : Ptr64 Void
# +0x1758 ReservedForOle : Ptr64 Void
# +0x1760 WaitingOnLoaderLock : Uint4B
# +0x1768 SparePointer1 : Uint8B
# +0x1770 SoftPatchPtr1 : Uint8B
# +0x1778 SoftPatchPtr2 : Uint8B
# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void
# +0x1788 DeallocationBStore : Ptr64 Void
# +0x1790 BStoreLimit : Ptr64 Void
# +0x1798 ImpersonationLocale : Uint4B
# +0x179c IsImpersonating : Uint4B
# +0x17a0 NlsCache : Ptr64 Void
# +0x17a8 pShimData : Ptr64 Void
# +0x17b0 HeapVirtualAffinity : Uint4B
# +0x17b8 CurrentTransactionHandle : Ptr64 Void
# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME
# +0x17c8 FlsData : Ptr64 Void
# +0x17d0 SafeThunkCall : UChar
# +0x17d1 BooleanSpare : [3] UChar
class _TEB_XP_64(Structure):
    """
    Thread Environment Block (TEB) for Windows XP, 64 bits.
    Field layout follows the windbg dump quoted above.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib",                           NT_TIB),
        ("EnvironmentPointer",              PVOID),
        ("ClientId",                        CLIENT_ID),
        ("ActiveRpcHandle",                 PVOID),
        ("ThreadLocalStoragePointer",       PVOID),
        ("ProcessEnvironmentBlock",         PVOID), # PPEB
        ("LastErrorValue",                  DWORD),
        ("CountOfOwnedCriticalSections",    DWORD),
        ("CsrClientThread",                 PVOID),
        ("Win32ThreadInfo",                 PVOID),
        ("User32Reserved",                  DWORD * 26),
        ("UserReserved",                    DWORD * 5),
        ("WOW32Reserved",                   PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale",                   DWORD),
        ("FpSoftwareStatusRegister",        DWORD),
        ("SystemReserved1",                 PVOID * 54),
        ("ExceptionCode",                   SDWORD),
        ("ActivationContextStackPointer",   PVOID), # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1",                     UCHAR * 28),
        ("GdiTebBatch",                     GDI_TEB_BATCH),
        ("RealClientId",                    CLIENT_ID),
        ("GdiCachedProcessHandle",          HANDLE),
        ("GdiClientPID",                    DWORD),
        ("GdiClientTID",                    DWORD),
        ("GdiThreadLocalInfo",              PVOID),
        ("Win32ClientInfo",                 QWORD * 62),
        ("glDispatchTable",                 PVOID * 233),
        ("glReserved1",                     QWORD * 29),
        ("glReserved2",                     PVOID),
        ("glSectionInfo",                   PVOID),
        ("glSection",                       PVOID),
        ("glTable",                         PVOID),
        ("glCurrentRC",                     PVOID),
        ("glContext",                       PVOID),
        ("LastStatusValue",                 NTSTATUS),
        ("StaticUnicodeString",             UNICODE_STRING),
        ("StaticUnicodeBuffer",             WCHAR * 261),
        ("DeallocationStack",               PVOID),
        ("TlsSlots",                        PVOID * 64),
        ("TlsLinks",                        LIST_ENTRY),
        ("Vdm",                             PVOID),
        ("ReservedForNtRpc",                PVOID),
        ("DbgSsReserved",                   PVOID * 2),
        ("HardErrorMode",                   DWORD),
        ("Instrumentation",                 PVOID * 14),
        ("SubProcessTag",                   PVOID),
        ("EtwTraceData",                    PVOID),
        ("WinSockData",                     PVOID),
        ("GdiBatchCount",                   DWORD),
        ("InDbgPrint",                      BOOLEAN),
        ("FreeStackOnTermination",          BOOLEAN),
        ("HasFiberData",                    BOOLEAN),
        ("IdealProcessor",                  UCHAR),
        ("GuaranteedStackBytes",            DWORD),
        ("ReservedForPerf",                 PVOID),
        ("ReservedForOle",                  PVOID),
        ("WaitingOnLoaderLock",             DWORD),
        ("SparePointer1",                   PVOID),
        ("SoftPatchPtr1",                   PVOID),
        ("SoftPatchPtr2",                   PVOID),
        ("TlsExpansionSlots",               PVOID), # Ptr64 Ptr64 Void
        ("DeallocationBStore",              PVOID),
        ("BStoreLimit",                     PVOID),
        ("ImpersonationLocale",             DWORD),
        ("IsImpersonating",                 BOOL),
        ("NlsCache",                        PVOID),
        ("pShimData",                       PVOID),
        ("HeapVirtualAffinity",             DWORD),
        ("CurrentTransactionHandle",        HANDLE),
        ("ActiveFrame",                     PVOID), # PTEB_ACTIVE_FRAME
        ("FlsData",                         PVOID),
        ("SafeThunkCall",                   BOOLEAN),
        ("BooleanSpare",                    BOOLEAN * 3),
    ]
# +0x000 NtTib : _NT_TIB
# +0x01c EnvironmentPointer : Ptr32 Void
# +0x020 ClientId : _CLIENT_ID
# +0x028 ActiveRpcHandle : Ptr32 Void
# +0x02c ThreadLocalStoragePointer : Ptr32 Void
# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB
# +0x034 LastErrorValue : Uint4B
# +0x038 CountOfOwnedCriticalSections : Uint4B
# +0x03c CsrClientThread : Ptr32 Void
# +0x040 Win32ThreadInfo : Ptr32 Void
# +0x044 User32Reserved : [26] Uint4B
# +0x0ac UserReserved : [5] Uint4B
# +0x0c0 WOW32Reserved : Ptr32 Void
# +0x0c4 CurrentLocale : Uint4B
# +0x0c8 FpSoftwareStatusRegister : Uint4B
# +0x0cc SystemReserved1 : [54] Ptr32 Void
# +0x1a4 ExceptionCode : Int4B
# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK
# +0x1ac SpareBytes1 : [40] UChar
# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH
# +0x6b4 RealClientId : _CLIENT_ID
# +0x6bc GdiCachedProcessHandle : Ptr32 Void
# +0x6c0 GdiClientPID : Uint4B
# +0x6c4 GdiClientTID : Uint4B
# +0x6c8 GdiThreadLocalInfo : Ptr32 Void
# +0x6cc Win32ClientInfo : [62] Uint4B
# +0x7c4 glDispatchTable : [233] Ptr32 Void
# +0xb68 glReserved1 : [29] Uint4B
# +0xbdc glReserved2 : Ptr32 Void
# +0xbe0 glSectionInfo : Ptr32 Void
# +0xbe4 glSection : Ptr32 Void
# +0xbe8 glTable : Ptr32 Void
# +0xbec glCurrentRC : Ptr32 Void
# +0xbf0 glContext : Ptr32 Void
# +0xbf4 LastStatusValue : Uint4B
# +0xbf8 StaticUnicodeString : _UNICODE_STRING
# +0xc00 StaticUnicodeBuffer : [261] Uint2B
# +0xe0c DeallocationStack : Ptr32 Void
# +0xe10 TlsSlots : [64] Ptr32 Void
# +0xf10 TlsLinks : _LIST_ENTRY
# +0xf18 Vdm : Ptr32 Void
# +0xf1c ReservedForNtRpc : Ptr32 Void
# +0xf20 DbgSsReserved : [2] Ptr32 Void
# +0xf28 HardErrorMode : Uint4B
# +0xf2c Instrumentation : [14] Ptr32 Void
# +0xf64 SubProcessTag : Ptr32 Void
# +0xf68 EtwTraceData : Ptr32 Void
# +0xf6c WinSockData : Ptr32 Void
# +0xf70 GdiBatchCount : Uint4B
# +0xf74 InDbgPrint : UChar
# +0xf75 FreeStackOnTermination : UChar
# +0xf76 HasFiberData : UChar
# +0xf77 IdealProcessor : UChar
# +0xf78 GuaranteedStackBytes : Uint4B
# +0xf7c ReservedForPerf : Ptr32 Void
# +0xf80 ReservedForOle : Ptr32 Void
# +0xf84 WaitingOnLoaderLock : Uint4B
# +0xf88 SparePointer1 : Uint4B
# +0xf8c SoftPatchPtr1 : Uint4B
# +0xf90 SoftPatchPtr2 : Uint4B
# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void
# +0xf98 ImpersonationLocale : Uint4B
# +0xf9c IsImpersonating : Uint4B
# +0xfa0 NlsCache : Ptr32 Void
# +0xfa4 pShimData : Ptr32 Void
# +0xfa8 HeapVirtualAffinity : Uint4B
# +0xfac CurrentTransactionHandle : Ptr32 Void
# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME
# +0xfb4 FlsData : Ptr32 Void
# +0xfb8 SafeThunkCall : UChar
# +0xfb9 BooleanSpare : [3] UChar
class _TEB_2003(Structure):
    """
    Thread Environment Block (TEB) for Windows Server 2003, 32 bits.
    Field layout follows the windbg dump quoted above.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib",                           NT_TIB),
        ("EnvironmentPointer",              PVOID),
        ("ClientId",                        CLIENT_ID),
        ("ActiveRpcHandle",                 HANDLE),
        ("ThreadLocalStoragePointer",       PVOID),
        ("ProcessEnvironmentBlock",         PVOID), # PPEB
        ("LastErrorValue",                  DWORD),
        ("CountOfOwnedCriticalSections",    DWORD),
        ("CsrClientThread",                 PVOID),
        ("Win32ThreadInfo",                 PVOID),
        ("User32Reserved",                  DWORD * 26),
        ("UserReserved",                    DWORD * 5),
        ("WOW32Reserved",                   PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale",                   DWORD),
        ("FpSoftwareStatusRegister",        DWORD),
        ("SystemReserved1",                 PVOID * 54),
        ("ExceptionCode",                   SDWORD),
        ("ActivationContextStackPointer",   PVOID), # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1",                     UCHAR * 40),
        ("GdiTebBatch",                     GDI_TEB_BATCH),
        ("RealClientId",                    CLIENT_ID),
        ("GdiCachedProcessHandle",          HANDLE),
        ("GdiClientPID",                    DWORD),
        ("GdiClientTID",                    DWORD),
        ("GdiThreadLocalInfo",              PVOID),
        ("Win32ClientInfo",                 DWORD * 62),
        ("glDispatchTable",                 PVOID * 233),
        ("glReserved1",                     DWORD * 29),
        ("glReserved2",                     PVOID),
        ("glSectionInfo",                   PVOID),
        ("glSection",                       PVOID),
        ("glTable",                         PVOID),
        ("glCurrentRC",                     PVOID),
        ("glContext",                       PVOID),
        ("LastStatusValue",                 NTSTATUS),
        ("StaticUnicodeString",             UNICODE_STRING),
        ("StaticUnicodeBuffer",             WCHAR * 261),
        ("DeallocationStack",               PVOID),
        ("TlsSlots",                        PVOID * 64),
        ("TlsLinks",                        LIST_ENTRY),
        ("Vdm",                             PVOID),
        ("ReservedForNtRpc",                PVOID),
        ("DbgSsReserved",                   PVOID * 2),
        ("HardErrorMode",                   DWORD),
        ("Instrumentation",                 PVOID * 14),
        ("SubProcessTag",                   PVOID),
        ("EtwTraceData",                    PVOID),
        ("WinSockData",                     PVOID),
        ("GdiBatchCount",                   DWORD),
        ("InDbgPrint",                      BOOLEAN),
        ("FreeStackOnTermination",          BOOLEAN),
        ("HasFiberData",                    BOOLEAN),
        ("IdealProcessor",                  UCHAR),
        ("GuaranteedStackBytes",            DWORD),
        ("ReservedForPerf",                 PVOID),
        ("ReservedForOle",                  PVOID),
        ("WaitingOnLoaderLock",             DWORD),
        ("SparePointer1",                   PVOID),
        ("SoftPatchPtr1",                   PVOID),
        ("SoftPatchPtr2",                   PVOID),
        ("TlsExpansionSlots",               PVOID), # Ptr32 Ptr32 Void
        ("ImpersonationLocale",             DWORD),
        ("IsImpersonating",                 BOOL),
        ("NlsCache",                        PVOID),
        ("pShimData",                       PVOID),
        ("HeapVirtualAffinity",             DWORD),
        ("CurrentTransactionHandle",        HANDLE),
        ("ActiveFrame",                     PVOID), # PTEB_ACTIVE_FRAME
        ("FlsData",                         PVOID),
        ("SafeThunkCall",                   BOOLEAN),
        ("BooleanSpare",                    BOOLEAN * 3),
    ]

# The 64-bit 2003 TEB matches XP 64-bit, and R2 matches plain 2003.
_TEB_2003_64    = _TEB_XP_64
_TEB_2003_R2    = _TEB_2003
_TEB_2003_R2_64 = _TEB_2003_64
# +0x000 NtTib : _NT_TIB
# +0x01c EnvironmentPointer : Ptr32 Void
# +0x020 ClientId : _CLIENT_ID
# +0x028 ActiveRpcHandle : Ptr32 Void
# +0x02c ThreadLocalStoragePointer : Ptr32 Void
# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB
# +0x034 LastErrorValue : Uint4B
# +0x038 CountOfOwnedCriticalSections : Uint4B
# +0x03c CsrClientThread : Ptr32 Void
# +0x040 Win32ThreadInfo : Ptr32 Void
# +0x044 User32Reserved : [26] Uint4B
# +0x0ac UserReserved : [5] Uint4B
# +0x0c0 WOW32Reserved : Ptr32 Void
# +0x0c4 CurrentLocale : Uint4B
# +0x0c8 FpSoftwareStatusRegister : Uint4B
# +0x0cc SystemReserved1 : [54] Ptr32 Void
# +0x1a4 ExceptionCode : Int4B
# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK
# +0x1ac SpareBytes1 : [36] UChar
# +0x1d0 TxFsContext : Uint4B
# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH
# +0x6b4 RealClientId : _CLIENT_ID
# +0x6bc GdiCachedProcessHandle : Ptr32 Void
# +0x6c0 GdiClientPID : Uint4B
# +0x6c4 GdiClientTID : Uint4B
# +0x6c8 GdiThreadLocalInfo : Ptr32 Void
# +0x6cc Win32ClientInfo : [62] Uint4B
# +0x7c4 glDispatchTable : [233] Ptr32 Void
# +0xb68 glReserved1 : [29] Uint4B
# +0xbdc glReserved2 : Ptr32 Void
# +0xbe0 glSectionInfo : Ptr32 Void
# +0xbe4 glSection : Ptr32 Void
# +0xbe8 glTable : Ptr32 Void
# +0xbec glCurrentRC : Ptr32 Void
# +0xbf0 glContext : Ptr32 Void
# +0xbf4 LastStatusValue : Uint4B
# +0xbf8 StaticUnicodeString : _UNICODE_STRING
# +0xc00 StaticUnicodeBuffer : [261] Wchar
# +0xe0c DeallocationStack : Ptr32 Void
# +0xe10 TlsSlots : [64] Ptr32 Void
# +0xf10 TlsLinks : _LIST_ENTRY
# +0xf18 Vdm : Ptr32 Void
# +0xf1c ReservedForNtRpc : Ptr32 Void
# +0xf20 DbgSsReserved : [2] Ptr32 Void
# +0xf28 HardErrorMode : Uint4B
# +0xf2c Instrumentation : [9] Ptr32 Void
# +0xf50 ActivityId : _GUID
# +0xf60 SubProcessTag : Ptr32 Void
# +0xf64 EtwLocalData : Ptr32 Void
# +0xf68 EtwTraceData : Ptr32 Void
# +0xf6c WinSockData : Ptr32 Void
# +0xf70 GdiBatchCount : Uint4B
# +0xf74 SpareBool0 : UChar
# +0xf75 SpareBool1 : UChar
# +0xf76 SpareBool2 : UChar
# +0xf77 IdealProcessor : UChar
# +0xf78 GuaranteedStackBytes : Uint4B
# +0xf7c ReservedForPerf : Ptr32 Void
# +0xf80 ReservedForOle : Ptr32 Void
# +0xf84 WaitingOnLoaderLock : Uint4B
# +0xf88 SavedPriorityState : Ptr32 Void
# +0xf8c SoftPatchPtr1 : Uint4B
# +0xf90 ThreadPoolData : Ptr32 Void
# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void
# +0xf98 ImpersonationLocale : Uint4B
# +0xf9c IsImpersonating : Uint4B
# +0xfa0 NlsCache : Ptr32 Void
# +0xfa4 pShimData : Ptr32 Void
# +0xfa8 HeapVirtualAffinity : Uint4B
# +0xfac CurrentTransactionHandle : Ptr32 Void
# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME
# +0xfb4 FlsData : Ptr32 Void
# +0xfb8 PreferredLanguages : Ptr32 Void
# +0xfbc UserPrefLanguages : Ptr32 Void
# +0xfc0 MergedPrefLanguages : Ptr32 Void
# +0xfc4 MuiImpersonation : Uint4B
# +0xfc8 CrossTebFlags : Uint2B
# +0xfc8 SpareCrossTebBits : Pos 0, 16 Bits
# +0xfca SameTebFlags : Uint2B
# +0xfca DbgSafeThunkCall : Pos 0, 1 Bit
# +0xfca DbgInDebugPrint : Pos 1, 1 Bit
# +0xfca DbgHasFiberData : Pos 2, 1 Bit
# +0xfca DbgSkipThreadAttach : Pos 3, 1 Bit
# +0xfca DbgWerInShipAssertCode : Pos 4, 1 Bit
# +0xfca DbgRanProcessInit : Pos 5, 1 Bit
# +0xfca DbgClonedThread : Pos 6, 1 Bit
# +0xfca DbgSuppressDebugMsg : Pos 7, 1 Bit
# +0xfca RtlDisableUserStackWalk : Pos 8, 1 Bit
# +0xfca RtlExceptionAttached : Pos 9, 1 Bit
# +0xfca SpareSameTebBits : Pos 10, 6 Bits
# +0xfcc TxnScopeEnterCallback : Ptr32 Void
# +0xfd0 TxnScopeExitCallback : Ptr32 Void
# +0xfd4 TxnScopeContext : Ptr32 Void
# +0xfd8 LockCount : Uint4B
# +0xfdc ProcessRundown : Uint4B
# +0xfe0 LastSwitchTime : Uint8B
# +0xfe8 TotalSwitchOutTime : Uint8B
# +0xff0 WaitReasonBitMap : _LARGE_INTEGER
class _TEB_2008(Structure):
    """Thread Environment Block for 32 bit Windows Vista / Server 2008.

    Field names and types mirror the windbg ``dt _TEB`` dump reproduced
    in the comment block above (32 bit offsets). Do not reorder fields:
    the declaration order defines the in-memory layout.

    NOTE(review): the dump shows SpareCrossTebBits / the Dbg* bits as
    bitfields overlaying CrossTebFlags / SameTebFlags; here only the two
    WORD containers are declared -- confirm callers only use the words.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib", NT_TIB),
        ("EnvironmentPointer", PVOID),
        ("ClientId", CLIENT_ID),
        ("ActiveRpcHandle", HANDLE),
        ("ThreadLocalStoragePointer", PVOID),
        ("ProcessEnvironmentBlock", PVOID), # PPEB
        ("LastErrorValue", DWORD),
        ("CountOfOwnedCriticalSections", DWORD),
        ("CsrClientThread", PVOID),
        ("Win32ThreadInfo", PVOID),
        ("User32Reserved", DWORD * 26),
        ("UserReserved", DWORD * 5),
        ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale", DWORD),
        ("FpSoftwareStatusRegister", DWORD),
        ("SystemReserved1", PVOID * 54),
        ("ExceptionCode", SDWORD),
        ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1", UCHAR * 36),
        ("TxFsContext", DWORD),
        ("GdiTebBatch", GDI_TEB_BATCH),
        ("RealClientId", CLIENT_ID),
        ("GdiCachedProcessHandle", HANDLE),
        ("GdiClientPID", DWORD),
        ("GdiClientTID", DWORD),
        ("GdiThreadLocalInfo", PVOID),
        ("Win32ClientInfo", DWORD * 62),
        ("glDispatchTable", PVOID * 233),
        ("glReserved1", DWORD * 29),
        ("glReserved2", PVOID),
        ("glSectionInfo", PVOID),
        ("glSection", PVOID),
        ("glTable", PVOID),
        ("glCurrentRC", PVOID),
        ("glContext", PVOID),
        ("LastStatusValue", NTSTATUS),
        ("StaticUnicodeString", UNICODE_STRING),
        ("StaticUnicodeBuffer", WCHAR * 261),
        ("DeallocationStack", PVOID),
        ("TlsSlots", PVOID * 64),
        ("TlsLinks", LIST_ENTRY),
        ("Vdm", PVOID),
        ("ReservedForNtRpc", PVOID),
        ("DbgSsReserved", PVOID * 2),
        ("HardErrorMode", DWORD),
        ("Instrumentation", PVOID * 9),
        ("ActivityId", GUID),
        ("SubProcessTag", PVOID),
        ("EtwLocalData", PVOID),
        ("EtwTraceData", PVOID),
        ("WinSockData", PVOID),
        ("GdiBatchCount", DWORD),
        ("SpareBool0", BOOLEAN),
        ("SpareBool1", BOOLEAN),
        ("SpareBool2", BOOLEAN),
        ("IdealProcessor", UCHAR),
        ("GuaranteedStackBytes", DWORD),
        ("ReservedForPerf", PVOID),
        ("ReservedForOle", PVOID),
        ("WaitingOnLoaderLock", DWORD),
        ("SavedPriorityState", PVOID),
        ("SoftPatchPtr1", PVOID),
        ("ThreadPoolData", PVOID),
        ("TlsExpansionSlots", PVOID), # Ptr32 Ptr32 Void
        ("ImpersonationLocale", DWORD),
        ("IsImpersonating", BOOL),
        ("NlsCache", PVOID),
        ("pShimData", PVOID),
        ("HeapVirtualAffinity", DWORD),
        ("CurrentTransactionHandle", HANDLE),
        ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME
        ("FlsData", PVOID),
        ("PreferredLanguages", PVOID),
        ("UserPrefLanguages", PVOID),
        ("MergedPrefLanguages", PVOID),
        ("MuiImpersonation", BOOL),
        ("CrossTebFlags", WORD),
        ("SameTebFlags", WORD),
        ("TxnScopeEnterCallback", PVOID),
        ("TxnScopeExitCallback", PVOID),
        ("TxnScopeContext", PVOID),
        ("LockCount", DWORD),
        ("ProcessRundown", DWORD),
        ("LastSwitchTime", QWORD),
        ("TotalSwitchOutTime", QWORD),
        ("WaitReasonBitMap", LONGLONG), # LARGE_INTEGER
    ]
# +0x000 NtTib : _NT_TIB
# +0x038 EnvironmentPointer : Ptr64 Void
# +0x040 ClientId : _CLIENT_ID
# +0x050 ActiveRpcHandle : Ptr64 Void
# +0x058 ThreadLocalStoragePointer : Ptr64 Void
# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB
# +0x068 LastErrorValue : Uint4B
# +0x06c CountOfOwnedCriticalSections : Uint4B
# +0x070 CsrClientThread : Ptr64 Void
# +0x078 Win32ThreadInfo : Ptr64 Void
# +0x080 User32Reserved : [26] Uint4B
# +0x0e8 UserReserved : [5] Uint4B
# +0x100 WOW32Reserved : Ptr64 Void
# +0x108 CurrentLocale : Uint4B
# +0x10c FpSoftwareStatusRegister : Uint4B
# +0x110 SystemReserved1 : [54] Ptr64 Void
# +0x2c0 ExceptionCode : Int4B
# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK
# +0x2d0 SpareBytes1 : [24] UChar
# +0x2e8 TxFsContext : Uint4B
# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH
# +0x7d8 RealClientId : _CLIENT_ID
# +0x7e8 GdiCachedProcessHandle : Ptr64 Void
# +0x7f0 GdiClientPID : Uint4B
# +0x7f4 GdiClientTID : Uint4B
# +0x7f8 GdiThreadLocalInfo : Ptr64 Void
# +0x800 Win32ClientInfo : [62] Uint8B
# +0x9f0 glDispatchTable : [233] Ptr64 Void
# +0x1138 glReserved1 : [29] Uint8B
# +0x1220 glReserved2 : Ptr64 Void
# +0x1228 glSectionInfo : Ptr64 Void
# +0x1230 glSection : Ptr64 Void
# +0x1238 glTable : Ptr64 Void
# +0x1240 glCurrentRC : Ptr64 Void
# +0x1248 glContext : Ptr64 Void
# +0x1250 LastStatusValue : Uint4B
# +0x1258 StaticUnicodeString : _UNICODE_STRING
# +0x1268 StaticUnicodeBuffer : [261] Wchar
# +0x1478 DeallocationStack : Ptr64 Void
# +0x1480 TlsSlots : [64] Ptr64 Void
# +0x1680 TlsLinks : _LIST_ENTRY
# +0x1690 Vdm : Ptr64 Void
# +0x1698 ReservedForNtRpc : Ptr64 Void
# +0x16a0 DbgSsReserved : [2] Ptr64 Void
# +0x16b0 HardErrorMode : Uint4B
# +0x16b8 Instrumentation : [11] Ptr64 Void
# +0x1710 ActivityId : _GUID
# +0x1720 SubProcessTag : Ptr64 Void
# +0x1728 EtwLocalData : Ptr64 Void
# +0x1730 EtwTraceData : Ptr64 Void
# +0x1738 WinSockData : Ptr64 Void
# +0x1740 GdiBatchCount : Uint4B
# +0x1744 SpareBool0 : UChar
# +0x1745 SpareBool1 : UChar
# +0x1746 SpareBool2 : UChar
# +0x1747 IdealProcessor : UChar
# +0x1748 GuaranteedStackBytes : Uint4B
# +0x1750 ReservedForPerf : Ptr64 Void
# +0x1758 ReservedForOle : Ptr64 Void
# +0x1760 WaitingOnLoaderLock : Uint4B
# +0x1768 SavedPriorityState : Ptr64 Void
# +0x1770 SoftPatchPtr1 : Uint8B
# +0x1778 ThreadPoolData : Ptr64 Void
# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void
# +0x1788 DeallocationBStore : Ptr64 Void
# +0x1790 BStoreLimit : Ptr64 Void
# +0x1798 ImpersonationLocale : Uint4B
# +0x179c IsImpersonating : Uint4B
# +0x17a0 NlsCache : Ptr64 Void
# +0x17a8 pShimData : Ptr64 Void
# +0x17b0 HeapVirtualAffinity : Uint4B
# +0x17b8 CurrentTransactionHandle : Ptr64 Void
# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME
# +0x17c8 FlsData : Ptr64 Void
# +0x17d0 PreferredLanguages : Ptr64 Void
# +0x17d8 UserPrefLanguages : Ptr64 Void
# +0x17e0 MergedPrefLanguages : Ptr64 Void
# +0x17e8 MuiImpersonation : Uint4B
# +0x17ec CrossTebFlags : Uint2B
# +0x17ec SpareCrossTebBits : Pos 0, 16 Bits
# +0x17ee SameTebFlags : Uint2B
# +0x17ee DbgSafeThunkCall : Pos 0, 1 Bit
# +0x17ee DbgInDebugPrint : Pos 1, 1 Bit
# +0x17ee DbgHasFiberData : Pos 2, 1 Bit
# +0x17ee DbgSkipThreadAttach : Pos 3, 1 Bit
# +0x17ee DbgWerInShipAssertCode : Pos 4, 1 Bit
# +0x17ee DbgRanProcessInit : Pos 5, 1 Bit
# +0x17ee DbgClonedThread : Pos 6, 1 Bit
# +0x17ee DbgSuppressDebugMsg : Pos 7, 1 Bit
# +0x17ee RtlDisableUserStackWalk : Pos 8, 1 Bit
# +0x17ee RtlExceptionAttached : Pos 9, 1 Bit
# +0x17ee SpareSameTebBits : Pos 10, 6 Bits
# +0x17f0 TxnScopeEnterCallback : Ptr64 Void
# +0x17f8 TxnScopeExitCallback : Ptr64 Void
# +0x1800 TxnScopeContext : Ptr64 Void
# +0x1808 LockCount : Uint4B
# +0x180c ProcessRundown : Uint4B
# +0x1810 LastSwitchTime : Uint8B
# +0x1818 TotalSwitchOutTime : Uint8B
# +0x1820 WaitReasonBitMap : _LARGE_INTEGER
class _TEB_2008_64(Structure):
    """Thread Environment Block for 64 bit Windows Vista / Server 2008.

    Field names and types mirror the windbg ``dt _TEB`` dump reproduced
    in the comment block above (64 bit offsets). Differences from the
    32 bit ``_TEB_2008``: SpareBytes1 is 24 bytes, Instrumentation has
    11 slots, Win32ClientInfo / glReserved1 are QWORD arrays, and the
    DeallocationBStore / BStoreLimit fields exist only on 64 bits.
    Do not reorder fields: declaration order defines the layout.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib", NT_TIB),
        ("EnvironmentPointer", PVOID),
        ("ClientId", CLIENT_ID),
        ("ActiveRpcHandle", HANDLE),
        ("ThreadLocalStoragePointer", PVOID),
        ("ProcessEnvironmentBlock", PVOID), # PPEB
        ("LastErrorValue", DWORD),
        ("CountOfOwnedCriticalSections", DWORD),
        ("CsrClientThread", PVOID),
        ("Win32ThreadInfo", PVOID),
        ("User32Reserved", DWORD * 26),
        ("UserReserved", DWORD * 5),
        ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale", DWORD),
        ("FpSoftwareStatusRegister", DWORD),
        ("SystemReserved1", PVOID * 54),
        ("ExceptionCode", SDWORD),
        ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK
        ("SpareBytes1", UCHAR * 24),
        ("TxFsContext", DWORD),
        ("GdiTebBatch", GDI_TEB_BATCH),
        ("RealClientId", CLIENT_ID),
        ("GdiCachedProcessHandle", HANDLE),
        ("GdiClientPID", DWORD),
        ("GdiClientTID", DWORD),
        ("GdiThreadLocalInfo", PVOID),
        ("Win32ClientInfo", QWORD * 62),
        ("glDispatchTable", PVOID * 233),
        ("glReserved1", QWORD * 29),
        ("glReserved2", PVOID),
        ("glSectionInfo", PVOID),
        ("glSection", PVOID),
        ("glTable", PVOID),
        ("glCurrentRC", PVOID),
        ("glContext", PVOID),
        ("LastStatusValue", NTSTATUS),
        ("StaticUnicodeString", UNICODE_STRING),
        ("StaticUnicodeBuffer", WCHAR * 261),
        ("DeallocationStack", PVOID),
        ("TlsSlots", PVOID * 64),
        ("TlsLinks", LIST_ENTRY),
        ("Vdm", PVOID),
        ("ReservedForNtRpc", PVOID),
        ("DbgSsReserved", PVOID * 2),
        ("HardErrorMode", DWORD),
        ("Instrumentation", PVOID * 11),
        ("ActivityId", GUID),
        ("SubProcessTag", PVOID),
        ("EtwLocalData", PVOID),
        ("EtwTraceData", PVOID),
        ("WinSockData", PVOID),
        ("GdiBatchCount", DWORD),
        ("SpareBool0", BOOLEAN),
        ("SpareBool1", BOOLEAN),
        ("SpareBool2", BOOLEAN),
        ("IdealProcessor", UCHAR),
        ("GuaranteedStackBytes", DWORD),
        ("ReservedForPerf", PVOID),
        ("ReservedForOle", PVOID),
        ("WaitingOnLoaderLock", DWORD),
        ("SavedPriorityState", PVOID),
        ("SoftPatchPtr1", PVOID),
        ("ThreadPoolData", PVOID),
        ("TlsExpansionSlots", PVOID), # Ptr64 Ptr64 Void
        ("DeallocationBStore", PVOID),
        ("BStoreLimit", PVOID),
        ("ImpersonationLocale", DWORD),
        ("IsImpersonating", BOOL),
        ("NlsCache", PVOID),
        ("pShimData", PVOID),
        ("HeapVirtualAffinity", DWORD),
        ("CurrentTransactionHandle", HANDLE),
        ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME
        ("FlsData", PVOID),
        ("PreferredLanguages", PVOID),
        ("UserPrefLanguages", PVOID),
        ("MergedPrefLanguages", PVOID),
        ("MuiImpersonation", BOOL),
        ("CrossTebFlags", WORD),
        ("SameTebFlags", WORD),
        ("TxnScopeEnterCallback", PVOID),
        ("TxnScopeExitCallback", PVOID),
        ("TxnScopeContext", PVOID),
        ("LockCount", DWORD),
        ("ProcessRundown", DWORD),
        ("LastSwitchTime", QWORD),
        ("TotalSwitchOutTime", QWORD),
        ("WaitReasonBitMap", LONGLONG), # LARGE_INTEGER
    ]
# +0x000 NtTib : _NT_TIB
# +0x01c EnvironmentPointer : Ptr32 Void
# +0x020 ClientId : _CLIENT_ID
# +0x028 ActiveRpcHandle : Ptr32 Void
# +0x02c ThreadLocalStoragePointer : Ptr32 Void
# +0x030 ProcessEnvironmentBlock : Ptr32 _PEB
# +0x034 LastErrorValue : Uint4B
# +0x038 CountOfOwnedCriticalSections : Uint4B
# +0x03c CsrClientThread : Ptr32 Void
# +0x040 Win32ThreadInfo : Ptr32 Void
# +0x044 User32Reserved : [26] Uint4B
# +0x0ac UserReserved : [5] Uint4B
# +0x0c0 WOW32Reserved : Ptr32 Void
# +0x0c4 CurrentLocale : Uint4B
# +0x0c8 FpSoftwareStatusRegister : Uint4B
# +0x0cc SystemReserved1 : [54] Ptr32 Void
# +0x1a4 ExceptionCode : Int4B
# +0x1a8 ActivationContextStackPointer : Ptr32 _ACTIVATION_CONTEXT_STACK
# +0x1ac SpareBytes : [36] UChar
# +0x1d0 TxFsContext : Uint4B
# +0x1d4 GdiTebBatch : _GDI_TEB_BATCH
# +0x6b4 RealClientId : _CLIENT_ID
# +0x6bc GdiCachedProcessHandle : Ptr32 Void
# +0x6c0 GdiClientPID : Uint4B
# +0x6c4 GdiClientTID : Uint4B
# +0x6c8 GdiThreadLocalInfo : Ptr32 Void
# +0x6cc Win32ClientInfo : [62] Uint4B
# +0x7c4 glDispatchTable : [233] Ptr32 Void
# +0xb68 glReserved1 : [29] Uint4B
# +0xbdc glReserved2 : Ptr32 Void
# +0xbe0 glSectionInfo : Ptr32 Void
# +0xbe4 glSection : Ptr32 Void
# +0xbe8 glTable : Ptr32 Void
# +0xbec glCurrentRC : Ptr32 Void
# +0xbf0 glContext : Ptr32 Void
# +0xbf4 LastStatusValue : Uint4B
# +0xbf8 StaticUnicodeString : _UNICODE_STRING
# +0xc00 StaticUnicodeBuffer : [261] Wchar
# +0xe0c DeallocationStack : Ptr32 Void
# +0xe10 TlsSlots : [64] Ptr32 Void
# +0xf10 TlsLinks : _LIST_ENTRY
# +0xf18 Vdm : Ptr32 Void
# +0xf1c ReservedForNtRpc : Ptr32 Void
# +0xf20 DbgSsReserved : [2] Ptr32 Void
# +0xf28 HardErrorMode : Uint4B
# +0xf2c Instrumentation : [9] Ptr32 Void
# +0xf50 ActivityId : _GUID
# +0xf60 SubProcessTag : Ptr32 Void
# +0xf64 EtwLocalData : Ptr32 Void
# +0xf68 EtwTraceData : Ptr32 Void
# +0xf6c WinSockData : Ptr32 Void
# +0xf70 GdiBatchCount : Uint4B
# +0xf74 CurrentIdealProcessor : _PROCESSOR_NUMBER
# +0xf74 IdealProcessorValue : Uint4B
# +0xf74 ReservedPad0 : UChar
# +0xf75 ReservedPad1 : UChar
# +0xf76 ReservedPad2 : UChar
# +0xf77 IdealProcessor : UChar
# +0xf78 GuaranteedStackBytes : Uint4B
# +0xf7c ReservedForPerf : Ptr32 Void
# +0xf80 ReservedForOle : Ptr32 Void
# +0xf84 WaitingOnLoaderLock : Uint4B
# +0xf88 SavedPriorityState : Ptr32 Void
# +0xf8c SoftPatchPtr1 : Uint4B
# +0xf90 ThreadPoolData : Ptr32 Void
# +0xf94 TlsExpansionSlots : Ptr32 Ptr32 Void
# +0xf98 MuiGeneration : Uint4B
# +0xf9c IsImpersonating : Uint4B
# +0xfa0 NlsCache : Ptr32 Void
# +0xfa4 pShimData : Ptr32 Void
# +0xfa8 HeapVirtualAffinity : Uint4B
# +0xfac CurrentTransactionHandle : Ptr32 Void
# +0xfb0 ActiveFrame : Ptr32 _TEB_ACTIVE_FRAME
# +0xfb4 FlsData : Ptr32 Void
# +0xfb8 PreferredLanguages : Ptr32 Void
# +0xfbc UserPrefLanguages : Ptr32 Void
# +0xfc0 MergedPrefLanguages : Ptr32 Void
# +0xfc4 MuiImpersonation : Uint4B
# +0xfc8 CrossTebFlags : Uint2B
# +0xfc8 SpareCrossTebBits : Pos 0, 16 Bits
# +0xfca SameTebFlags : Uint2B
# +0xfca SafeThunkCall : Pos 0, 1 Bit
# +0xfca InDebugPrint : Pos 1, 1 Bit
# +0xfca HasFiberData : Pos 2, 1 Bit
# +0xfca SkipThreadAttach : Pos 3, 1 Bit
# +0xfca WerInShipAssertCode : Pos 4, 1 Bit
# +0xfca RanProcessInit : Pos 5, 1 Bit
# +0xfca ClonedThread : Pos 6, 1 Bit
# +0xfca SuppressDebugMsg : Pos 7, 1 Bit
# +0xfca DisableUserStackWalk : Pos 8, 1 Bit
# +0xfca RtlExceptionAttached : Pos 9, 1 Bit
# +0xfca InitialThread : Pos 10, 1 Bit
# +0xfca SpareSameTebBits : Pos 11, 5 Bits
# +0xfcc TxnScopeEnterCallback : Ptr32 Void
# +0xfd0 TxnScopeExitCallback : Ptr32 Void
# +0xfd4 TxnScopeContext : Ptr32 Void
# +0xfd8 LockCount : Uint4B
# +0xfdc SpareUlong0 : Uint4B
# +0xfe0 ResourceRetValue : Ptr32 Void
class _TEB_2008_R2(Structure):
    """Thread Environment Block for 32 bit Windows 7 / Server 2008 R2.

    Field names and types mirror the windbg ``dt _TEB`` dump reproduced
    in the comment block above (32 bit offsets). Differences from
    ``_TEB_2008``: the SpareBool*/IdealProcessor bytes are replaced by
    the CurrentIdealProcessor group, ImpersonationLocale becomes
    MuiGeneration, and the tail gains SpareUlong0 / ResourceRetValue.
    Do not reorder fields: declaration order defines the layout.

    NOTE(review): the windbg dump shows CurrentIdealProcessor,
    IdealProcessorValue and ReservedPad0..2 / IdealProcessor as a union
    at a single offset (0xf74); here they are declared sequentially,
    which inflates the declared size -- confirm before relying on the
    offsets of any field after GdiBatchCount.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib", NT_TIB),
        ("EnvironmentPointer", PVOID),
        ("ClientId", CLIENT_ID),
        ("ActiveRpcHandle", HANDLE),
        ("ThreadLocalStoragePointer", PVOID),
        ("ProcessEnvironmentBlock", PVOID), # PPEB
        ("LastErrorValue", DWORD),
        ("CountOfOwnedCriticalSections", DWORD),
        ("CsrClientThread", PVOID),
        ("Win32ThreadInfo", PVOID),
        ("User32Reserved", DWORD * 26),
        ("UserReserved", DWORD * 5),
        ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale", DWORD),
        ("FpSoftwareStatusRegister", DWORD),
        ("SystemReserved1", PVOID * 54),
        ("ExceptionCode", SDWORD),
        ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK
        ("SpareBytes", UCHAR * 36),
        ("TxFsContext", DWORD),
        ("GdiTebBatch", GDI_TEB_BATCH),
        ("RealClientId", CLIENT_ID),
        ("GdiCachedProcessHandle", HANDLE),
        ("GdiClientPID", DWORD),
        ("GdiClientTID", DWORD),
        ("GdiThreadLocalInfo", PVOID),
        ("Win32ClientInfo", DWORD * 62),
        ("glDispatchTable", PVOID * 233),
        ("glReserved1", DWORD * 29),
        ("glReserved2", PVOID),
        ("glSectionInfo", PVOID),
        ("glSection", PVOID),
        ("glTable", PVOID),
        ("glCurrentRC", PVOID),
        ("glContext", PVOID),
        ("LastStatusValue", NTSTATUS),
        ("StaticUnicodeString", UNICODE_STRING),
        ("StaticUnicodeBuffer", WCHAR * 261),
        ("DeallocationStack", PVOID),
        ("TlsSlots", PVOID * 64),
        ("TlsLinks", LIST_ENTRY),
        ("Vdm", PVOID),
        ("ReservedForNtRpc", PVOID),
        ("DbgSsReserved", PVOID * 2),
        ("HardErrorMode", DWORD),
        ("Instrumentation", PVOID * 9),
        ("ActivityId", GUID),
        ("SubProcessTag", PVOID),
        ("EtwLocalData", PVOID),
        ("EtwTraceData", PVOID),
        ("WinSockData", PVOID),
        ("GdiBatchCount", DWORD),
        ("CurrentIdealProcessor", PROCESSOR_NUMBER),
        ("IdealProcessorValue", DWORD),
        ("ReservedPad0", UCHAR),
        ("ReservedPad1", UCHAR),
        ("ReservedPad2", UCHAR),
        ("IdealProcessor", UCHAR),
        ("GuaranteedStackBytes", DWORD),
        ("ReservedForPerf", PVOID),
        ("ReservedForOle", PVOID),
        ("WaitingOnLoaderLock", DWORD),
        ("SavedPriorityState", PVOID),
        ("SoftPatchPtr1", PVOID),
        ("ThreadPoolData", PVOID),
        ("TlsExpansionSlots", PVOID), # Ptr32 Ptr32 Void
        ("MuiGeneration", DWORD),
        ("IsImpersonating", BOOL),
        ("NlsCache", PVOID),
        ("pShimData", PVOID),
        ("HeapVirtualAffinity", DWORD),
        ("CurrentTransactionHandle", HANDLE),
        ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME
        ("FlsData", PVOID),
        ("PreferredLanguages", PVOID),
        ("UserPrefLanguages", PVOID),
        ("MergedPrefLanguages", PVOID),
        ("MuiImpersonation", BOOL),
        ("CrossTebFlags", WORD),
        ("SameTebFlags", WORD),
        ("TxnScopeEnterCallback", PVOID),
        ("TxnScopeExitCallback", PVOID),
        ("TxnScopeContext", PVOID),
        ("LockCount", DWORD),
        ("SpareUlong0", ULONG),
        ("ResourceRetValue", PVOID),
    ]
# +0x000 NtTib : _NT_TIB
# +0x038 EnvironmentPointer : Ptr64 Void
# +0x040 ClientId : _CLIENT_ID
# +0x050 ActiveRpcHandle : Ptr64 Void
# +0x058 ThreadLocalStoragePointer : Ptr64 Void
# +0x060 ProcessEnvironmentBlock : Ptr64 _PEB
# +0x068 LastErrorValue : Uint4B
# +0x06c CountOfOwnedCriticalSections : Uint4B
# +0x070 CsrClientThread : Ptr64 Void
# +0x078 Win32ThreadInfo : Ptr64 Void
# +0x080 User32Reserved : [26] Uint4B
# +0x0e8 UserReserved : [5] Uint4B
# +0x100 WOW32Reserved : Ptr64 Void
# +0x108 CurrentLocale : Uint4B
# +0x10c FpSoftwareStatusRegister : Uint4B
# +0x110 SystemReserved1 : [54] Ptr64 Void
# +0x2c0 ExceptionCode : Int4B
# +0x2c8 ActivationContextStackPointer : Ptr64 _ACTIVATION_CONTEXT_STACK
# +0x2d0 SpareBytes : [24] UChar
# +0x2e8 TxFsContext : Uint4B
# +0x2f0 GdiTebBatch : _GDI_TEB_BATCH
# +0x7d8 RealClientId : _CLIENT_ID
# +0x7e8 GdiCachedProcessHandle : Ptr64 Void
# +0x7f0 GdiClientPID : Uint4B
# +0x7f4 GdiClientTID : Uint4B
# +0x7f8 GdiThreadLocalInfo : Ptr64 Void
# +0x800 Win32ClientInfo : [62] Uint8B
# +0x9f0 glDispatchTable : [233] Ptr64 Void
# +0x1138 glReserved1 : [29] Uint8B
# +0x1220 glReserved2 : Ptr64 Void
# +0x1228 glSectionInfo : Ptr64 Void
# +0x1230 glSection : Ptr64 Void
# +0x1238 glTable : Ptr64 Void
# +0x1240 glCurrentRC : Ptr64 Void
# +0x1248 glContext : Ptr64 Void
# +0x1250 LastStatusValue : Uint4B
# +0x1258 StaticUnicodeString : _UNICODE_STRING
# +0x1268 StaticUnicodeBuffer : [261] Wchar
# +0x1478 DeallocationStack : Ptr64 Void
# +0x1480 TlsSlots : [64] Ptr64 Void
# +0x1680 TlsLinks : _LIST_ENTRY
# +0x1690 Vdm : Ptr64 Void
# +0x1698 ReservedForNtRpc : Ptr64 Void
# +0x16a0 DbgSsReserved : [2] Ptr64 Void
# +0x16b0 HardErrorMode : Uint4B
# +0x16b8 Instrumentation : [11] Ptr64 Void
# +0x1710 ActivityId : _GUID
# +0x1720 SubProcessTag : Ptr64 Void
# +0x1728 EtwLocalData : Ptr64 Void
# +0x1730 EtwTraceData : Ptr64 Void
# +0x1738 WinSockData : Ptr64 Void
# +0x1740 GdiBatchCount : Uint4B
# +0x1744 CurrentIdealProcessor : _PROCESSOR_NUMBER
# +0x1744 IdealProcessorValue : Uint4B
# +0x1744 ReservedPad0 : UChar
# +0x1745 ReservedPad1 : UChar
# +0x1746 ReservedPad2 : UChar
# +0x1747 IdealProcessor : UChar
# +0x1748 GuaranteedStackBytes : Uint4B
# +0x1750 ReservedForPerf : Ptr64 Void
# +0x1758 ReservedForOle : Ptr64 Void
# +0x1760 WaitingOnLoaderLock : Uint4B
# +0x1768 SavedPriorityState : Ptr64 Void
# +0x1770 SoftPatchPtr1 : Uint8B
# +0x1778 ThreadPoolData : Ptr64 Void
# +0x1780 TlsExpansionSlots : Ptr64 Ptr64 Void
# +0x1788 DeallocationBStore : Ptr64 Void
# +0x1790 BStoreLimit : Ptr64 Void
# +0x1798 MuiGeneration : Uint4B
# +0x179c IsImpersonating : Uint4B
# +0x17a0 NlsCache : Ptr64 Void
# +0x17a8 pShimData : Ptr64 Void
# +0x17b0 HeapVirtualAffinity : Uint4B
# +0x17b8 CurrentTransactionHandle : Ptr64 Void
# +0x17c0 ActiveFrame : Ptr64 _TEB_ACTIVE_FRAME
# +0x17c8 FlsData : Ptr64 Void
# +0x17d0 PreferredLanguages : Ptr64 Void
# +0x17d8 UserPrefLanguages : Ptr64 Void
# +0x17e0 MergedPrefLanguages : Ptr64 Void
# +0x17e8 MuiImpersonation : Uint4B
# +0x17ec CrossTebFlags : Uint2B
# +0x17ec SpareCrossTebBits : Pos 0, 16 Bits
# +0x17ee SameTebFlags : Uint2B
# +0x17ee SafeThunkCall : Pos 0, 1 Bit
# +0x17ee InDebugPrint : Pos 1, 1 Bit
# +0x17ee HasFiberData : Pos 2, 1 Bit
# +0x17ee SkipThreadAttach : Pos 3, 1 Bit
# +0x17ee WerInShipAssertCode : Pos 4, 1 Bit
# +0x17ee RanProcessInit : Pos 5, 1 Bit
# +0x17ee ClonedThread : Pos 6, 1 Bit
# +0x17ee SuppressDebugMsg : Pos 7, 1 Bit
# +0x17ee DisableUserStackWalk : Pos 8, 1 Bit
# +0x17ee RtlExceptionAttached : Pos 9, 1 Bit
# +0x17ee InitialThread : Pos 10, 1 Bit
# +0x17ee SpareSameTebBits : Pos 11, 5 Bits
# +0x17f0 TxnScopeEnterCallback : Ptr64 Void
# +0x17f8 TxnScopeExitCallback : Ptr64 Void
# +0x1800 TxnScopeContext : Ptr64 Void
# +0x1808 LockCount : Uint4B
# +0x180c SpareUlong0 : Uint4B
# +0x1810 ResourceRetValue : Ptr64 Void
class _TEB_2008_R2_64(Structure):
    """Thread Environment Block for 64 bit Windows 7 / Server 2008 R2.

    Field names and types follow the windbg ``dt _TEB`` dump reproduced
    in the comment block above (64 bit offsets). Do not reorder fields:
    declaration order defines the in-memory layout.

    NOTE(review): the windbg dump shows CurrentIdealProcessor,
    IdealProcessorValue and ReservedPad0..2 / IdealProcessor as a union
    at a single offset (0x1744); here (as in the sibling structures)
    they are declared sequentially, which inflates the declared size --
    confirm before relying on offsets of fields after GdiBatchCount.
    """
    _pack_ = 8
    _fields_ = [
        ("NtTib", NT_TIB),
        ("EnvironmentPointer", PVOID),
        ("ClientId", CLIENT_ID),
        ("ActiveRpcHandle", HANDLE),
        ("ThreadLocalStoragePointer", PVOID),
        ("ProcessEnvironmentBlock", PVOID), # PPEB
        ("LastErrorValue", DWORD),
        ("CountOfOwnedCriticalSections", DWORD),
        ("CsrClientThread", PVOID),
        ("Win32ThreadInfo", PVOID),
        ("User32Reserved", DWORD * 26),
        ("UserReserved", DWORD * 5),
        ("WOW32Reserved", PVOID), # ptr to wow64cpu!X86SwitchTo64BitMode
        ("CurrentLocale", DWORD),
        ("FpSoftwareStatusRegister", DWORD),
        ("SystemReserved1", PVOID * 54),
        ("ExceptionCode", SDWORD),
        ("ActivationContextStackPointer", PVOID), # PACTIVATION_CONTEXT_STACK
        ("SpareBytes", UCHAR * 24),
        ("TxFsContext", DWORD),
        ("GdiTebBatch", GDI_TEB_BATCH),
        ("RealClientId", CLIENT_ID),
        ("GdiCachedProcessHandle", HANDLE),
        ("GdiClientPID", DWORD),
        ("GdiClientTID", DWORD),
        ("GdiThreadLocalInfo", PVOID),
        # BUGFIX: the windbg dump above gives Win32ClientInfo as
        # "[62] Uint8B" (offsets 0x800..0x9f0, i.e. 62 * 8 bytes), and
        # the sibling _TEB_2008_64 declares it as QWORD * 62. This was
        # previously DWORD * 62, shifting every later field by 248 bytes.
        ("Win32ClientInfo", QWORD * 62),
        ("glDispatchTable", PVOID * 233),
        ("glReserved1", QWORD * 29),
        ("glReserved2", PVOID),
        ("glSectionInfo", PVOID),
        ("glSection", PVOID),
        ("glTable", PVOID),
        ("glCurrentRC", PVOID),
        ("glContext", PVOID),
        ("LastStatusValue", NTSTATUS),
        ("StaticUnicodeString", UNICODE_STRING),
        ("StaticUnicodeBuffer", WCHAR * 261),
        ("DeallocationStack", PVOID),
        ("TlsSlots", PVOID * 64),
        ("TlsLinks", LIST_ENTRY),
        ("Vdm", PVOID),
        ("ReservedForNtRpc", PVOID),
        ("DbgSsReserved", PVOID * 2),
        ("HardErrorMode", DWORD),
        ("Instrumentation", PVOID * 11),
        ("ActivityId", GUID),
        ("SubProcessTag", PVOID),
        ("EtwLocalData", PVOID),
        ("EtwTraceData", PVOID),
        ("WinSockData", PVOID),
        ("GdiBatchCount", DWORD),
        ("CurrentIdealProcessor", PROCESSOR_NUMBER),
        ("IdealProcessorValue", DWORD),
        ("ReservedPad0", UCHAR),
        ("ReservedPad1", UCHAR),
        ("ReservedPad2", UCHAR),
        ("IdealProcessor", UCHAR),
        ("GuaranteedStackBytes", DWORD),
        ("ReservedForPerf", PVOID),
        ("ReservedForOle", PVOID),
        ("WaitingOnLoaderLock", DWORD),
        ("SavedPriorityState", PVOID),
        ("SoftPatchPtr1", PVOID),
        ("ThreadPoolData", PVOID),
        ("TlsExpansionSlots", PVOID), # Ptr64 Ptr64 Void
        ("DeallocationBStore", PVOID),
        ("BStoreLimit", PVOID),
        ("MuiGeneration", DWORD),
        ("IsImpersonating", BOOL),
        ("NlsCache", PVOID),
        ("pShimData", PVOID),
        ("HeapVirtualAffinity", DWORD),
        ("CurrentTransactionHandle", HANDLE),
        ("ActiveFrame", PVOID), # PTEB_ACTIVE_FRAME
        ("FlsData", PVOID),
        ("PreferredLanguages", PVOID),
        ("UserPrefLanguages", PVOID),
        ("MergedPrefLanguages", PVOID),
        ("MuiImpersonation", BOOL),
        ("CrossTebFlags", WORD),
        ("SameTebFlags", WORD),
        ("TxnScopeEnterCallback", PVOID),
        ("TxnScopeExitCallback", PVOID),
        ("TxnScopeContext", PVOID),
        ("LockCount", DWORD),
        ("SpareUlong0", ULONG),
        ("ResourceRetValue", PVOID),
    ]
# Aliases: the client and server releases of the same kernel share the
# same TEB layout (Vista <-> Server 2008, Windows 7 <-> Server 2008 R2).
_TEB_Vista = _TEB_2008
_TEB_Vista_64 = _TEB_2008_64
_TEB_W7 = _TEB_2008_R2
_TEB_W7_64 = _TEB_2008_R2_64
# Use the correct TEB structure definition.
# Defaults to the latest Windows version.
class TEB(Structure):
    """Thread Environment Block for the Windows version detected at
    import time.

    Copies ``_fields_`` (and for NT/2000 also ``_pack_``) from the
    version-specific ``_TEB_*`` structure selected below. Unrecognized
    versions fall back to the Windows 7 layout of the matching pointer
    size.

    NOTE(review): ``os`` here is presumably a module-level OS-version
    string (e.g. 'Windows 7') set earlier in this module, shadowing the
    stdlib ``os`` module name -- confirm against the module header
    (not visible in this chunk).
    """
    _pack_ = 8
    # Only the NT and 2000 branches override the default alignment;
    # every other branch keeps _pack_ = 8 and copies the fields only.
    if os == 'Windows NT':
        _pack_ = _TEB_NT._pack_
        _fields_ = _TEB_NT._fields_
    elif os == 'Windows 2000':
        _pack_ = _TEB_2000._pack_
        _fields_ = _TEB_2000._fields_
    elif os == 'Windows XP':
        _fields_ = _TEB_XP._fields_
    elif os == 'Windows XP (64 bits)':
        _fields_ = _TEB_XP_64._fields_
    elif os == 'Windows 2003':
        _fields_ = _TEB_2003._fields_
    elif os == 'Windows 2003 (64 bits)':
        _fields_ = _TEB_2003_64._fields_
    elif os == 'Windows 2008':
        _fields_ = _TEB_2008._fields_
    elif os == 'Windows 2008 (64 bits)':
        _fields_ = _TEB_2008_64._fields_
    elif os == 'Windows 2003 R2':
        _fields_ = _TEB_2003_R2._fields_
    elif os == 'Windows 2003 R2 (64 bits)':
        _fields_ = _TEB_2003_R2_64._fields_
    elif os == 'Windows 2008 R2':
        _fields_ = _TEB_2008_R2._fields_
    elif os == 'Windows 2008 R2 (64 bits)':
        _fields_ = _TEB_2008_R2_64._fields_
    elif os == 'Windows Vista':
        _fields_ = _TEB_Vista._fields_
    elif os == 'Windows Vista (64 bits)':
        _fields_ = _TEB_Vista_64._fields_
    elif os == 'Windows 7':
        _fields_ = _TEB_W7._fields_
    elif os == 'Windows 7 (64 bits)':
        _fields_ = _TEB_W7_64._fields_
    elif sizeof(SIZE_T) == sizeof(DWORD):
        # Unknown 32 bit Windows: assume the Windows 7 layout.
        _fields_ = _TEB_W7._fields_
    else:
        # Unknown 64 bit Windows: assume the Windows 7 layout.
        _fields_ = _TEB_W7_64._fields_

# Pointer to a TEB structure.
PTEB = POINTER(TEB)
#==============================================================================
# This calculates the list of exported symbols.
# NOTE(review): ``_all`` is presumably a snapshot of vars().keys() taken
# near the top of the module, so the difference below yields only the names
# defined since then -- confirm against the module header (not visible here).
_all = set(vars().keys()).difference(_all)
# Export every new public name (no leading underscore), sorted for stability.
__all__ = [_x for _x in _all if not _x.startswith('_')]
__all__.sort()
#==============================================================================
| apache-2.0 |
yuanagain/seniorthesis | venv/lib/python2.7/site-packages/wheel/tool/__init__.py | 93 | 13217 | """
Wheel command-line utility.
"""
import os
import hashlib
import sys
import json
from glob import iglob
from .. import signatures
from ..util import (urlsafe_b64decode, urlsafe_b64encode, native, binary,
matches_requirement)
from ..install import WheelFile, VerifyingZipFile
from ..paths import get_install_command
def require_pkgresources(name):
    """Ensure ``pkg_resources`` is importable before running *name*.

    :param name: Command name to mention in the error message.
    :raises RuntimeError: if ``pkg_resources`` (part of setuptools)
        cannot be imported.
    """
    try:
        import pkg_resources  # noqa: F401 (availability check only)
    except ImportError:
        raise RuntimeError("'{0}' needs pkg_resources (part of setuptools).".format(name))
import argparse
class WheelError(Exception):
    """Raised for user-facing failures of the wheel command-line tool."""
# For testability
def get_keyring():
    """Return the ``(WheelKeys, keyring)`` pair used for wheel signing.

    The imports are done lazily so the optional signature dependencies
    are only required when signing features are actually used. The
    ``assert`` rejects keyring backends whose ``priority`` is falsy
    (i.e. no usable backend is configured).

    :returns: The ``WheelKeys`` class and the ``keyring`` module.
    :raises WheelError: if the key store or the ``keyring`` package is
        missing, or no usable keyring backend is available.
    """
    try:
        from ..signatures import keys
        import keyring
        assert keyring.get_keyring().priority
    except (ImportError, AssertionError):
        raise WheelError("Install wheel[signatures] (requires keyring, keyrings.alt, pyxdg) for signatures.")
    return keys.WheelKeys, keyring
def keygen(get_keyring=get_keyring):
    """Generate and register a new Ed25519 signing keypair.

    The secret key is stored in the system keyring under the service
    name "wheel", keyed by the urlsafe-base64 public key; the public
    key is then trusted for every package name ('+').
    """
    WheelKeys, keyring = get_keyring()
    ed25519ll = signatures.get_ed25519ll()

    wheel_keys = WheelKeys().load()
    keypair = ed25519ll.crypto_sign_keypair()
    verify_key = native(urlsafe_b64encode(keypair.vk))
    secret_key = native(urlsafe_b64encode(keypair.sk))

    backend = keyring.get_keyring()
    backend.set_password("wheel", verify_key, secret_key)
    sys.stdout.write("Created Ed25519 keypair with vk={0}\n".format(verify_key))
    sys.stdout.write("in {0!r}\n".format(backend))

    # Round-trip the secret key to make sure the backend really stores it.
    if backend.get_password('wheel', verify_key) != secret_key:
        raise WheelError("Keyring is broken. Could not retrieve secret key.")

    sys.stdout.write("Trusting {0} to sign and verify all packages.\n".format(verify_key))
    wheel_keys.add_signer('+', verify_key)
    wheel_keys.trust('+', verify_key)
    wheel_keys.save()
def sign(wheelfile, replace=False, get_keyring=get_keyring):
    """Sign a wheel in place by appending a RECORD.jws entry.

    The first configured signer for the wheel's package name is used;
    its secret key is fetched from the system keyring and the RECORD
    file's SHA-256 digest is signed and written into the archive.

    :param wheelfile: Path of the wheel to sign.
    :param replace: NOTE(review): accepted but never used in this body
        -- presumably intended to allow re-signing; confirm with callers.
    :param get_keyring: Factory returning ``(WheelKeys, keyring)``;
        overridable for testing.
    :raises WheelError: if the wheel already contains a signature.
    """
    WheelKeys, keyring = get_keyring()
    ed25519ll = signatures.get_ed25519ll()
    wf = WheelFile(wheelfile, append=True)
    wk = WheelKeys().load()

    name = wf.parsed_filename.group('name')
    # Sign with the first signer registered for this package name.
    sign_with = wk.signers(name)[0]
    sys.stdout.write("Signing {0} with {1}\n".format(name, sign_with[1]))

    vk = sign_with[1]
    kr = keyring.get_keyring()
    sk = kr.get_password('wheel', vk)
    keypair = ed25519ll.Keypair(urlsafe_b64decode(binary(vk)),
                                urlsafe_b64decode(binary(sk)))

    record_name = wf.distinfo_name + '/RECORD'
    sig_name = wf.distinfo_name + '/RECORD.jws'
    # Refuse to double-sign; the signature must stay the last entry.
    if sig_name in wf.zipfile.namelist():
        raise WheelError("Wheel is already signed.")
    record_data = wf.zipfile.read(record_name)
    payload = {"hash":"sha256=" + native(urlsafe_b64encode(hashlib.sha256(record_data).digest()))}
    sig = signatures.sign(payload, keypair)
    wf.zipfile.writestr(sig_name, json.dumps(sig, sort_keys=True))
    wf.zipfile.close()
def unsign(wheelfile):
    """Strip the signature (RECORD.jws) from a wheel.

    RECORD.jws must be the last entry of the archive, and the zip must
    be an ordinary archive (compressed files and directory in the same
    order, no trailing non-zip data); the file is then truncated in
    place just before the signature entry.
    """
    archive = VerifyingZipFile(wheelfile, "a")
    entries = archive.infolist()
    is_signed = bool(entries) and entries[-1].filename.endswith('/RECORD.jws')
    if not is_signed:
        raise WheelError("RECORD.jws not found at end of archive.")
    archive.pop()
    archive.close()
def verify(wheelfile):
    """Check a wheel's signature for internal consistency and print it.

    Only the signature itself is verified here; the unpack/install
    commands are what check the manifest against the file contents.
    """
    bundle = WheelFile(wheelfile)
    signature_path = '{0}/RECORD.jws'.format(bundle.distinfo_name)
    raw_signature = bundle.zipfile.open(signature_path).read()
    verified = signatures.verify(json.loads(native(raw_signature)))
    sys.stderr.write("Signatures are internally consistent.\n")
    sys.stdout.write(json.dumps(verified, indent=2))
    sys.stdout.write('\n')
def unpack(wheelfile, dest='.'):
    """Extract a wheel archive into ``{dest}/{name}-{ver}``.

    :param wheelfile: Path of the wheel to extract.
    :param dest: Parent directory for the output (defaults to the
        current directory).
    """
    bundle = WheelFile(wheelfile)
    name_version = bundle.parsed_filename.group('namever')
    target = os.path.join(dest, name_version)
    sys.stderr.write("Unpacking to: %s\n" % (target))
    bundle.zipfile.extractall(target)
    bundle.zipfile.close()
def install(requirements, requirements_file=None,
            wheel_dirs=None, force=False, list_files=False,
            dry_run=False):
    """Install wheels.

    :param requirements: A list of requirements or wheel files to install.
    :param requirements_file: A file containing requirements to install.
    :param wheel_dirs: A list of directories to search for wheels.
    :param force: Install a wheel file even if it is not compatible.
    :param list_files: Only list the files to install, don't install them.
    :param dry_run: Do everything but the actual install.
    """

    # If no wheel directories specified, use the WHEELPATH environment
    # variable, or the current directory if that is not set.
    if not wheel_dirs:
        wheelpath = os.getenv("WHEELPATH")
        if wheelpath:
            wheel_dirs = wheelpath.split(os.pathsep)
        else:
            wheel_dirs = [ os.path.curdir ]

    # Get a list of all valid wheels in wheel_dirs
    # (only wheels compatible with the running Python are candidates).
    all_wheels = []
    for d in wheel_dirs:
        for w in os.listdir(d):
            if w.endswith('.whl'):
                wf = WheelFile(os.path.join(d, w))
                if wf.compatible:
                    all_wheels.append(wf)

    # If there is a requirements file, add it to the list of requirements
    if requirements_file:
        # If the file doesn't exist, search for it in wheel_dirs
        # This allows standard requirements files to be stored with the
        # wheels.
        if not os.path.exists(requirements_file):
            for d in wheel_dirs:
                name = os.path.join(d, requirements_file)
                if os.path.exists(name):
                    requirements_file = name
                    break

        with open(requirements_file) as fd:
            requirements.extend(fd)

    # Resolve each requirement to a concrete WheelFile before touching
    # anything, so a bad requirement aborts with nothing installed.
    to_install = []
    for req in requirements:
        if req.endswith('.whl'):
            # Explicitly specified wheel filename
            if os.path.exists(req):
                wf = WheelFile(req)
                if wf.compatible or force:
                    to_install.append(wf)
                else:
                    msg = ("{0} is not compatible with this Python. "
                           "--force to install anyway.".format(req))
                    raise WheelError(msg)
            else:
                # We could search on wheel_dirs, but it's probably OK to
                # assume the user has made an error.
                raise WheelError("No such wheel file: {}".format(req))
            continue

        # We have a requirement spec
        # If we don't have pkg_resources, this will raise an exception
        matches = matches_requirement(req, all_wheels)
        if not matches:
            raise WheelError("No match for requirement {}".format(req))
        # max() picks the best (highest-sorting) matching wheel.
        to_install.append(max(matches))

    # We now have a list of wheels to install
    if list_files:
        sys.stdout.write("Installing:\n")
    if dry_run:
        return
    for wf in to_install:
        if list_files:
            sys.stdout.write("    {0}\n".format(wf.filename))
            continue
        wf.install(force=force)
        wf.zipfile.close()
def install_scripts(distributions):
    """Regenerate the entry_points console_scripts for each named
    distribution, via setuptools' easy_install machinery.
    """
    try:
        from setuptools.command import easy_install
        import pkg_resources
    except ImportError:
        raise RuntimeError("'wheel install_scripts' needs setuptools.")

    for dist_name in distributions:
        resolved = pkg_resources.get_distribution(dist_name)
        install_cmd = get_install_command(dist_name)
        script_writer = easy_install.easy_install(install_cmd.distribution)
        script_writer.args = ['wheel']  # dummy argument
        script_writer.finalize_options()
        script_writer.install_egg_scripts(resolved)
def convert(installers, dest_dir, verbose):
    """Convert egg and wininst installers matching the given glob
    patterns into wheels written to *dest_dir*.
    """
    # Only support wheel convert if pkg_resources is present
    require_pkgresources('wheel convert')
    from ..wininst2wheel import bdist_wininst2wheel
    from ..egg2wheel import egg2wheel

    for pattern in installers:
        for path in iglob(pattern):
            is_egg = os.path.splitext(path)[1] == '.egg'
            converter = egg2wheel if is_egg else bdist_wininst2wheel
            if verbose:
                sys.stdout.write("{0}... ".format(path))
                sys.stdout.flush()
            converter(path, dest_dir)
            if verbose:
                sys.stdout.write("OK\n")
def parser():
    """Build the argparse command-line parser for the ``wheel`` tool.

    Each subcommand is wired up the same way: a tiny ``*_f`` adapter
    closure unpacks the parsed ``args`` namespace and calls the real
    implementation function, and ``set_defaults(func=...)`` records the
    adapter so main() can invoke ``args.func(args)``.
    """
    p = argparse.ArgumentParser()
    s = p.add_subparsers(help="commands")

    def keygen_f(args):
        keygen()
    keygen_parser = s.add_parser('keygen', help='Generate signing key')
    keygen_parser.set_defaults(func=keygen_f)

    def sign_f(args):
        sign(args.wheelfile)
    sign_parser = s.add_parser('sign', help='Sign wheel')
    sign_parser.add_argument('wheelfile', help='Wheel file')
    sign_parser.set_defaults(func=sign_f)

    def unsign_f(args):
        unsign(args.wheelfile)
    # unsign/verify reuse the implementation's docstring as help text.
    unsign_parser = s.add_parser('unsign', help=unsign.__doc__)
    unsign_parser.add_argument('wheelfile', help='Wheel file')
    unsign_parser.set_defaults(func=unsign_f)

    def verify_f(args):
        verify(args.wheelfile)
    verify_parser = s.add_parser('verify', help=verify.__doc__)
    verify_parser.add_argument('wheelfile', help='Wheel file')
    verify_parser.set_defaults(func=verify_f)

    def unpack_f(args):
        unpack(args.wheelfile, args.dest)
    unpack_parser = s.add_parser('unpack', help='Unpack wheel')
    unpack_parser.add_argument('--dest', '-d', help='Destination directory',
                               default='.')
    unpack_parser.add_argument('wheelfile', help='Wheel file')
    unpack_parser.set_defaults(func=unpack_f)

    def install_f(args):
        install(args.requirements, args.requirements_file,
                args.wheel_dirs, args.force, args.list_files)
    install_parser = s.add_parser('install', help='Install wheels')
    install_parser.add_argument('requirements', nargs='*',
                                help='Requirements to install.')
    install_parser.add_argument('--force', default=False,
                                action='store_true',
                                help='Install incompatible wheel files.')
    install_parser.add_argument('--wheel-dir', '-d', action='append',
                                dest='wheel_dirs',
                                help='Directories containing wheels.')
    install_parser.add_argument('--requirements-file', '-r',
                                help="A file containing requirements to "
                                "install.")
    install_parser.add_argument('--list', '-l', default=False,
                                dest='list_files',
                                action='store_true',
                                help="List wheels which would be installed, "
                                "but don't actually install anything.")
    install_parser.set_defaults(func=install_f)

    def install_scripts_f(args):
        install_scripts(args.distributions)
    install_scripts_parser = s.add_parser('install-scripts', help='Install console_scripts')
    install_scripts_parser.add_argument('distributions', nargs='*',
                                        help='Regenerate console_scripts for these distributions')
    install_scripts_parser.set_defaults(func=install_scripts_f)

    def convert_f(args):
        convert(args.installers, args.dest_dir, args.verbose)
    convert_parser = s.add_parser('convert', help='Convert egg or wininst to wheel')
    convert_parser.add_argument('installers', nargs='*', help='Installers to convert')
    convert_parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
                                help="Directory to store wheels (default %(default)s)")
    convert_parser.add_argument('--verbose', '-v', action='store_true')
    convert_parser.set_defaults(func=convert_f)

    def version_f(args):
        from .. import __version__
        sys.stdout.write("wheel %s\n" % __version__)
    version_parser = s.add_parser('version', help='Print version and exit')
    version_parser.set_defaults(func=version_f)

    def help_f(args):
        p.print_help()
    help_parser = s.add_parser('help', help='Show this help')
    help_parser.set_defaults(func=help_f)

    return p
def main():
    """Entry point for the ``wheel`` command-line tool.

    Returns 0 on success, 1 when the invoked subcommand raises a
    WheelError, and None when no subcommand was given (help is printed).
    """
    p = parser()
    args = p.parse_args()
    if not hasattr(args, 'func'):
        # XXX on Python 3.3 we get 'args has no func' rather than short help.
        p.print_help()
    else:
        try:
            args.func(args)
            return 0
        except WheelError as e:
            # BUG FIX: the original wrote e.message, but BaseException.message
            # was removed in Python 3 and would raise AttributeError here.
            # str(e) yields the message on both Python 2 and 3.
            sys.stderr.write(str(e) + "\n")
            return 1
| mit |
pagea/bridgedb | lib/bridgedb/test/test_bridgerequest.py | 1 | 1770 | # -*- coding: utf-8 -*-
#_____________________________________________________________________________
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: Isis Lovecruft 0xA3ADB67A2CDB8B35 <isis@torproject.org>
# please also see AUTHORS file
# :copyright: (c) 2007-2014, The Tor Project, Inc.
# (c) 2014, Isis Lovecruft
# :license: see LICENSE for licensing information
#_____________________________________________________________________________
from twisted.trial import unittest
from bridgedb import bridgerequest
from bridgedb.bridgerequest import IRequestBridges
from bridgedb.bridgerequest import BridgeRequestBase
class BridgeRequestBaseTests(unittest.TestCase):
    """Unittests for :class:`bridgedb.bridgerequest.BridgeRequestBase`."""

    def setUp(self):
        """Setup test run."""
        # A fresh, empty request for every test method.
        self.request = BridgeRequestBase()

    def test_BridgeRequestBase_implements_IRequestBridges(self):
        """BridgeRequestBase should implement IRequestBridges interface."""
        # implementedBy() inspects the zope.interface declaration on the
        # class itself, so no instance is needed here.
        self.assertTrue(IRequestBridges.implementedBy(BridgeRequestBase))

    def test_BridgeRequestBase_withoutBlockInCountry(self):
        """BridgeRequestBase.withoutBlockInCountry() should add the country CC
        to the ``notBlockedIn`` attribute.
        """
        self.request.withoutBlockInCountry('US')
        self.assertIn('US', self.request.notBlockedIn)

    def test_BridgeRequestBase_withPluggableTransportType(self):
        """BridgeRequestBase.withPluggableTransportType() should add the
        pluggable transport type to the ``transport`` attribute.
        """
        self.request.withPluggableTransportType('huggable-transport')
        self.assertIn('huggable-transport', self.request.transports)
| bsd-3-clause |
ppwwyyxx/tensorflow | tensorflow/python/framework/function_def_to_graph_test.py | 22 | 9133 | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.python.framework.function_def_to_graph."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.eager import function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function_def_to_graph
from tensorflow.python.framework import graph_to_function_def
from tensorflow.python.framework import op_def_library
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class FunctionDefToGraphTest(test.TestCase):
  """Tests converting a FunctionDef into a FuncGraph."""

  def _build_function_def(self):
    # Builds a FunctionDef with two float32 inputs (x, y) and two outputs
    # (x^2 + y^2, x^3 + y^3), named "_whats_in_a_name".
    with ops.Graph().as_default() as g:
      # Inputs
      x = array_ops.placeholder(dtypes.float32, name="x")
      y = array_ops.placeholder(dtypes.float32, name="y")

      # Outputs
      sum_squares = math_ops.add_n(
          [math_ops.pow(x, 2), math_ops.pow(y, 2)], name="sum_squares")
      sum_cubes = math_ops.add_n(
          [math_ops.pow(x, 3), math_ops.pow(y, 3)], name="sum_cubes")
    fdef = graph_to_function_def.graph_to_function_def(
        g,
        g.get_operations(),
        [x, y],  # Inputs
        [sum_squares, sum_cubes])  # Outputs.
    fdef.signature.name = "_whats_in_a_name"
    return fdef

  @test_util.run_deprecated_v1
  def testInputsAndOutputs(self):
    # The resulting FuncGraph keeps the FunctionDef's name and exposes
    # runnable input/output tensors.
    fdef = self._build_function_def()
    g = function_def_to_graph.function_def_to_graph(fdef)
    self.assertEqual(g.name, "_whats_in_a_name")
    with self.session(graph=g) as sess:
      inputs = sess.run(g.inputs, feed_dict={"x:0": 2, "y:0": 3})
      self.assertSequenceEqual(inputs, [2.0, 3.0])
      outputs = sess.run(g.outputs, feed_dict={"x:0": 2, "y:0": 3})
      # 2^2 + 3^2 = 13 and 2^3 + 3^3 = 35.
      self.assertSequenceEqual(outputs, [13.0, 35.0])

  def testShapes(self):
    fdef = self._build_function_def()

    # Without input_shapes, every tensor has unknown shape.
    g = function_def_to_graph.function_def_to_graph(fdef)
    self.assertIsNone(g.inputs[0].shape.dims)  # Unknown dims.
    self.assertIsNone(g.inputs[1].shape.dims)  # Unknown dims.
    self.assertIsNone(g.outputs[0].shape.dims)  # Unknown dims.
    self.assertIsNone(g.outputs[1].shape.dims)  # Unknown dims.

    # Fully specified input shapes propagate to the outputs.
    g = function_def_to_graph.function_def_to_graph(
        fdef,
        input_shapes=[
            tensor_shape.TensorShape([5]),
            tensor_shape.TensorShape([5])
        ])
    self.assertSequenceEqual(g.inputs[0].shape.dims, [5])
    self.assertSequenceEqual(g.inputs[1].shape.dims, [5])
    self.assertSequenceEqual(g.outputs[0].shape.dims, [5])
    self.assertSequenceEqual(g.outputs[1].shape.dims, [5])

    # A None entry leaves that input's shape unknown; output shapes are
    # inferred from the known input.
    g = function_def_to_graph.function_def_to_graph(
        fdef, input_shapes=[None, tensor_shape.TensorShape([5, 7])])
    self.assertIsNone(g.inputs[0].shape.dims)
    self.assertSequenceEqual(g.inputs[1].shape.dims, [5, 7])
    self.assertSequenceEqual(g.outputs[0].shape.dims, [5, 7])
    self.assertSequenceEqual(g.outputs[1].shape.dims, [5, 7])

    # Should raise a ValueError if the length of input_shapes does not match
    # the number of input args in FunctionDef.signature.input_arg.
    with self.assertRaises(ValueError):
      g = function_def_to_graph.function_def_to_graph(
          fdef, input_shapes=[tensor_shape.TensorShape([5, 7])])
class FunctionDefToGraphDefTest(test.TestCase):
  """Tests converting a FunctionDef into a GraphDef plus tensor-name map."""

  def _build_function_def(self):
    with ops.Graph().as_default() as g:
      # Inputs:    x    y    z
      #           |\   |   /
      #           | \  |  /
      #           |  foo_1      list_output
      #           |  /  \        /      \
      #           | d_1 e_1    a:1      a:0
      #           |  \   |    /          |
      #           |   \  |   /           |
      #           |    foo_2             |
      #           |    /  \              |
      # Outputs:  x  d_2  e_2           a:0
      x = array_ops.placeholder(dtypes.float32, name="x")
      y = array_ops.placeholder(dtypes.int32, name="y")
      z = array_ops.placeholder(dtypes.int32, name="z")
      d_1, e_1 = op_def_library.apply_op("Foo1", name="foo_1", a=x, b=y, c=z)
      list_output0, list_output1 = test_ops.list_output(
          T=[dtypes.int32, dtypes.int32], name="list_output")
      d_2, e_2 = test_ops.foo1(a=d_1, b=e_1, c=list_output1, name="foo_2")
    fdef = graph_to_function_def.graph_to_function_def(
        g,
        g.get_operations(),
        [x, y, z],  # Inputs
        [x, d_2, e_2, list_output0])  # Outputs.

    # Assert that the FunctionDef was correctly built.
    assert len(fdef.node_def) == 3  # 2 Foo1 nodes and 1 ListOutput node.
    assert fdef.node_def[0].op == "Foo1"
    assert fdef.node_def[0].input == ["x", "y", "z"]
    assert fdef.node_def[1].op == "ListOutput"
    assert not fdef.node_def[1].input
    assert fdef.node_def[2].op == "Foo1"
    assert fdef.node_def[2].input == [
        "foo_1:d:0", "foo_1:e:0", "list_output:a:1"
    ]
    return fdef

  def testTensorNames(self):
    fdef = self._build_function_def()
    g, tensor_name_map = function_def_to_graph.function_def_to_graph_def(fdef)

    # Verify that inputs of body nodes are correctly renamed.
    # foo_1
    self.assertSequenceEqual(g.node[3].input, ["x:0", "y:0", "z:0"])
    # foo_2
    self.assertSequenceEqual(g.node[5].input,
                             ["foo_1:0", "foo_1:1", "list_output:1"])

    # Verify that the `tensor_name_map` has the correct mapping.
    # FunctionDef "op:arg:index" names map to GraphDef "op:index" names,
    # and "^op" control-input names map to themselves.
    self.assertDictEqual(
        tensor_name_map, {
            "x": "x:0",
            "^x": "^x",
            "y": "y:0",
            "^y": "^y",
            "z": "z:0",
            "^z": "^z",
            "foo_1:d:0": "foo_1:0",
            "foo_1:e:0": "foo_1:1",
            "^foo_1": "^foo_1",
            "list_output:a:0": "list_output:0",
            "list_output:a:1": "list_output:1",
            "^list_output": "^list_output",
            "foo_2:d:0": "foo_2:0",
            "foo_2:e:0": "foo_2:1",
            "^foo_2": "^foo_2",
        })

  def testShapes(self):
    fdef = self._build_function_def()
    g, _ = function_def_to_graph.function_def_to_graph_def(
        fdef,
        input_shapes=[
            tensor_shape.TensorShape([]),
            tensor_shape.TensorShape([5]), None
        ])
    # Placeholders with a supplied shape carry a "shape" attr; the one
    # given None (g.node[2]) does not.
    self.assertEqual("shape" in g.node[0].attr, True)
    self.assertSequenceEqual(
        tensor_shape.TensorShape(g.node[0].attr["shape"].shape).as_list(), [])
    self.assertEqual(g.node[0].attr["shape"].shape.unknown_rank, False)
    self.assertEqual("shape" in g.node[1].attr, True)
    self.assertSequenceEqual(
        tensor_shape.TensorShape(g.node[1].attr["shape"].shape).as_list(), [5])
    # NOTE(review): this line re-checks g.node[0] although the surrounding
    # assertions are about g.node[1]; looks like a copy-paste slip — confirm
    # whether node[1] was intended.
    self.assertEqual(g.node[0].attr["shape"].shape.unknown_rank, False)
    self.assertFalse("shape" in g.node[2].attr)

  def testControlDependencies(self):
    v = variables.Variable(1)

    @function.defun
    def fn(inp):
      assign = v.assign(3, name="assign", read_value=False)
      x = constant_op.constant(2.0, name="x")
      # TODO(b/79881896): Test external control dependency once that's
      # supported.
      with ops.control_dependencies([x, inp, assign]):
        constant_op.constant(3.0, name="y")
      return 4.0

    inp = constant_op.constant(1.0)
    fdef = fn.get_concrete_function(inp).function_def
    func_graph = function_def_to_graph.function_def_to_graph(fdef)

    # The control deps declared inside fn must survive the round trip.
    op = func_graph.get_operation_by_name("y")
    self.assertEqual(len(op.control_inputs), 3)
    self.assertEqual(op.control_inputs[0].name, "assign")
    self.assertEqual(op.control_inputs[1].name, "inp")
    self.assertEqual(op.control_inputs[2].name, "x")

  def testAttributesForArgDef(self):

    @function.defun
    def fn(x):
      return x

    inp = constant_op.constant(1.0)
    fdef = fn.get_concrete_function(inp).function_def
    # Attach a custom attribute to the first argument and check it ends up
    # on the corresponding Placeholder node of the GraphDef.
    fdef.arg_attr[0].attr["_test_attr"].s = "value".encode("ascii")
    graph_def = function_def_to_graph.function_def_to_graph_def(fdef)
    placeholders = [
        ndef for ndef in graph_def[0].node if ndef.op == "Placeholder"
    ]
    self.assertEqual(1, len(placeholders))
    self.assertEqual(placeholders[0].attr["_test_attr"].s,
                     "value".encode("ascii"))
# Standard TensorFlow test entry point: discover and run all TestCases above.
if __name__ == "__main__":
  test.main()
| apache-2.0 |
tamarin-prover/tamarin-prover | examples/asiaccs20-eccDAA/QUOTE/Oracle_Quote.py | 2 | 15087 | #!/usr/bin/python
"""Tamarin-prover proof-search oracle (Python 2 script).

Reads the current list of open goals from stdin (one "index:goal" per
line) and the lemma name from argv[1], then prints goal indices to stdout
in the order Tamarin should try them (highest-priority first).  Each
lemma family gets its own hand-tuned table of regex -> priority rules;
within a table the first matching pattern wins, so rule order matters.
"""
import re
import os  # NOTE(review): unused import — confirm before removing
import sys

debug = True  # NOTE(review): never read; presumably a leftover flag

lines = sys.stdin.readlines()
lemma = sys.argv[1]

# INPUT:
# - lines contain a list of "%i:goal" where "%i" is the index of the goal
# - lemma contain the name of the lemma under scrutiny
# OUTPUT:
# - (on stdout) a list of ordered index separated by EOL

# rank[p] collects the indices of all goals assigned priority p;
# goals in higher-indexed buckets are emitted first.
rank = []  # list of list of goals, main list is ordered by priority
maxPrio = 110
for i in range(0,maxPrio):
    rank.append([])

if lemma[0:11]=="oracle_corr": #SP1
    print "applying oracle to "+lemma
    for line in lines:
        num = line.split(':')[0]
        # State facts first, then protocol-step facts, then attacker
        # knowledge (!KU) goals.
        if re.match('.*Host_.*', line): rank[109].append(num)
        elif re.match('.*\!SignatureVerified.*', line): rank[109].append(num)
        elif re.match('.*Issuer_*', line): rank[108].append(num)
        elif re.match('.*\!TPM_.*', line): rank[107].append(num)
        elif re.match('.*TPM_tkt.*', line): rank[107].append(num)
        elif re.match('.*TPM_CV_E.*', line): rank[107].append(num)
        elif re.match('.*TPM_Commit_RCV1.*', line): rank[107].append(num)
        elif re.match('.*createPrimary.*', line): rank[90].append(num)
        elif re.match('.*returnEK\'>', line): rank[89].append(num)
        elif re.match('.*createDAAKey.*', line): rank[88].append(num)
        elif re.match('.*returnDAAKey\'>', line): rank[87].append(num)
        elif re.match('.*TPM2_ActivateCredentials\'>', line): rank[86].append(num)
        elif re.match('.*ret_TPM2_ActivateCredentials\'>', line): rank[85].append(num)
        elif re.match('.*TPM2_Commit\'>', line): rank[84].append(num)
        elif re.match('.*retTPM2_commit\'>', line): rank[83].append(num)
        elif re.match('.*TPM2_Hash\'>', line): rank[82].append(num)
        elif re.match('.*ret_TPM2_Hash\'>', line): rank[81].append(num)
        elif re.match('.*\'TPM2_Sign\'>', line): rank[80].append(num)
        elif re.match('.*ret_TPM2_Sign\'>', line): rank[79].append(num)
        elif re.match('.*\'TPM2_ActivateCredentials_2\'.*', line): rank[78].append(num)
        elif re.match('.*ret_TPM2_ActivateCredentials_2\'>', line): rank[77].append(num)
        elif re.match('.*TPM2_Commit_rand\'>', line): rank[76].append(num)
        elif re.match('.*ret_TPM2_Commit_rand\'>', line): rank[75].append(num)
        elif re.match('.*TPM2_Hash_2\'>', line): rank[74].append(num)
        elif re.match('.*ret_TPM2_Hash_2\'>', line): rank[73].append(num)
        elif re.match('.*\'TPM2_Sign_2\'>', line): rank[72].append(num)
        elif re.match('.*ret_TPM2_Sign_2\'>', line): rank[71].append(num)
        elif re.match('.*\!KU\( pk\(KDF_EK\(~TPM_EK_Seed\).*', line): rank[70].append(num)
        elif re.match('.*\!KU\( multp\(f.*, \'P1\'.*', line): rank[70].append(num)
        elif re.match('.*\!KU\(.*<\'1\'.*', line): rank[60].append(num)
        elif re.match('.*\!KU\( KDF_EK\(~TPM_EK_Seed\).*', line): rank[50].append(num)
elif lemma[0:15]=="oracle_SP3_Unfo":#SP3
    print "applying oracle to "+lemma
    for line in lines:
        num = line.split(':')[0]
        if re.match('.*\!Pk\(.*', line): rank[109].append(num)
        elif re.match('.*\!TPM_.*', line): rank[109].append(num)
        # Fresh secrets the attacker must not know get top !KU priority.
        elif re.match('.*\!KU\( ~y \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~x \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~r \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~f \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~l \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~r_cv.*', line): rank[100].append(num)
        elif re.match('.*\!KU\( plus\(~r_cv.*', line): rank[99].append(num)
        elif re.match('.*\!KU\( senc\(.*~r.*~.*~.*~.*~.*~.*~.*.~.*~.*', line): rank[98].append(num)
        elif re.match('.*\!KU\( senc\(.*~f.*~.*~.*~.*~.*~.*~.*.~.*~.*', line): rank[98].append(num)
        elif re.match('.*\!KU\( multp\(~l,.*', line): rank[97].append(num)
        elif re.match('.*\!KU\( H_n_2\(Nonce.*quotePCR.*', line): rank[96].append(num)
        elif re.match('.*\!KU\( E_S.*',line): rank[95].append(num)
        elif re.match('.*createPrimary.*', line): rank[93].append(num)
        elif re.match('.*returnEK\'>', line): rank[92].append(num)
        elif re.match('.*createDAAKey.*', line): rank[91].append(num)
        elif re.match('.*returnDAAKey\'>', line): rank[90].append(num)
        elif re.match('.*\!KU\(.*<\'1\'.*', line): rank[89].append(num)
        elif re.match('.*In_S\( \$AS, .*', line): rank[87].append(num)
        elif re.match('.*In_S\( \$PS, .*', line): rank[87].append(num)
        elif re.match('.*TPM2_ActivateCredentials\'>', line): rank[86].append(num)
        elif re.match('.*ret_TPM2_ActivateCredentials\'>', line): rank[85].append(num)
        elif re.match('.*TPM2_Commit\'>', line): rank[84].append(num)
        elif re.match('.*retTPM2_commit\'>', line): rank[83].append(num)
        elif re.match('.*TPM2_Hash\'>', line): rank[82].append(num)
        elif re.match('.*ret_TPM2_Hash\'>', line): rank[81].append(num)
        elif re.match('.*\'TPM2_Sign\'>', line): rank[80].append(num)
        elif re.match('.*ret_TPM2_Sign\'>', line): rank[79].append(num)
        elif re.match('.*\'TPM2_ActivateCredentials_2\'.*', line): rank[78].append(num)
        elif re.match('.*ret_TPM2_ActivateCredentials_2\'>', line): rank[77].append(num)
        elif re.match('.*TPM2_Commit_rand\'>', line): rank[76].append(num)
        elif re.match('.*ret_TPM2_Commit_rand\'>', line): rank[75].append(num)
        elif re.match('.*TPM2_Quote\'>', line): rank[74].append(num)
        elif re.match('.*ret_TPM2_Quote\'>', line): rank[73].append(num)
        elif re.match('.*\!KU\( plus\(r_cv.*', line): rank[70].append(num)
        elif re.match('.*\!KU\(.*H_n_2\(Nonce\(rnd.*', line): rank[60].append(num)
        elif re.match('.*\!KU\( multp\(~y.*', line): rank[60].append(num)
        elif re.match('.*\!KU\( multp\(multp\(~r.*', line): rank[40].append(num)
        elif re.match('.*\!KU\( KDF_EK\(~TPM_EK_Seed.*', line): rank[40].append(num)
        # NOTE(review): duplicate of the multp(multp(~r pattern two lines
        # above — this branch is unreachable.
        elif re.match('.*\!KU\( multp\(multp\(~r.*', line): rank[40].append(num)
        elif re.match('.*\!KU\( KDF_AES\(~TPM.*', line): rank[30].append(num)
elif lemma[0:17]=="oracle_auth_alive":
    print "applying oracle to "+lemma
    for line in lines:
        num = line.split(':')[0]
        if re.match('.*In_S\(.*', line): rank[106].append(num)
        elif re.match('.*Host_.*', line): rank[109].append(num)
        elif re.match('.*Issuer_*', line): rank[108].append(num)
elif lemma[0:16]=="oracle_auth_weak" or lemma[0:15]=="oracle_auth_non" or lemma[0:21]=="oracle_auth_injective":
    print "applying oracle to "+lemma
    for line in lines:
        num = line.split(':')[0]
        if re.match('.*In_S\( \$B,.*', line): rank[109].append(num)
        elif re.match('.*In_S\( \$A,.*', line): rank[109].append(num)
        elif re.match('.*In_S\( \$AS,.*', line): rank[109].append(num)
        elif re.match('.*In_S\( \$PS,.*', line): rank[109].append(num)
        elif re.match('.*_State_.*', line): rank[109].append(num)
        elif re.match('.*\!Pk\(.*', line): rank[109].append(num)
        elif re.match('.*createDAAKey\'>', line): rank[108].append(num)
        elif re.match('.*createPrimary\'\)', line): rank[108].append(num)
        elif re.match('.*returnEK\'>', line): rank[108].append(num)
        elif re.match('.*returnDAAKey\'>', line): rank[108].append(num)
        elif re.match('.*In_S\(.*', line): rank[107].append(num)
        elif re.match('.*\!KU\( KDF_EK\(~TPM.*', line): rank[108].append(num)
        elif re.match('.*\!KU\( KDF_AES\(~TPM.*', line): rank[108].append(num)
        elif re.match('.*\!KU\( curlyK\(~K_1\).*', line): rank[107].append(num)
        elif re.match('.*\!KU\( curlyK\(~K_1_1\).*', line): rank[107].append(num)
        elif re.match('.*\!KU\( multp\(~y,.*', line): rank[107].append(num)
        elif re.match('.*\!KU\( ~y \)', line): rank[107].append(num)
        elif re.match('.*\!KU\(.*curlyK\(~K_1_1\).*', line): rank[106].append(num)
        elif re.match('.*\!KU\( plus\(r_cv.*multp\(H_n_2.*~f\)', line): rank[106].append(num)
        elif re.match('.*\!KU\( plus\(r_cv.*multp\(H_n_2.*~f.1\)', line): rank[106].append(num)
        elif re.match('.*\!KU\( plus\(multp\(~x,.*', line): rank[106].append(num)
        elif re.match('.*\!KU\( multp\(~x,.*', line): rank[106].append(num)
        elif re.match('.*\!KU\( multp\(H_n_2.*~f\)', line): rank[106].append(num)
        elif re.match('.*\!KU\( multp\(H_n_2.*~f.1\)', line): rank[106].append(num)
        elif re.match('.*\!KU\( ~f \)', line): rank[106].append(num)
        elif re.match('.*\!KU\( ~f.1 \)', line): rank[106].append(num)
        elif re.match('.*\!KU\( ~x \)', line): rank[106].append(num)
        elif re.match('.*\!KU\(.*<\'1\'.*~.*~.*', line): rank[101].append(num)
        elif re.match('.*\!KU\( senc.*~.*~.*', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~TPM_EK.*', line): rank[100].append(num)
        # Everything else: defer nonce/split goals to the very end.
        elif (not(re.match('.*KU\( Nonce\(~n_J\).*', line)) and not(re.match('.*splitEqs\(0\).*', line))): rank[1].append(num)
        elif (re.match('.*KU\( Nonce\(~n_J\).*', line) or (re.match('.*splitEqs\(0\).*', line))): rank[0].append(num)
elif lemma[0:19]=="oracle_auth_secrecy":
    print "applying oracle to "+lemma
    for line in lines:
        num = line.split(':')[0]
        if re.match('.*In_S\( \$B,.*', line): rank[109].append(num)
        elif re.match('.*In_S\( \$A,.*', line): rank[109].append(num)
        elif re.match('.*In_S\( \$AS,.*', line): rank[109].append(num)
        elif re.match('.*In_S\( \$PS,.*', line): rank[109].append(num)
        elif re.match('.*_State_.*', line): rank[109].append(num)
        elif re.match('.*\!Pk\(.*', line): rank[109].append(num)
        elif re.match('.*createDAAKey\'>', line): rank[108].append(num)
        elif re.match('.*createPrimary\'\)', line): rank[108].append(num)
        elif re.match('.*returnEK\'>', line): rank[108].append(num)
        elif re.match('.*returnDAAKey\'>', line): rank[108].append(num)
        elif re.match('.*In_S\(.*', line): rank[107].append(num)
        elif re.match('.*\!KU\( pk\( KDF_EK\(~TPM_EK.*', line): rank[100].append(num)
        elif re.match('.*\!KU\( KDF_EK\(~TPM_EK.*', line): rank[100].append(num)
        elif re.match('.*\!KU\( curlyK\(~K_2\) \)', line): rank[91].append(num)
        elif re.match('.*\!KU\( curlyK\(~K_1\) \)', line): rank[90].append(num)
        elif re.match('.*\!KU\( ~K_2 \)', line): rank[91].append(num)
        elif re.match('.*\!KU\( ~K_1 \)', line): rank[90].append(num)
        elif re.match('.*\!KU\( ~K \)', line): rank[90].append(num)
        elif re.match('.*\!KU\( plus\(r_cv.*multp\(H_n_2.*~f\)', line): rank[80].append(num)
        elif re.match('.*\!KU\( plus\(r_cv.*multp\(H_n_2.*~f.1\)', line): rank[80].append(num)
        elif re.match('.*\!KU\( plus\(multp\(~x,.*', line): rank[70].append(num)
        elif re.match('.*\!KU\( multp\(~x,.*', line): rank[85].append(num)
        elif re.match('.*\!KU\( multp\(~y,.*', line): rank[90].append(num)
        elif re.match('.*\!KU\( multp\(H_n_2.*~f\)', line): rank[70].append(num)
        elif re.match('.*\!KU\( multp\(H_n_2.*~f.1\)', line): rank[70].append(num)
        elif re.match('.*\!KU\( ~f \)', line): rank[105].append(num)
        elif re.match('.*\!KU\( ~f.1 \)', line): rank[105].append(num)
        elif re.match('.*\!KU\( ~x \)', line): rank[105].append(num)
        elif re.match('.*\!KU\( ~y \)', line): rank[105].append(num)
        elif re.match('.*\!KU\( ~s_2 \)', line): rank[105].append(num)
        elif re.match('.*\!KU\( ~s \)', line): rank[105].append(num)
        elif re.match('.*\!KU\( senc.*~.*~.*', line): rank[60].append(num)
        elif re.match('.*\!KU\( ~TPM_EK.*', line): rank[105].append(num)
elif lemma[0:14]=="oracle_SP4_Non":
    print "applying oracle to "+lemma
    for line in lines:
        num = line.split(':')[0]
        if re.match('.*\!SignatureVerified.*', line): rank[108].append(num)
        elif re.match('.*Host_.*', line): rank[107].append(num)
        elif re.match('.*_State_.*', line): rank[106].append(num)
        elif re.match('.*\'ret_TPM2_Sign_2\'>', line): rank[105].append(num)
        elif re.match('.*\!KU\( ~y \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~x \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~r \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~f \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~l \)', line): rank[100].append(num)
        # NOTE(review): duplicate of the ~y pattern above — unreachable.
        elif re.match('.*\!KU\( ~y \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~pcr_set \)', line): rank[100].append(num)
        elif re.match('.*\!KU\( ~r_cv.*', line): rank[100].append(num)
        elif re.match('.*\!KU\( plus\(~r_cv.*', line): rank[99].append(num)
        elif re.match('.*\!KU\( H_n_2\(Nonce.*quotePCR.*', line): rank[98].append(num)
        elif re.match('.*\!KU\( H_k_2\(H_k_9\(\'pcr.*', line): rank[97].append(num)
        elif re.match('.*\!KU\( H_k_9\(\'pcr.*', line): rank[97].append(num)
        elif re.match('.*\!KU\( multp\(~l,.*', line): rank[97].append(num)
        elif re.match('.*\!KU\( senc\(.*~r.*~.*~.*~.*~.*~.*~.*.~.*~.*', line): rank[96].append(num)
        elif re.match('.*\!KU\( senc\(.*~f.*~.*~.*~.*~.*~.*~.*.~.*~.*', line): rank[96].append(num)
        elif re.match('.*\!KU\( E_S.*',line): rank[95].append(num)
        elif re.match('.*createPrimary.*', line): rank[93].append(num)
        elif re.match('.*returnEK\'>', line): rank[92].append(num)
        elif re.match('.*createDAAKey.*', line): rank[91].append(num)
        elif re.match('.*returnDAAKey\'>', line): rank[90].append(num)
        elif re.match('.*\!KU\(.*<\'1\'.*', line): rank[89].append(num)
        elif re.match('.*\!KU\( plus\(r_cv.*', line): rank[88].append(num)
        elif re.match('.*\!KU\( multp\(H_n_2\(Nonce.*', line): rank[88].append(num)
        elif re.match('.*In_S\( \$AS, .*', line): rank[87].append(num)
        elif re.match('.*In_S\( \$PS, .*', line): rank[87].append(num)
        elif re.match('.*TPM2_ActivateCredentials\'>', line): rank[86].append(num)
        elif re.match('.*ret_TPM2_ActivateCredentials\'>', line): rank[85].append(num)
        elif re.match('.*TPM2_Commit\'>', line): rank[84].append(num)
        elif re.match('.*retTPM2_commit\'>', line): rank[83].append(num)
        elif re.match('.*TPM2_Hash\'>', line): rank[82].append(num)
        elif re.match('.*ret_TPM2_Hash\'>', line): rank[81].append(num)
        elif re.match('.*\'TPM2_Sign\'>', line): rank[80].append(num)
        elif re.match('.*ret_TPM2_Sign\'>', line): rank[79].append(num)
        elif re.match('.*\'TPM2_ActivateCredentials_2\'.*', line): rank[78].append(num)
        elif re.match('.*ret_TPM2_ActivateCredentials_2\'>', line): rank[77].append(num)
        elif re.match('.*TPM2_Commit_rand\'>', line): rank[76].append(num)
        elif re.match('.*ret_TPM2_Commit_rand\'>', line): rank[75].append(num)
        elif re.match('.*TPM2_Quote\'>', line): rank[74].append(num)
        elif re.match('.*ret_TPM2_Quote\'>', line): rank[73].append(num)
        elif re.match('.*\!KU\(.*H_n_2\(Nonce\(rnd.*', line): rank[60].append(num)
        elif re.match('.*\!KU\( multp\(~y.*', line): rank[60].append(num)
        elif re.match('.*\!KU\( multp\(multp\(~r.*', line): rank[40].append(num)
        elif re.match('.*\!KU\( KDF_EK\(~TPM_EK_Seed.*', line): rank[40].append(num)
        # NOTE(review): duplicate of the multp(multp(~r pattern two lines
        # above — this branch is unreachable.
        elif re.match('.*\!KU\( multp\(multp\(~r.*', line): rank[40].append(num)
        elif re.match('.*\!KU\( KDF_AES\(~TPM.*', line): rank[30].append(num)
else:
    # Unknown lemma: tell Tamarin to fall back to its default heuristic.
    print "not applying the rule"
    exit(0)

# Ordering all goals by ranking (higher first)
for listGoals in reversed(rank):
    for goal in listGoals:
        sys.stderr.write(goal)
        print goal
| gpl-3.0 |
rationalAgent/edx-platform-custom | common/lib/xmodule/xmodule/combined_open_ended_module.py | 2 | 19695 | import logging
from lxml import etree
from pkg_resources import resource_string
from xmodule.raw_module import RawDescriptor
from .x_module import XModule
from xblock.core import Integer, Scope, String, List, Float, Boolean
from xmodule.open_ended_grading_classes.combined_open_ended_modulev1 import CombinedOpenEndedV1Module, CombinedOpenEndedV1Descriptor
from collections import namedtuple
from .fields import Date
import textwrap
log = logging.getLogger("mitx.courseware")

# Course-author-configurable settings understood by version 1 of the module.
V1_SETTINGS_ATTRIBUTES = ["display_name", "max_attempts", "graded", "accept_file_upload",
                          "skip_spelling_checks", "due", "graceperiod", "weight"]

# Per-student state fields tracked by version 1.
V1_STUDENT_ATTRIBUTES = ["current_task_number", "task_states", "state",
                         "student_attempts", "ready_to_reset"]

V1_ATTRIBUTES = V1_SETTINGS_ATTRIBUTES + V1_STUDENT_ATTRIBUTES

# Bundles the descriptor/module classes and attribute lists for one version.
VersionTuple = namedtuple('VersionTuple', ['descriptor', 'module', 'settings_attributes', 'student_attributes'])

# Maps version number -> VersionTuple; membership here defines which
# versions are considered valid (see VersionInteger.from_json below).
VERSION_TUPLES = {
    1: VersionTuple(CombinedOpenEndedV1Descriptor, CombinedOpenEndedV1Module, V1_SETTINGS_ATTRIBUTES,
                    V1_STUDENT_ATTRIBUTES),
}

# Fallback when a stored version is missing or unrecognized.
DEFAULT_VERSION = 1
DEFAULT_DATA = textwrap.dedent("""\
<combinedopenended>
<prompt>
<h3>Censorship in the Libraries</h3>
<p>'All of us can think of a book that we hope none of our children or any other children have taken off the shelf. But if I have the right to remove that book from the shelf -- that work I abhor -- then you also have exactly the same right and so does everyone else. And then we have no books left on the shelf for any of us.' --Katherine Paterson, Author
</p>
<p>
Write a persuasive essay to a newspaper reflecting your vies on censorship in libraries. Do you believe that certain materials, such as books, music, movies, magazines, etc., should be removed from the shelves if they are found offensive? Support your position with convincing arguments from your own experience, observations, and/or reading.
</p>
</prompt>
<rubric>
<rubric>
<category>
<description>
Ideas
</description>
<option>
Difficult for the reader to discern the main idea. Too brief or too repetitive to establish or maintain a focus.
</option>
<option>
Attempts a main idea. Sometimes loses focus or ineffectively displays focus.
</option>
<option>
Presents a unifying theme or main idea, but may include minor tangents. Stays somewhat focused on topic and task.
</option>
<option>
Presents a unifying theme or main idea without going off on tangents. Stays completely focused on topic and task.
</option>
</category>
<category>
<description>
Content
</description>
<option>
Includes little information with few or no details or unrelated details. Unsuccessful in attempts to explore any facets of the topic.
</option>
<option>
Includes little information and few or no details. Explores only one or two facets of the topic.
</option>
<option>
Includes sufficient information and supporting details. (Details may not be fully developed; ideas may be listed.) Explores some facets of the topic.
</option>
<option>
Includes in-depth information and exceptional supporting details that are fully developed. Explores all facets of the topic.
</option>
</category>
<category>
<description>
Organization
</description>
<option>
Ideas organized illogically, transitions weak, and response difficult to follow.
</option>
<option>
Attempts to logically organize ideas. Attempts to progress in an order that enhances meaning, and demonstrates use of transitions.
</option>
<option>
Ideas organized logically. Progresses in an order that enhances meaning. Includes smooth transitions.
</option>
</category>
<category>
<description>
Style
</description>
<option>
Contains limited vocabulary, with many words used incorrectly. Demonstrates problems with sentence patterns.
</option>
<option>
Contains basic vocabulary, with words that are predictable and common. Contains mostly simple sentences (although there may be an attempt at more varied sentence patterns).
</option>
<option>
Includes vocabulary to make explanations detailed and precise. Includes varied sentence patterns, including complex sentences.
</option>
</category>
<category>
<description>
Voice
</description>
<option>
Demonstrates language and tone that may be inappropriate to task and reader.
</option>
<option>
Demonstrates an attempt to adjust language and tone to task and reader.
</option>
<option>
Demonstrates effective adjustment of language and tone to task and reader.
</option>
</category>
</rubric>
</rubric>
<task>
<selfassessment/></task>
<task>
<openended min_score_to_attempt="4" max_score_to_attempt="12" >
<openendedparam>
<initial_display>Enter essay here.</initial_display>
<answer_display>This is the answer.</answer_display>
<grader_payload>{"grader_settings" : "ml_grading.conf", "problem_id" : "6.002x/Welcome/OETest"}</grader_payload>
</openendedparam>
</openended>
</task>
<task>
<openended min_score_to_attempt="9" max_score_to_attempt="12" >
<openendedparam>
<initial_display>Enter essay here.</initial_display>
<answer_display>This is the answer.</answer_display>
<grader_payload>{"grader_settings" : "peer_grading.conf", "problem_id" : "6.002x/Welcome/OETest"}</grader_payload>
</openendedparam>
</openended>
</task>
</combinedopenended>
""")
class VersionInteger(Integer):
"""
A model type that converts from strings to integers when reading from json.
Also does error checking to see if version is correct or not.
"""
def from_json(self, value):
try:
value = int(value)
if value not in VERSION_TUPLES:
version_error_string = "Could not find version {0}, using version {1} instead"
log.error(version_error_string.format(value, DEFAULT_VERSION))
value = DEFAULT_VERSION
except:
value = DEFAULT_VERSION
return value
class CombinedOpenEndedFields(object):
display_name = String(
display_name="Display Name",
help="This name appears in the horizontal navigation at the top of the page.",
default="Open Response Assessment",
scope=Scope.settings
)
current_task_number = Integer(
help="Current task that the student is on.",
default=0,
scope=Scope.user_state
)
task_states = List(
help="List of state dictionaries of each task within this module.",
scope=Scope.user_state
)
state = String(
help="Which step within the current task that the student is on.",
default="initial",
scope=Scope.user_state
)
graded = Boolean(
display_name="Graded",
help='Defines whether the student gets credit for grading this problem.',
default=False,
scope=Scope.settings
)
student_attempts = Integer(
help="Number of attempts taken by the student on this problem",
default=0,
scope=Scope.user_state
)
ready_to_reset = Boolean(
help="If the problem is ready to be reset or not.",
default=False,
scope=Scope.user_state
)
max_attempts = Integer(
display_name="Maximum Attempts",
help="The number of times the student can try to answer this problem.",
default=1,
scope=Scope.settings,
values={"min" : 1 }
)
accept_file_upload = Boolean(
display_name="Allow File Uploads",
help="Whether or not the student can submit files as a response.",
default=False,
scope=Scope.settings
)
skip_spelling_checks = Boolean(
display_name="Disable Quality Filter",
help="If False, the Quality Filter is enabled and submissions with poor spelling, short length, or poor grammar will not be peer reviewed.",
default=False,
scope=Scope.settings
)
due = Date(
help="Date that this problem is due by",
default=None,
scope=Scope.settings
)
graceperiod = String(
help="Amount of time after the due date that submissions will be accepted",
default=None,
scope=Scope.settings
)
version = VersionInteger(help="Current version number", default=DEFAULT_VERSION, scope=Scope.settings)
data = String(help="XML data for the problem", scope=Scope.content,
default=DEFAULT_DATA)
weight = Float(
display_name="Problem Weight",
help="Defines the number of points each problem is worth. If the value is not set, each problem is worth one point.",
scope=Scope.settings,
values={"min" : 0 , "step": ".1"},
default=1
)
markdown = String(
help="Markdown source of this module",
default=textwrap.dedent("""\
[prompt]
<h3>Censorship in the Libraries</h3>
<p>'All of us can think of a book that we hope none of our children or any other children have taken off the shelf. But if I have the right to remove that book from the shelf -- that work I abhor -- then you also have exactly the same right and so does everyone else. And then we have no books left on the shelf for any of us.' --Katherine Paterson, Author
</p>
<p>
Write a persuasive essay to a newspaper reflecting your vies on censorship in libraries. Do you believe that certain materials, such as books, music, movies, magazines, etc., should be removed from the shelves if they are found offensive? Support your position with convincing arguments from your own experience, observations, and/or reading.
</p>
[prompt]
[rubric]
+ Ideas
- Difficult for the reader to discern the main idea. Too brief or too repetitive to establish or maintain a focus.
- Attempts a main idea. Sometimes loses focus or ineffectively displays focus.
- Presents a unifying theme or main idea, but may include minor tangents. Stays somewhat focused on topic and task.
- Presents a unifying theme or main idea without going off on tangents. Stays completely focused on topic and task.
+ Content
- Includes little information with few or no details or unrelated details. Unsuccessful in attempts to explore any facets of the topic.
- Includes little information and few or no details. Explores only one or two facets of the topic.
- Includes sufficient information and supporting details. (Details may not be fully developed; ideas may be listed.) Explores some facets of the topic.
- Includes in-depth information and exceptional supporting details that are fully developed. Explores all facets of the topic.
+ Organization
- Ideas organized illogically, transitions weak, and response difficult to follow.
- Attempts to logically organize ideas. Attempts to progress in an order that enhances meaning, and demonstrates use of transitions.
- Ideas organized logically. Progresses in an order that enhances meaning. Includes smooth transitions.
+ Style
- Contains limited vocabulary, with many words used incorrectly. Demonstrates problems with sentence patterns.
- Contains basic vocabulary, with words that are predictable and common. Contains mostly simple sentences (although there may be an attempt at more varied sentence patterns).
- Includes vocabulary to make explanations detailed and precise. Includes varied sentence patterns, including complex sentences.
+ Voice
- Demonstrates language and tone that may be inappropriate to task and reader.
- Demonstrates an attempt to adjust language and tone to task and reader.
- Demonstrates effective adjustment of language and tone to task and reader.
[rubric]
[tasks]
(Self), ({4-12}AI), ({9-12}Peer)
[tasks]
"""),
scope=Scope.settings
)
class CombinedOpenEndedModule(CombinedOpenEndedFields, XModule):
"""
This is a module that encapsulates all open ended grading (self assessment, peer assessment, etc).
It transitions between problems, and support arbitrary ordering.
Each combined open ended module contains one or multiple "child" modules.
Child modules track their own state, and can transition between states. They also implement get_html and
handle_ajax.
The combined open ended module transitions between child modules as appropriate, tracks its own state, and passess
ajax requests from the browser to the child module or handles them itself (in the cases of reset and next problem)
ajax actions implemented by all children are:
'save_answer' -- Saves the student answer
'save_assessment' -- Saves the student assessment (or external grader assessment)
'save_post_assessment' -- saves a post assessment (hint, feedback on feedback, etc)
ajax actions implemented by combined open ended module are:
'reset' -- resets the whole combined open ended module and returns to the first child module
'next_problem' -- moves to the next child module
'get_results' -- gets results from a given child module
Types of children. Task is synonymous with child module, so each combined open ended module
incorporates multiple children (tasks):
openendedmodule
selfassessmentmodule
CombinedOpenEndedModule.__init__ takes the same arguments as xmodule.x_module:XModule.__init__
"""
STATE_VERSION = 1
# states
INITIAL = 'initial'
ASSESSING = 'assessing'
INTERMEDIATE_DONE = 'intermediate_done'
DONE = 'done'
icon_class = 'problem'
js = {
'coffee':
[
resource_string(__name__, 'js/src/combinedopenended/display.coffee'),
resource_string(__name__, 'js/src/collapsible.coffee'),
resource_string(__name__, 'js/src/javascript_loader.coffee'),
]
}
js_module_name = "CombinedOpenEnded"
css = {'scss': [resource_string(__name__, 'css/combinedopenended/display.scss')]}
def __init__(self, *args, **kwargs):
"""
Definition file should have one or many task blocks, a rubric block, and a prompt block.
See DEFAULT_DATA for a sample.
"""
XModule.__init__(self, *args, **kwargs)
self.system.set('location', self.location)
if self.task_states is None:
self.task_states = []
version_tuple = VERSION_TUPLES[self.version]
self.student_attributes = version_tuple.student_attributes
self.settings_attributes = version_tuple.settings_attributes
attributes = self.student_attributes + self.settings_attributes
static_data = {}
instance_state = {k: getattr(self, k) for k in attributes}
self.child_descriptor = version_tuple.descriptor(self.system)
self.child_definition = version_tuple.descriptor.definition_from_xml(etree.fromstring(self.data), self.system)
self.child_module = version_tuple.module(self.system, self.location, self.child_definition, self.child_descriptor,
instance_state=instance_state, static_data=static_data,
attributes=attributes)
self.save_instance_data()
def get_html(self):
self.save_instance_data()
return_value = self.child_module.get_html()
return return_value
def handle_ajax(self, dispatch, data):
self.save_instance_data()
return_value = self.child_module.handle_ajax(dispatch, data)
self.save_instance_data()
return return_value
def get_instance_state(self):
return self.child_module.get_instance_state()
def get_score(self):
return self.child_module.get_score()
def max_score(self):
return self.child_module.max_score()
def get_progress(self):
return self.child_module.get_progress()
@property
def due_date(self):
return self.child_module.due_date
def save_instance_data(self):
for attribute in self.student_attributes:
setattr(self, attribute, getattr(self.child_module, attribute))
class CombinedOpenEndedDescriptor(CombinedOpenEndedFields, RawDescriptor):
"""
Module for adding combined open ended questions
"""
mako_template = "widgets/open-ended-edit.html"
module_class = CombinedOpenEndedModule
has_score = True
always_recalculate_grades = True
template_dir_name = "combinedopenended"
#Specify whether or not to pass in S3 interface
needs_s3_interface = True
#Specify whether or not to pass in open ended interface
needs_open_ended_interface = True
metadata_attributes = RawDescriptor.metadata_attributes
js = {'coffee': [resource_string(__name__, 'js/src/combinedopenended/edit.coffee')]}
js_module_name = "OpenEndedMarkdownEditingDescriptor"
css = {'scss': [resource_string(__name__, 'css/editor/edit.scss'), resource_string(__name__, 'css/combinedopenended/edit.scss')]}
metadata_translations = {
'is_graded': 'graded',
'attempts': 'max_attempts',
}
def get_context(self):
_context = RawDescriptor.get_context(self)
_context.update({'markdown': self.markdown,
'enable_markdown': self.markdown is not None})
return _context
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(CombinedOpenEndedDescriptor, self).non_editable_metadata_fields
non_editable_fields.extend([CombinedOpenEndedDescriptor.due, CombinedOpenEndedDescriptor.graceperiod,
CombinedOpenEndedDescriptor.markdown, CombinedOpenEndedDescriptor.version])
return non_editable_fields
| agpl-3.0 |
Fl0rianFischer/sme_odoo | addons/mrp/res_config.py | 45 | 2642 | # -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from openerp.osv import fields, osv
from openerp.tools.translate import _
class mrp_config_settings(osv.osv_memory):
    """Transient configuration wizard for the Manufacturing (MRP) app.

    Each selection field either toggles a security/feature group
    (``implied_group``) or triggers installation of an optional module
    (fields prefixed ``module_``), per res.config.settings conventions.
    NOTE(review): help strings below contain grammar slips but are left
    untouched — they are translation keys matched byte-for-byte in .po files.
    """
    _name = 'mrp.config.settings'
    _inherit = 'res.config.settings'

    _columns = {
        'group_product_variant': fields.selection([
            (0, "No variants on products"),
            (1, 'Products can have several attributes, defining variants (Example: size, color,...)')
            ], "Product Variants",
            help='Work with product variant allows you to define some variant of the same products, an ease the product management in the ecommerce for example',
            implied_group='product.group_product_variant'),
        'module_mrp_operations': fields.selection([
            (0, "Do not use a planning for the work orders "),
            (1, "Allow detailed planning of work orders")
            ], "Work Order Planning",
            help='This allows to add state, date_start,date_stop in production order operation lines (in the "Work Centers" tab).\n'
                 '-This installs the module mrp_operations.'),
        'module_mrp_byproduct': fields.selection([
            (0, "No by-products in bills of materials (A + B --> C)"),
            (1, "Bills of materials may produce residual products (A + B --> C + D)")
            ], "By-Products",
            help='You can configure by-products in the bill of material.\n'
                 'Without this module: A + B + C -> D.\n'
                 'With this module: A + B + C -> D + E.\n'
                 '-This installs the module mrp_byproduct.'),
        'group_mrp_routings': fields.selection([
            (0, "Manage production by manufacturing orders"),
            (1, "Manage production by work orders")
            ], "Routings",
            implied_group='mrp.group_mrp_routings',
            help='Work Order Operations allow you to create and manage the manufacturing operations that should be followed '
                 'within your work centers in order to produce a product. They are attached to bills of materials '
                 'that will define the required raw materials.'),
        'group_rounding_efficiency': fields.selection([
            (0, "No rounding and efficiency on bills of materials"),
            (1, "Manage rounding and efficiency of bills of materials components")
            ], "Rounding efficiency",
            implied_group='mrp.group_rounding_efficiency',
            help="""Allow to manage product rounding on quantity and product efficiency during production process"""),
    }
| gpl-3.0 |
aidanlister/django | tests/many_to_one/models.py | 215 | 2785 | """
Many-to-one relationships
To define a many-to-one relationship, use ``ForeignKey()``.
"""
from __future__ import unicode_literals
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Reporter(models.Model):
    # The "one" side of the relationship: Articles below hold a FK to Reporter.
    first_name = models.CharField(max_length=30)
    last_name = models.CharField(max_length=30)
    email = models.EmailField()

    def __str__(self):
        # Human-readable identity used in test assertions.
        return "%s %s" % (self.first_name, self.last_name)


@python_2_unicode_compatible
class Article(models.Model):
    # The "many" side: deleting a Reporter cascades to its Articles.
    headline = models.CharField(max_length=100)
    pub_date = models.DateField()
    reporter = models.ForeignKey(Reporter, models.CASCADE)

    def __str__(self):
        return self.headline

    class Meta:
        ordering = ('headline',)
# If ticket #1578 ever slips back in, these models will not be able to be
# created (the field names being lower-cased versions of their opposite
# classes is important here).
class First(models.Model):
    second = models.IntegerField()


class Second(models.Model):
    # Field name "first" is deliberately the lower-cased opposite class name.
    first = models.ForeignKey(First, models.CASCADE, related_name='the_first')


# Protect against repetition of #1839, #2415 and #2536.
class Third(models.Model):
    name = models.CharField(max_length=20)
    # Self-referential FK; nullable so a root row can exist.
    third = models.ForeignKey('self', models.SET_NULL, null=True, related_name='child_set')


class Parent(models.Model):
    name = models.CharField(max_length=20, unique=True)
    # String reference ('Child') creates a circular Parent <-> Child relation.
    bestchild = models.ForeignKey('Child', models.SET_NULL, null=True, related_name='favored_by')


class Child(models.Model):
    name = models.CharField(max_length=20)
    parent = models.ForeignKey(Parent, models.CASCADE)


class ToFieldChild(models.Model):
    # FK targeting Parent.name (a unique, non-PK column) via to_field.
    parent = models.ForeignKey(Parent, models.CASCADE, to_field='name')
# Multiple paths to the same model (#7110, #7125)
@python_2_unicode_compatible
class Category(models.Model):
    name = models.CharField(max_length=20)

    def __str__(self):
        return self.name


class Record(models.Model):
    category = models.ForeignKey(Category, models.CASCADE)


@python_2_unicode_compatible
class Relation(models.Model):
    # Two FKs to the same model; related_name disambiguates reverse accessors.
    left = models.ForeignKey(Record, models.CASCADE, related_name='left_set')
    right = models.ForeignKey(Record, models.CASCADE, related_name='right_set')

    def __str__(self):
        return "%s - %s" % (self.left.category.name, self.right.category.name)
# Test related objects visibility.
class SchoolManager(models.Manager):
    def get_queryset(self):
        # The default manager hides non-public schools, including from
        # related-object traversal (Student.school).
        return super(SchoolManager, self).get_queryset().filter(is_public=True)


class School(models.Model):
    is_public = models.BooleanField(default=False)
    objects = SchoolManager()


class Student(models.Model):
    school = models.ForeignKey(School, models.CASCADE)
| bsd-3-clause |
frreiss/tensorflow-fred | tensorflow/python/kernel_tests/self_adjoint_eig_op_test.py | 5 | 9447 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.linalg_ops.self_adjoint_eig."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes as dtypes_lib
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradient_checker_v2
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.platform import test
def _AddTest(test_class, op_name, testcase_name, fn):
test_name = "_".join(["test", op_name, testcase_name])
if hasattr(test_class, test_name):
raise RuntimeError("Test %s defined more than once" % test_name)
setattr(test_class, test_name, fn)
@test_util.run_all_without_tensor_float_32
class SelfAdjointEigTest(test.TestCase):
  """Hand-written (non-parameterized) tests for self_adjoint_eig(vals)."""

  @test_util.run_deprecated_v1
  def testWrongDimensions(self):
    # The input to self_adjoint_eig should be a tensor of
    # at least rank 2.
    scalar = constant_op.constant(1.)
    with self.assertRaises(ValueError):
      linalg_ops.self_adjoint_eig(scalar)
    vector = constant_op.constant([1., 2.])
    with self.assertRaises(ValueError):
      linalg_ops.self_adjoint_eig(vector)

  @test_util.run_deprecated_v1
  def testConcurrentExecutesWithoutError(self):
    # Decompose two identical (same seed) matrices in one session run to
    # verify concurrent kernel executions do not interfere.
    all_ops = []
    with self.session(use_gpu=True) as sess:
      for compute_v_ in True, False:
        matrix1 = random_ops.random_normal([5, 5], seed=42)
        matrix2 = random_ops.random_normal([5, 5], seed=42)
        if compute_v_:
          e1, v1 = linalg_ops.self_adjoint_eig(matrix1)
          e2, v2 = linalg_ops.self_adjoint_eig(matrix2)
          all_ops += [e1, v1, e2, v2]
        else:
          e1 = linalg_ops.self_adjoint_eigvals(matrix1)
          e2 = linalg_ops.self_adjoint_eigvals(matrix2)
          all_ops += [e1, e2]
      val = self.evaluate(all_ops)
      # val layout: [e1, v1, e2, v2, e1', e2'] per the loop above.
      self.assertAllEqual(val[0], val[2])
      # The algorithm is slightly different for compute_v being True and False,
      # so require approximate equality only here.
      self.assertAllClose(val[2], val[4])
      self.assertAllEqual(val[4], val[5])
      self.assertAllEqual(val[1], val[3])

  def testMatrixThatFailsWhenFlushingDenormsToZero(self):
    # Test a 32x32 matrix which is known to fail if denorm floats are flushed to
    # zero.
    matrix = np.genfromtxt(
        test.test_src_dir_path(
            "python/kernel_tests/testdata/"
            "self_adjoint_eig_fail_if_denorms_flushed.txt")).astype(np.float32)
    self.assertEqual(matrix.shape, (32, 32))
    matrix_tensor = constant_op.constant(matrix)
    with self.session(use_gpu=True) as sess:
      (e, v) = self.evaluate(linalg_ops.self_adjoint_eig(matrix_tensor))
      self.assertEqual(e.size, 32)
      # Eigenvectors should be (approximately) orthonormal: V V^T == I.
      self.assertAllClose(
          np.matmul(v, v.transpose()), np.eye(32, dtype=np.float32), atol=2e-3)
      # Reconstruction check: V diag(E) V^T == A.
      self.assertAllClose(matrix,
                          np.matmul(np.matmul(v, np.diag(e)), v.transpose()))
def SortEigenDecomposition(e, v):
  """Sort eigenvalues ascending along the last axis, permuting eigenvector
  columns of `v` to match. A `v` of rank < 2 is returned unchanged."""
  if v.ndim < 2:
    return e, v
  order = np.argsort(e, -1)
  return np.take(e, order, -1), np.take(v, order, -1)
def EquilibrateEigenVectorPhases(x, y):
  """Equilibrate the phase of the Eigenvectors in the columns of `x` and `y`.

  Eigenvectors are only unique up to an arbitrary phase. This function rotates x
  such that it matches y. Precondition: The columns of x and y differ by a
  multiplicative complex phase factor only.

  Args:
    x: `np.ndarray` with Eigenvectors
    y: `np.ndarray` with Eigenvectors

  Returns:
    `np.ndarray` containing an equilibrated version of x.
  """
  # Per-column inner product <x, y>; its argument is the phase offset.
  inner_products = (np.conj(x) * y).sum(axis=-2, keepdims=True)
  unit_phases = inner_products / np.abs(inner_products)
  return unit_phases * x
def _GetSelfAdjointEigTest(dtype_, shape_, compute_v_):
  """Build a forward-op test method for one (dtype, shape, compute_v) combo."""

  def CompareEigenVectors(self, x, y, tol):
    # Strip the arbitrary per-column phase before comparing.
    x = EquilibrateEigenVectorPhases(x, y)
    self.assertAllClose(x, y, atol=tol)

  def CompareEigenDecompositions(self, x_e, x_v, y_e, y_v, tol):
    # Flatten batch dims, sort each decomposition, then compare per batch.
    num_batches = int(np.prod(x_e.shape[:-1]))
    n = x_e.shape[-1]
    x_e = np.reshape(x_e, [num_batches] + [n])
    x_v = np.reshape(x_v, [num_batches] + [n, n])
    y_e = np.reshape(y_e, [num_batches] + [n])
    y_v = np.reshape(y_v, [num_batches] + [n, n])
    for i in range(num_batches):
      x_ei, x_vi = SortEigenDecomposition(x_e[i, :], x_v[i, :, :])
      y_ei, y_vi = SortEigenDecomposition(y_e[i, :], y_v[i, :, :])
      self.assertAllClose(x_ei, y_ei, atol=tol, rtol=tol)
      CompareEigenVectors(self, x_vi, y_vi, tol)

  def Test(self):
    np.random.seed(1)
    n = shape_[-1]
    batch_shape = shape_[:-2]
    np_dtype = dtype_.as_numpy_dtype
    a = np.random.uniform(
        low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
    if dtype_.is_complex:
      a += 1j * np.random.uniform(
          low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
    # Symmetrize (Hermitian for complex dtypes) so eigh applies, then tile
    # the same matrix across the batch dims.
    a += np.conj(a.T)
    a = np.tile(a, batch_shape + (1, 1))
    if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
      atol = 1e-4
    else:
      atol = 1e-12
    np_e, np_v = np.linalg.eigh(a)
    with self.session(use_gpu=True):
      if compute_v_:
        tf_e, tf_v = linalg_ops.self_adjoint_eig(constant_op.constant(a))
        # Check that V*diag(E)*V^T is close to A.
        a_ev = test_util.matmul_without_tf32(
            test_util.matmul_without_tf32(tf_v, array_ops.matrix_diag(tf_e)),
            tf_v,
            adjoint_b=True)
        self.assertAllClose(self.evaluate(a_ev), a, atol=atol)
        # Compare to numpy.linalg.eigh.
        CompareEigenDecompositions(self, np_e, np_v, self.evaluate(tf_e),
                                   self.evaluate(tf_v), atol)
      else:
        tf_e = linalg_ops.self_adjoint_eigvals(constant_op.constant(a))
        self.assertAllClose(
            np.sort(np_e, -1), np.sort(self.evaluate(tf_e), -1), atol=atol)

  return Test
class SelfAdjointEigGradTest(test.TestCase):
  # Gradient test methods are attached dynamically in __main__ via _AddTest.
  pass  # Filled in below
def _GetSelfAdjointEigGradTest(dtype_, shape_, compute_v_):
  """Build a gradient-check test method for one (dtype, shape, compute_v)."""

  def Test(self):
    np.random.seed(1)
    n = shape_[-1]
    batch_shape = shape_[:-2]
    np_dtype = dtype_.as_numpy_dtype

    def RandomInput():
      # Random Hermitian matrix tiled over the batch dims.
      a = np.random.uniform(
          low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
      if dtype_.is_complex:
        a += 1j * np.random.uniform(
            low=-1.0, high=1.0, size=n * n).reshape([n, n]).astype(np_dtype)
      a += np.conj(a.T)
      a = np.tile(a, batch_shape + (1, 1))
      return a

    # Optimal stepsize for central difference is O(epsilon^{1/3}).
    epsilon = np.finfo(np_dtype).eps
    delta = 0.1 * epsilon**(1.0 / 3.0)
    # tolerance obtained by looking at actual differences using
    # np.linalg.norm(theoretical-numerical, np.inf) on -mavx build
    # after discarding one random input sample
    _ = RandomInput()
    if dtype_ in (dtypes_lib.float32, dtypes_lib.complex64):
      tol = 1e-2
    else:
      tol = 1e-7
    with self.session(use_gpu=True):

      def Compute(x):
        e, v = linalg_ops.self_adjoint_eig(x)
        # (complex) Eigenvectors are only unique up to an arbitrary phase
        # We normalize the vectors such that the first component has phase 0.
        top_rows = v[..., 0:1, :]
        if dtype_.is_complex:
          angle = -math_ops.angle(top_rows)
          phase = math_ops.complex(math_ops.cos(angle), math_ops.sin(angle))
        else:
          phase = math_ops.sign(top_rows)
        v *= phase
        return e, v

      if compute_v_:
        # Check gradients of eigenvalues and eigenvectors separately.
        funcs = [lambda x: Compute(x)[0], lambda x: Compute(x)[1]]
      else:
        funcs = [linalg_ops.self_adjoint_eigvals]
      for f in funcs:
        theoretical, numerical = gradient_checker_v2.compute_gradient(
            f,
            [RandomInput()],
            delta=delta)
        self.assertAllClose(theoretical, numerical, atol=tol, rtol=tol)

  return Test
if __name__ == "__main__":
dtypes_to_test = [
dtypes_lib.float32, dtypes_lib.float64, dtypes_lib.complex64,
dtypes_lib.complex128
]
for compute_v in True, False:
for dtype in dtypes_to_test:
for size in 1, 2, 5, 10:
for batch_dims in [(), (3,)] + [(3, 2)] * (max(size, size) < 10):
shape = batch_dims + (size, size)
name = "%s_%s_%s" % (dtype.name, "_".join(map(str, shape)), compute_v)
_AddTest(SelfAdjointEigTest, "SelfAdjointEig", name,
_GetSelfAdjointEigTest(dtype, shape, compute_v))
_AddTest(SelfAdjointEigGradTest, "SelfAdjointEigGrad", name,
_GetSelfAdjointEigGradTest(dtype, shape, compute_v))
test.main()
| apache-2.0 |
foreverfaint/scrapy | scrapy/tests/test_downloadermiddleware_ajaxcrawlable.py | 14 | 2482 | import unittest
from scrapy.contrib.downloadermiddleware.ajaxcrawl import AjaxCrawlMiddleware
from scrapy.spider import Spider
from scrapy.http import Request, HtmlResponse, Response
from scrapy.utils.test import get_crawler
__doctests__ = ['scrapy.contrib.downloadermiddleware.ajaxcrawl']
class AjaxCrawlMiddlewareTest(unittest.TestCase):
    """Tests for AjaxCrawlMiddleware (AJAX-crawlable pages rewritten to
    the ?_escaped_fragment_= form)."""

    def setUp(self):
        self.spider = Spider('foo')
        crawler = get_crawler({'AJAXCRAWL_ENABLED': True})
        self.mw = AjaxCrawlMiddleware.from_crawler(crawler)

    def _ajaxcrawlable_body(self):
        # A page advertising AJAX crawlability via <meta name="fragment" content="!">.
        return '<html><head><meta name="fragment" content="!"/></head><body></body></html>'

    def _req_resp(self, url, req_kwargs=None, resp_kwargs=None):
        # Build a matched (Request, HtmlResponse) pair for feeding the middleware.
        req = Request(url, **(req_kwargs or {}))
        resp = HtmlResponse(url, request=req, **(resp_kwargs or {}))
        return req, resp

    def test_non_get(self):
        # Non-GET requests must pass through untouched.
        req, resp = self._req_resp('http://example.com/', {'method': 'HEAD'})
        resp2 = self.mw.process_response(req, resp, self.spider)
        self.assertEqual(resp, resp2)

    def test_binary_response(self):
        # Non-HTML (binary) responses must pass through untouched.
        req = Request('http://example.com/')
        resp = Response('http://example.com/', body=b'foobar\x00\x01\x02', request=req)
        resp2 = self.mw.process_response(req, resp, self.spider)
        self.assertIs(resp, resp2)

    def test_ajaxcrawl(self):
        # An AJAX-crawlable page is re-requested with _escaped_fragment_,
        # preserving the original request meta.
        req, resp = self._req_resp(
            'http://example.com/',
            {'meta': {'foo': 'bar'}},
            {'body': self._ajaxcrawlable_body()}
        )
        req2 = self.mw.process_response(req, resp, self.spider)
        self.assertEqual(req2.url, 'http://example.com/?_escaped_fragment_=')
        self.assertEqual(req2.meta['foo'], 'bar')

    def test_ajaxcrawl_loop(self):
        # The rewritten request must not be rewritten again (no infinite loop).
        req, resp = self._req_resp('http://example.com/', {}, {'body': self._ajaxcrawlable_body()})
        req2 = self.mw.process_response(req, resp, self.spider)
        resp2 = HtmlResponse(req2.url, body=resp.body, request=req2)
        resp3 = self.mw.process_response(req2, resp2, self.spider)
        assert isinstance(resp3, HtmlResponse), (resp3.__class__, resp3)
        self.assertEqual(resp3.request.url, 'http://example.com/?_escaped_fragment_=')
        assert resp3 is resp2

    def test_noncrawlable_body(self):
        # HTML without the fragment meta tag passes through untouched.
        req, resp = self._req_resp('http://example.com/', {}, {'body': '<html></html>'})
        resp2 = self.mw.process_response(req, resp, self.spider)
        self.assertIs(resp, resp2)
| bsd-3-clause |
stonneau/cwc_tests | src/tools/plot_utils.py | 2 | 11856 | # -*- coding: utf-8 -*-
"""
Created on Fri Jan 16 09:16:56 2015
@author: adelpret
"""
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import numpy as np
# ---------------------------------------------------------------------------
# Global plotting defaults. These constants are pushed into matplotlib's
# rcParams further below and read by the plotting helpers in this module.
# ---------------------------------------------------------------------------
DEFAULT_FONT_SIZE = 40;
DEFAULT_AXIS_FONT_SIZE = DEFAULT_FONT_SIZE;
DEFAULT_LINE_WIDTH = 8; #13;
DEFAULT_MARKER_SIZE = 6;
DEFAULT_FONT_FAMILY = 'sans-serif'
DEFAULT_FONT_SIZE = DEFAULT_FONT_SIZE;  # NOTE(review): self-assignment, a no-op
DEFAULT_FONT_SERIF = ['Times New Roman', 'Times','Bitstream Vera Serif', 'DejaVu Serif', 'New Century Schoolbook', 'Century Schoolbook L', 'Utopia', 'ITC Bookman', 'Bookman', 'Nimbus Roman No9 L', 'Palatino', 'Charter', 'serif'];
DEFAULT_FIGURE_FACE_COLOR = 'white' # figure facecolor; 0.75 is scalar gray
DEFAULT_LEGEND_FONT_SIZE = DEFAULT_FONT_SIZE;
DEFAULT_AXES_LABEL_SIZE = DEFAULT_FONT_SIZE; # fontsize of the x and y labels
DEFAULT_TEXT_USE_TEX = True;
LINE_ALPHA = 0.9;
SAVE_FIGURES = False;
FILE_EXTENSIONS = ['png']; #,'eps'];
FIGURES_DPI = 150;
SHOW_LEGENDS = False;
LEGEND_ALPHA = 0.5;
SHOW_FIGURES = False;
FIGURE_PATH = './';
LINE_WIDTH_RED = 0; # reduction of line width when plotting multiple lines on same plot
LINE_WIDTH_MIN = 1;
BOUNDS_COLOR = 'silver';
# Reference notes on other rcParams keys (kept from upstream, not applied):
#legend.framealpha : 1.0 # opacity of of legend frame
#axes.hold : True # whether to clear the axes by default on
#axes.linewidth : 1.0 # edge linewidth
#axes.titlesize : large # fontsize of the axes title
#axes.color_cycle : b, g, r, c, m, y, k # color cycle for plot lines
#xtick.labelsize : medium # fontsize of the tick labels
#figure.dpi : 80 # figure dots per inch
#image.cmap : jet # gray | jet etc...
#savefig.dpi : 100 # figure dots per inch
#savefig.facecolor : white # figure facecolor when saving
#savefig.edgecolor : white # figure edgecolor when saving
#savefig.format : png # png, ps, pdf, svg
#savefig.jpeg_quality: 95 # when a jpeg is saved, the default quality parameter.
#savefig.directory : ~ # default directory in savefig dialog box,
# leave empty to always use current working directory
def create_empty_figure(nRows=1, nCols=1, spinesPos=None, sharex=True):
    """Create a figure with an nRows x nCols grid of empty subplots.

    @param spinesPos: optional (x, y) data coordinates for the bottom/left
                      spines; when given, spines are moved on every subplot.
    @param sharex: whether all subplots share the x axis.
    @return: the (figure, axes) pair from plt.subplots.
    """
    f, ax = plt.subplots(nRows, nCols, sharex=sharex)
    mngr = plt.get_current_fig_manager()
    # Fixed window size/position; only meaningful for windowed backends
    # (e.g. Qt) — TODO confirm the backend in use exposes .window.
    mngr.window.setGeometry(50, 50, 1080, 720)
    # IDIOM FIX: compare with None using "is not None" rather than "!= None".
    if spinesPos is not None:
        if nRows * nCols > 1:
            for axis in ax.reshape(nRows * nCols):
                movePlotSpines(axis, spinesPos)
        else:
            movePlotSpines(ax, spinesPos)
    return (f, ax)
def movePlotSpines(ax, spinesPos):
    """Hide the top/right spines of `ax` and move the bottom/left spines to
    the data coordinates given in `spinesPos` (x position, y position)."""
    for side in ('right', 'top'):
        ax.spines[side].set_color('none')
    ax.xaxis.set_ticks_position('bottom')
    ax.spines['bottom'].set_position(('data', spinesPos[0]))
    ax.yaxis.set_ticks_position('left')
    ax.spines['left'].set_position(('data', spinesPos[1]))
def setAxisFontSize(ax, size):
    """Set the font size of all tick labels of `ax`, with a semi-opaque
    white box behind each label for readability."""
    tick_labels = ax.get_xticklabels() + ax.get_yticklabels()
    for tick_label in tick_labels:
        tick_label.set_fontsize(size)
        tick_label.set_bbox({'facecolor': 'white', 'edgecolor': 'None', 'alpha': 0.65})
# Apply the module defaults above to matplotlib's global configuration,
# starting from a clean slate.
mpl.rcdefaults()
mpl.rcParams['lines.linewidth'] = DEFAULT_LINE_WIDTH;
mpl.rcParams['lines.markersize'] = DEFAULT_MARKER_SIZE;
mpl.rcParams['font.family'] = DEFAULT_FONT_FAMILY;
mpl.rcParams['font.size'] = DEFAULT_FONT_SIZE;
mpl.rcParams['font.serif'] = DEFAULT_FONT_SERIF;
mpl.rcParams['text.usetex'] = DEFAULT_TEXT_USE_TEX;
mpl.rcParams['axes.labelsize'] = DEFAULT_AXES_LABEL_SIZE;
mpl.rcParams['legend.fontsize'] = DEFAULT_LEGEND_FONT_SIZE;
mpl.rcParams['figure.facecolor'] = DEFAULT_FIGURE_FACE_COLOR;
mpl.rcParams['figure.figsize'] = 12, 9 #23, 12 #
def plot3dQuantity(quantity, title, ax=None, boundUp=None, boundLow=None, yscale='linear', linestyle='k'):
    """Convenience wrapper: plot a (T, 3) quantity on a 3x1 grid of subplots."""
    return plotNdQuantity(3, 1, quantity, title, ax=ax, boundUp=boundUp,
                          boundLow=boundLow, yscale=yscale, linestyle=linestyle)
def plotNdQuantity(nRows, nCols, quantity, title="", ax=None, boundUp=None, boundLow=None, yscale='linear',
linestyle='k--', sharey=False, margins=None):
t = quantity.shape[0];
n = quantity.shape[1];
if(margins!=None):
if(type(margins) is list):
margins = [margins[0].reshape(t,1,n), margins[1].reshape(t,1,n)];
else:
margins = margins.reshape(t,1,n);
return plotNdQuantityPerSolver(nRows, nCols, quantity.reshape(t,1,n), title, None, [linestyle], ax,
boundUp, boundLow, yscale, None, None, sharey, margins);
def plotNdQuantityPerSolver(nRows, nCols, quantity, title, solver_names, line_styles, ax=None, boundUp=None, boundLow=None,
                            yscale='linear', subplot_titles=None, ylabels=None, sharey=False, margins=None, x=None):
    """ Plot a (T x nSolvers x n) array on an nRows x nCols grid of subplots:
        one subplot per component k, one line per solver s.

        boundUp/boundLow -- optional bounds, either constant (shape (n,)) or
                            time-varying (shape (T, n)), drawn dashed; they
                            also widen the y limits when not absurdly far
                            (the 2*ymax / 2*ymin heuristics below).
        margins          -- optional (T x nSolvers x n) array, or an
                            [upper, lower] list of two, drawn as a band.
        x                -- optional abscissa values (default sample index).
        Returns the array of Axes used.
    """
    # NOTE: '== None' / '!= None' were replaced by identity checks because
    # ax, x, boundUp, boundLow and margins may be numpy arrays, for which
    # element-wise comparison with None cannot be used as a truth value.
    if(ax is None):
        f, ax = plt.subplots(nRows, nCols, sharex=True, sharey=sharey);
    ax = ax.reshape(nRows, nCols);
    k = 0;
    if(x is None):
        x = range(quantity.shape[0]);
    for j in range(nCols):
        for i in range(nRows):
            if(k<quantity.shape[2]):
                if(subplot_titles is not None):
                    ax[i,j].set_title(subplot_titles[k]);
                elif(i==0):
                    ax[i,j].set_title(str(k));  # set titles on first row only
                if(ylabels is not None):
                    ax[i,j].set_ylabel(ylabels[k]);
                ymin = np.min(quantity[:,:,k]);
                ymax = np.max(quantity[:,:,k]);
                if(boundUp is not None):
                    if(len(boundUp.shape)==1): # constant bound
                        if(boundUp[k]<2*ymax):
                            ymax = np.max([ymax,boundUp[k]]);
                        ax[i,j].plot([0, quantity.shape[0]-1], [boundUp[k], boundUp[k]], '--', color=BOUNDS_COLOR, alpha=LINE_ALPHA);
                    elif(len(boundUp.shape)==2): # bound variable in time but constant for each solver
                        if(np.max(boundUp[:,k])<2*ymax):
                            ymax = np.max(np.concatenate(([ymax],boundUp[:,k])));
                        ax[i,j].plot(boundUp[:,k], '--', color=BOUNDS_COLOR, label='Upper bound', alpha=LINE_ALPHA);
                if(boundLow is not None):
                    if(len(boundLow.shape)==1):
                        if(boundLow[k]>2*ymin):
                            ymin = np.min([ymin,boundLow[k]]);
                        ax[i,j].plot([0, quantity.shape[0]-1], [boundLow[k], boundLow[k]], '--', color=BOUNDS_COLOR, alpha=LINE_ALPHA);
                    else:
                        if(np.min(boundLow[:,k])>2*ymin):
                            ymin = np.min(np.concatenate(([ymin],boundLow[:,k])));
                        ax[i,j].plot(boundLow[:,k], '--', color=BOUNDS_COLOR, label='Lower bound', alpha=LINE_ALPHA);
                lw = DEFAULT_LINE_WIDTH;
                for s in range(quantity.shape[1]):
                    p, = ax[i,j].plot(x, quantity[:,s,k], line_styles[s], alpha=LINE_ALPHA, linewidth=lw);
                    if(margins is not None):
                        if(type(margins) is list):
                            mp = margins[0];
                            mn = margins[1];
                        else:
                            mp = margins;
                            mn = margins;
                        # Widen the y range so the shaded band is visible.
                        ymax = np.max(np.concatenate(([ymax],quantity[:,s,k]+mp[:,s,k])));
                        ymin = np.min(np.concatenate(([ymin],quantity[:,s,k]-mn[:,s,k])));
                        ax[i,j].fill_between(x, quantity[:,s,k]+mp[:,s,k], quantity[:,s,k]-mn[:,s,k], alpha=0.15, linewidth=0, facecolor='green');
                    if(solver_names is not None):
                        p.set_label(solver_names[s]);
                    lw=max(LINE_WIDTH_MIN,lw-LINE_WIDTH_RED);  # thinner line for each successive solver
                ax[i,j].set_yscale(yscale);
                ax[i,j].xaxis.set_ticks(np.arange(0, x[-1], x[-1]/2));
                ax[i,j].yaxis.set_ticks([ymin, ymax]);
                # Pick a tick-label precision suited to the value range.
                if(ymax-ymin>5.0):
                    ax[i,j].yaxis.set_major_formatter(ticker.FormatStrFormatter('%0.0f'));
                elif(ymax-ymin>0.5):
                    ax[i,j].yaxis.set_major_formatter(ticker.FormatStrFormatter('%0.1f'));
                else:
                    ax[i,j].yaxis.set_major_formatter(ticker.FormatStrFormatter('%0.2f'));
                if(sharey==False):
                    ax[i,j].set_ylim([ymin-0.1*(ymax-ymin), ymax+0.1*(ymax-ymin)]);
                k += 1;
            else:
                ax[i,j].yaxis.set_major_formatter(ticker.FormatStrFormatter('%0.0f'));
    if(SAVE_FIGURES):
        for ext in FILE_EXTENSIONS:
            plt.gcf().savefig(FIGURE_PATH+title.replace(' ', '_')+'.'+ext, format=ext, dpi=FIGURES_DPI, bbox_inches='tight');
    else:
        # '//' gives the same result as '/' under Python 2 here and keeps the
        # index an int under Python 3.
        ax[nRows//2,0].set_ylabel(title);
    if(SHOW_LEGENDS):
#        leg = ax[0,0].legend(bbox_to_anchor=(0., 1.02, 1., .102), loc=3, ncol=2, mode="expand", borderaxespad=0.)
        leg = ax[0,0].legend(loc='best');
#        leg.get_frame().set_alpha(LEGEND_ALPHA)
    return ax;
def plotQuantityPerSolver(quantity, title, solver_names, line_styles, yscale='linear', ylabel='',
                          x=None, xlabel='', legend_location='best'):
    """ Plot a (T x nSolvers) array 'quantity' on a single Axes, one line per
        solver, optionally against user-supplied abscissa values 'x'.
        If no explicit ylabel is given and figures are not being saved, the
        title is used as the y-axis label instead.
    """
    f, ax = plt.subplots();
    lw = DEFAULT_LINE_WIDTH;
    # Identity check rather than '== None': callers may pass a numpy array
    # for x, for which '==' is element-wise and ambiguous as a truth value.
    if(x is None):
        x = range(quantity.shape[0]);
    for i in range(len(solver_names)):
        ax.plot(x, quantity[:,i], line_styles[i], alpha=LINE_ALPHA, linewidth=lw);
        lw=max(lw-LINE_WIDTH_RED,LINE_WIDTH_MIN);  # thinner line for each successive solver
    ax.set_yscale(yscale);
    ax.set_ylabel(ylabel);
    ax.set_xlabel(xlabel);
    ymin = np.min(quantity);
    ymax = np.max(quantity);
    ax.set_ylim([ymin-0.1*(ymax-ymin), ymax+0.1*(ymax-ymin)]);
    if(SHOW_LEGENDS):
        leg = ax.legend(solver_names, loc=legend_location);
        leg.get_frame().set_alpha(LEGEND_ALPHA)
    if(SAVE_FIGURES):
        for ext in FILE_EXTENSIONS:
            plt.gcf().savefig(FIGURE_PATH+title.replace(' ', '_')+'.'+ext, format=ext, dpi=FIGURES_DPI, bbox_inches='tight');
    elif(ylabel==''):
        ax.set_ylabel(title);
def plotQuantityVsQuantityPerSolver(quantity, quantityPerSolver, legend, solver_names, line_styles, yscale='linear'):
    # Compare a reference quantity against a per-solver quantity, one subplot
    # per solver ('kx-' is the reference curve). Only 3-6 solvers are
    # supported because the subplot grid layout is hard-coded below.
    # NOTE: this is Python 2 code -- the 'print' statement and the integer
    # divisions i/c rely on Python 2 semantics.
    r=0;
    c=0;
    if(len(solver_names)==4 or len(solver_names)==3):
        r=2;
        c=2;
    elif(len(solver_names)==5 or len(solver_names)==6):
        r=2;
        c=3;
    else:
        print "ERROR in plotQuantityVsQuantityPerSolver, number of solvers not managed";
        return;
    f, ax = plt.subplots(r, c, sharex=True, sharey=True);
    for i in range(len(solver_names)):
        # i/c (floor division in Python 2) and i%c map the linear solver
        # index onto the subplot grid.
        ax[i/c,i%c].plot(quantity[:,i], 'kx-', quantityPerSolver[:,i], line_styles[i], alpha=LINE_ALPHA);
        ax[i/c,i%c].set_ylabel(solver_names[i]);
        ax[i/c,i%c].set_yscale(yscale);
    if(SAVE_FIGURES):
        for ext in FILE_EXTENSIONS:
            f.savefig(FIGURE_PATH+(legend[0]+'_VS_'+legend[1]).replace(' ', '_')+'.'+ext, format=ext, dpi=FIGURES_DPI, bbox_inches='tight');
    if(SHOW_LEGENDS):
        leg = ax[0,0].legend(legend, loc='best');
        leg.get_frame().set_alpha(LEGEND_ALPHA)
def grayify_cmap(cmap):
    """Return a grayscale version of the colormap"""
    cmap = plt.cm.get_cmap(cmap)
    rgba = cmap(np.arange(cmap.N))
    # Perceived luminance per the HSP colour model
    # (cf. http://alienryderflex.com/hsp.html): square-root of the
    # weighted sum of the squared RGB channels.
    RGB_weight = [0.299, 0.587, 0.114]
    gray = np.sqrt(np.dot(rgba[:, :3] ** 2, RGB_weight))
    rgba[:, :3] = gray[:, None]
    return cmap.from_list(cmap.name + "_grayscale", rgba, cmap.N)
def saveFigure(title):
    # Save the current figure under FIGURE_PATH in every configured file
    # extension, with spaces in the title replaced by underscores.
    # No-op unless SAVE_FIGURES is enabled.
    if(SAVE_FIGURES):
        for ext in FILE_EXTENSIONS:
            plt.gcf().savefig(FIGURE_PATH+title.replace(' ', '_')+'.'+ext, format=ext, dpi=FIGURES_DPI, bbox_inches='tight'); | gpl-3.0 |
larroy/mxnet | python/mxnet/numpy/arrayprint.py | 11 | 2096 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""ndarray print format controller."""
import numpy as onp
from ..util import set_module
__all__ = ['set_printoptions']
@set_module('mxnet.numpy')
def set_printoptions(precision=None, threshold=None, **kwarg):
    """
    Set printing options.
    These options determine the way floating point numbers and arrays are displayed.
    Parameters
    ----------
    precision : int or None, optional
        Number of digits of precision for floating point output (default 8).
        May be `None` if `floatmode` is not `fixed`, to print as many digits as
        necessary to uniquely specify the value.
    threshold : int, optional
        Total number of array elements which trigger summarization
        rather than full repr (default 1000).
    Examples
    --------
    Floating point precision can be set:
    >>> np.set_printoptions(precision=4)
    >>> print(np.array([1.123456789]))
    [ 1.1235]
    Long arrays can be summarised:
    >>> np.set_printoptions(threshold=5)
    >>> print(np.arange(10))
    [0. 1. 2. ... 7. 8. 9.]
    """
    # Reject any keyword other than the two supported options up front.
    if kwarg:
        raise NotImplementedError('mxnet.numpy.set_printoptions only supports parameters'
                                  ' precision and threshold for now.')
    # kwarg is guaranteed empty here, so only the supported options are
    # forwarded to the official NumPy implementation.
    onp.set_printoptions(precision=precision, threshold=threshold)
| apache-2.0 |
eleonrk/SickRage | lib/hachoir_core/field/byte_field.py | 95 | 2190 | """
Very basic field: raw content with a size in byte. Use this class for
unknown content.
"""
from hachoir_core.field import Field, FieldError
from hachoir_core.tools import makePrintable
from hachoir_core.bits import str2hex
from hachoir_core import config
MAX_LENGTH = (2**64)
class RawBytes(Field):
    """
    Byte vector of unknown content
    @see: L{Bytes}
    """
    # Field size in bits for a given byte length (args[1] is the 'length'
    # constructor argument).
    static_size = staticmethod(lambda *args, **kw: args[1]*8)
    def __init__(self, parent, name, length, description="Raw data"):
        # length: payload size in bytes, must be in 1..MAX_LENGTH inclusive.
        assert issubclass(parent.__class__, Field)
        if not(0 < length <= MAX_LENGTH):
            raise FieldError("Invalid RawBytes length (%s)!" % length)
        Field.__init__(self, parent, name, length*8, description)
        self._display = None  # cached byte prefix used to build the display
    def _createDisplay(self, human):
        # Build a printable representation truncated to config.max_byte_length
        # bytes; 'human' selects quoted latin-1 text over hex escapes.
        max_bytes = config.max_byte_length
        if type(self._getValue) is type(lambda: None):
            # NOTE(review): _getValue being a plain function appears to mean
            # the value is already computed, so its prefix is taken directly
            # -- confirm against the Field base-class internals.
            display = self.value[:max_bytes]
        else:
            if self._display is None:
                address = self.absolute_address
                length = min(self._size / 8, max_bytes)  # Python 2 int division
                self._display = self._parent.stream.readBytes(address, length)
            display = self._display
        # True when the prefix shown is shorter than the full field.
        truncated = (8 * len(display) < self._size)
        if human:
            if truncated:
                display += "(...)"
            return makePrintable(display, "latin-1", quote='"', to_unicode=True)
        else:
            display = str2hex(display, format=r"\x%02x")
            if truncated:
                return '"%s(...)"' % display
            else:
                return '"%s"' % display
    def createDisplay(self):
        # Human-readable display (quoted latin-1 text).
        return self._createDisplay(True)
    def createRawDisplay(self):
        # Raw display (quoted hex escapes).
        return self._createDisplay(False)
    def hasValue(self):
        return True
    def createValue(self):
        # Read the full payload from the parent stream; drop the cached
        # display prefix so later displays reflect the fresh value.
        assert (self._size % 8) == 0
        if self._display:
            self._display = None
        return self._parent.stream.readBytes(
            self.absolute_address, self._size / 8)
class Bytes(RawBytes):
    """
    Byte vector: can be used for magic number or GUID/UUID for example.
    Behaves exactly like RawBytes; the separate class only conveys that the
    content is meaningful rather than unknown.
    @see: L{RawBytes}
    """
    pass
| gpl-3.0 |
ParfenovS/lime | doc/conf.py | 2 | 8202 | #
# LIME documentation build configuration file, created by
# sphinx-quickstart on Mon Jan 5 23:34:05 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
needs_sphinx = '1.3'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.mathjax'
]
# Automatically number figures
numfig = True
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'LIME'
# NOTE(review): the year range "2016-2014" below looks reversed -- confirm
# whether "2006-2014" (or "2014-2016") was intended.
copyright = u'2016-2014 Christian Brinch, 2015 The LIME development team'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
# The full version, including alpha/beta/rc tags.
release = ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'LIMEdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
  ('index', 'LIME.tex', u'LIME Documentation',
   u'Christian Brinch', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'lime', u'LIME Documentation',
     [u'Christian Brinch'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
  ('index', 'LIME', u'LIME Documentation',
   u'Christian Brinch', 'LIME', 'One line description of project.',
   'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| gpl-3.0 |
Tatsh-ansible/ansible | lib/ansible/parsing/mod_args.py | 12 | 12568 | # (c) 2014 Michael DeHaan, <michael@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.errors import AnsibleParserError, AnsibleError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv, split_args
from ansible.plugins import module_loader, action_loader
from ansible.template import Templar
# For filtering out modules correctly below
# Modules whose free-form ("raw") parameters after the module name are
# allowed; everything else must use key=value or structured args.
# Stored as a tuple: membership tests and iteration order are identical to
# the original list, but the collection is immutable.
RAW_PARAM_MODULES = (
    'command',
    'win_command',
    'shell',
    'win_shell',
    'script',
    'include',
    'include_vars',
    'include_tasks',
    'include_role',
    'import_tasks',
    'import_role',
    'add_host',
    'group_by',
    'set_fact',
    'raw',
    'meta',
)
class ModuleArgsParser:
    """
    There are several ways a module and argument set can be expressed:
    # legacy form (for a shell command)
    - action: shell echo hi
    # common shorthand for local actions vs delegate_to
    - local_action: shell echo hi
    # most commonly:
    - copy: src=a dest=b
    # legacy form
    - action: copy src=a dest=b
    # complex args form, for passing structured data
    - copy:
        src: a
        dest: b
    # gross, but technically legal
    - action:
        module: copy
        args:
          src: a
          dest: b
    # Standard YAML form for command-type modules. In this case, the args specified
    # will act as 'defaults' and will be overridden by any args specified
    # in one of the other formats (complex args under the action, or
    # parsed from the k=v string
    - command: 'pwd'
      args:
        chdir: '/tmp'
    This class has some of the logic to canonicalize these into the form
    - module: <module_name>
      delegate_to: <optional>
      args: <args>
    Args may also be munged for certain shell command parameters.
    """
    def __init__(self, task_ds=None):
        # Default is None rather than a mutable dict() literal: a dict
        # default would be created once at function-definition time and
        # shared (and potentially mutated) across every parser instance.
        if task_ds is None:
            task_ds = dict()
        assert isinstance(task_ds, dict), "the type of 'task_ds' should be a dict, but is a %s" % type(task_ds)
        self._task_ds = task_ds
    def _split_module_string(self, module_string):
        '''
        when module names are expressed like:
        action: copy src=a dest=b
        the first part of the string is the name of the module
        and the rest are strings pertaining to the arguments.
        '''
        tokens = split_args(module_string)
        if len(tokens) > 1:
            return (tokens[0], " ".join(tokens[1:]))
        else:
            return (tokens[0], "")
    def _handle_shell_weirdness(self, action, args):
        '''
        given an action name and an args dictionary, return the
        proper action name and args dictionary.  This mostly is due
        to shell/command being treated special and nothing else
        '''
        # the shell module really is the command module with an additional
        # parameter
        if action == 'shell':
            action = 'command'
            args['_uses_shell'] = True
        return (action, args)
    def _normalize_parameters(self, thing, action=None, additional_args=None):
        '''
        arguments can be fuzzy.  Deal with all the forms.
        '''
        # additional_args defaults to None instead of a shared mutable dict;
        # a falsy value is skipped below, so behaviour is unchanged for
        # callers that relied on the old empty-dict default.
        # final args are the ones we'll eventually return, so first update
        # them with any additional args specified, which have lower priority
        # than those which may be parsed/normalized next
        final_args = dict()
        if additional_args:
            if isinstance(additional_args, string_types):
                templar = Templar(loader=None)
                if templar._contains_vars(additional_args):
                    final_args['_variable_params'] = additional_args
                else:
                    raise AnsibleParserError("Complex args containing variables cannot use bare variables, and must use the full variable style "
                                             "('{{var_name}}')")
            elif isinstance(additional_args, dict):
                final_args.update(additional_args)
            else:
                raise AnsibleParserError('Complex args must be a dictionary or variable string ("{{var}}").')
        # how we normalize depends if we figured out what the module name is
        # yet.  If we have already figured it out, it's a 'new style' invocation.
        # otherwise, it's not
        if action is not None:
            args = self._normalize_new_style_args(thing, action)
        else:
            (action, args) = self._normalize_old_style_args(thing)
            # this can occasionally happen, simplify
            if args and 'args' in args:
                tmp_args = args.pop('args')
                if isinstance(tmp_args, string_types):
                    tmp_args = parse_kv(tmp_args)
                args.update(tmp_args)
        # only internal variables can start with an underscore, so
        # we don't allow users to set them directly in arguments
        if args and action not in ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw'):
            for arg in args:
                arg = to_text(arg)
                if arg.startswith('_ansible_'):
                    raise AnsibleError("invalid parameter specified for action '%s': '%s'" % (action, arg))
        # finally, update the args we're going to return with the ones
        # which were normalized above
        if args:
            final_args.update(args)
        return (action, final_args)
    def _normalize_new_style_args(self, thing, action):
        '''
        deals with fuzziness in new style module invocations
        accepting key=value pairs and dictionaries, and returns
        a dictionary of arguments
        possible example inputs:
            'echo hi', 'shell'
            {'region': 'xyz'}, 'ec2'
        standardized outputs like:
            { _raw_params: 'echo hi', _uses_shell: True }
        '''
        if isinstance(thing, dict):
            # form is like: { xyz: { x: 2, y: 3 } }
            args = thing
        elif isinstance(thing, string_types):
            # form is like: copy: src=a dest=b
            check_raw = action in ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw')
            args = parse_kv(thing, check_raw=check_raw)
        elif thing is None:
            # this can happen with modules which take no params, like ping:
            args = None
        else:
            raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task_ds)
        return args
    def _normalize_old_style_args(self, thing):
        '''
        deals with fuzziness in old-style (action/local_action) module invocations
        returns tuple of (module_name, dictionary_args)
        possible example inputs:
            { 'shell' : 'echo hi' }
            'shell echo hi'
            {'module': 'ec2', 'x': 1 }
        standardized outputs like:
            ('ec2', { 'x': 1} )
        '''
        action = None
        args = None
        actions_allowing_raw = ('command', 'win_command', 'shell', 'win_shell', 'script', 'raw')
        if isinstance(thing, dict):
            # form is like:  action: { module: 'copy', src: 'a', dest: 'b' }
            thing = thing.copy()
            if 'module' in thing:
                action, module_args = self._split_module_string(thing['module'])
                args = thing.copy()
                check_raw = action in actions_allowing_raw
                args.update(parse_kv(module_args, check_raw=check_raw))
                del args['module']
        elif isinstance(thing, string_types):
            # form is like:  action: copy src=a dest=b
            (action, args) = self._split_module_string(thing)
            check_raw = action in actions_allowing_raw
            args = parse_kv(args, check_raw=check_raw)
        else:
            # need a dict or a string, so giving up
            raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task_ds)
        return (action, args)
    def parse(self):
        '''
        Given a task in one of the supported forms, parses and returns
        returns the action, arguments, and delegate_to values for the
        task, dealing with all sorts of levels of fuzziness.
        '''
        thing = None
        action = None
        delegate_to = self._task_ds.get('delegate_to', None)
        args = dict()
        # This is the standard YAML form for command-type modules. We grab
        # the args and pass them in as additional arguments, which can/will
        # be overwritten via dict updates from the other arg sources below
        additional_args = self._task_ds.get('args', dict())
        # We can have one of action, local_action, or module specified
        # action
        if 'action' in self._task_ds:
            # an old school 'action' statement
            thing = self._task_ds['action']
            action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)
        # local_action
        if 'local_action' in self._task_ds:
            # local_action is similar but also implies a delegate_to
            if action is not None:
                raise AnsibleParserError("action and local_action are mutually exclusive", obj=self._task_ds)
            thing = self._task_ds.get('local_action', '')
            delegate_to = 'localhost'
            action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)
        # module: <stuff> is the more new-style invocation
        # walk the input dictionary to see we recognize a module name
        for (item, value) in iteritems(self._task_ds):
            if item in module_loader or item in action_loader or item in ['meta', 'include', 'include_tasks', 'include_role', 'import_tasks', 'import_role']:
                # finding more than one module name is a problem
                if action is not None:
                    raise AnsibleParserError("conflicting action statements: %s, %s" % (action, item), obj=self._task_ds)
                action = item
                thing = value
                action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)
        # if we didn't see any module in the task at all, it's not a task really
        if action is None:
            if 'ping' not in module_loader:
                raise AnsibleParserError("The requested action was not found in configured module paths. "
                                         "Additionally, core modules are missing. If this is a checkout, "
                                         "run 'git pull --rebase' to correct this problem.",
                                         obj=self._task_ds)
            else:
                raise AnsibleParserError("no action detected in task. This often indicates a misspelled module name, or incorrect module path.",
                                         obj=self._task_ds)
        elif args.get('_raw_params', '') != '' and action not in RAW_PARAM_MODULES:
            templar = Templar(loader=None)
            raw_params = args.pop('_raw_params')
            if templar._contains_vars(raw_params):
                args['_variable_params'] = raw_params
            else:
                raise AnsibleParserError("this task '%s' has extra params, which is only allowed in the following modules: %s" % (action,
                                                                                                                                 ", ".join(RAW_PARAM_MODULES)),
                                         obj=self._task_ds)
        # shell modules require special handling
        (action, args) = self._handle_shell_weirdness(action, args)
        return (action, args, delegate_to)
| gpl-3.0 |
jenfly/atmos-read | scripts/fram/download_near_surface.py | 1 | 1527 | import sys
sys.path.append('/home/jwalker/dynamics/python/atmos-tools')
sys.path.append('/home/jwalker/dynamics/python/atmos-read')
import os
import numpy as np
import merra
import atmos as atm
# ----------------------------------------------------------------------
# Save single pressure-level daily data for each month
# Output directory and file-name template for the saved monthly files.
savedir = atm.homedir() + 'datastore/merra/daily/'
#plev = 950
plev = 850  # pressure level to extract
lon1, lon2 = 40, 120  # longitude subset, 40E-120E
lat1, lat2 = -60, 60  # latitude subset, 60S-60N
filestr = savedir + 'merra_%s%d_40E-120E_60S-60N_%d%02d.nc'
#varnms = ['T', 'H', 'QV', 'V']
varnms = ['T', 'H', 'QV']
#years = np.arange(1979, 2015)
#months = np.arange(4, 10)
years = np.arange(1979, 2000)
months = np.arange(1, 13)
nperday = 8  # number of sub-daily (3-hourly) samples per day
def filename(varname, plev, year, month, filestr):
    """Fill the file-name template 'filestr' with (varname, plev, year,
    month), announce the resulting path on stdout, and return it."""
    path = filestr % (varname, plev, year, month)
    print('Saving to ' + path)
    return path
# Spatial/vertical subset passed to the MERRA reader: single pressure level,
# 40E-120E, 60S-60N.
subset_dict = {'plev' : (plev, plev), 'lon' : (lon1, lon2), 'lat' : (lat1, lat2)}
# Download each variable for each year/month, average the 3-hourly data to
# daily means, and save one netCDF file per variable per month.
# NOTE: Python 2 script ('print varnm' below); the loop index 'm' and
# 'datestr' are currently unused.
for varnm in varnms:
    for year in years:
        for m, month in enumerate(months):
            datestr = '%d%02d' % (year, month)
            print varnm
            var = merra.read_daily(varnm, year, month, subset_dict=subset_dict)
            # Compute daily means of 3-hourly data
            dayvals = atm.season_days(atm.month_str(month), atm.isleap(year))
            var = atm.daily_from_subdaily(var, nperday, dayname='Day',
                                          dayvals=dayvals)
            # Save to file
            atm.save_nc(filename(varnm, plev, year, month, filestr), var)
| mit |
wiki05/youtube-dl | youtube_dl/extractor/vulture.py | 122 | 2455 | from __future__ import unicode_literals
import json
import os.path
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
parse_iso8601,
)
class VultureIE(InfoExtractor):
    # Extractor for video.vulture.com player pages (Magnify-hosted videos).
    IE_NAME = 'vulture.com'
    _VALID_URL = r'https?://video\.vulture\.com/video/(?P<display_id>[^/]+)/'
    _TEST = {
        'url': 'http://video.vulture.com/video/Mindy-Kaling-s-Harvard-Speech/player?layout=compact&read_more=1',
        'md5': '8d997845642a2b5152820f7257871bc8',
        'info_dict': {
            'id': '6GHRQL3RV7MSD1H4',
            'ext': 'mp4',
            'title': 'kaling-speech-2-MAGNIFY STANDARD CONTAINER REVISED',
            'uploader_id': 'Sarah',
            'thumbnail': 're:^http://.*\.jpg$',
            'timestamp': 1401288564,
            'upload_date': '20140528',
            'description': 'Uplifting and witty, as predicted.',
            'duration': 1015,
        }
    }
    def _real_extract(self, url):
        # Resolve the display id from the URL, then follow the embed player
        # page to recover the JSON parameters that hold the media metadata.
        mobj = re.match(self._VALID_URL, url)
        display_id = mobj.group('display_id')
        webpage = self._download_webpage(url, display_id)
        # The page stores the player query string in a JS variable; the
        # 'content' parameter inside it is the actual video id.
        query_string = self._search_regex(
            r"queryString\s*=\s*'([^']+)'", webpage, 'query string')
        video_id = self._search_regex(
            r'content=([^&]+)', query_string, 'video ID')
        query_url = 'http://video.vulture.com/embed/player/container/1000/1000/?%s' % query_string
        query_webpage = self._download_webpage(
            query_url, display_id, note='Downloading query page')
        # Extract the JSON object passed as 'contentItem' to the
        # MagnifyEmbeddablePlayer constructor.
        params_json = self._search_regex(
            r'(?sm)new MagnifyEmbeddablePlayer\({.*?contentItem:\s*(\{.*?\})\n?,\n',
            query_webpage,
            'player params')
        params = json.loads(params_json)
        # 'posted' is "YYYY-MM-DD hh:mm:ss"; turn it into ISO 8601 first.
        upload_timestamp = parse_iso8601(params['posted'].replace(' ', 'T'))
        uploader_id = params.get('user', {}).get('handle')
        media_item = params['media_item']
        # Strip the file extension from the stored title.
        title = os.path.splitext(media_item['title'])[0]
        duration = int_or_none(media_item.get('duration_seconds'))
        return {
            'id': video_id,
            'display_id': display_id,
            'url': media_item['pipeline_xid'],
            'title': title,
            'timestamp': upload_timestamp,
            'thumbnail': params.get('thumbnail_url'),
            'uploader_id': uploader_id,
            'description': params.get('description'),
            'duration': duration,
        }
| unlicense |
sabi0/intellij-community | python/helpers/py2only/docutils/parsers/rst/directives/parts.py | 136 | 4251 | # $Id: parts.py 7308 2012-01-06 12:08:43Z milde $
# Authors: David Goodger <goodger@python.org>; Dmitry Jemerov
# Copyright: This module has been placed in the public domain.
"""
Directives for document parts.
"""
__docformat__ = 'reStructuredText'
from docutils import nodes, languages
from docutils.transforms import parts
from docutils.parsers.rst import Directive
from docutils.parsers.rst import directives
class Contents(Directive):
    """
    Table of contents.
    The table of contents is generated in two passes: initial parse and
    transform.  During the initial parse, a 'pending' element is generated
    which acts as a placeholder, storing the TOC title and any options
    internally.  At a later stage in the processing, the 'pending' element is
    replaced by a 'topic' element, a title and the table of contents proper.
    """
    backlinks_values = ('top', 'entry', 'none')
    # Option-conversion function, stored as a plain class attribute per the
    # docutils directive convention (note: no 'self' parameter). Validates
    # the :backlinks: option; 'none' maps to None.
    def backlinks(arg):
        value = directives.choice(arg, Contents.backlinks_values)
        if value == 'none':
            return None
        else:
            return value
    optional_arguments = 1
    final_argument_whitespace = True
    option_spec = {'depth': directives.nonnegative_int,
                   'local': directives.flag,
                   'backlinks': backlinks,
                   'class': directives.class_option}
    def run(self):
        # A contents directive is only valid at section level or inside a
        # sidebar, not within topics or body elements.
        if not (self.state_machine.match_titles
                or isinstance(self.state_machine.node, nodes.sidebar)):
            raise self.error('The "%s" directive may not be used within '
                             'topics or body elements.' % self.name)
        document = self.state_machine.document
        language = languages.get_language(document.settings.language_code,
                                          document.reporter)
        if self.arguments:
            # Explicit title argument: parse its inline markup.
            title_text = self.arguments[0]
            text_nodes, messages = self.state.inline_text(title_text,
                                                          self.lineno)
            title = nodes.title(title_text, '', *text_nodes)
        else:
            # No title: local TOCs get none, global ones get the
            # language-specific default ("Contents" in English).
            messages = []
            if 'local' in self.options:
                title = None
            else:
                title = nodes.title('', language.labels['contents'])
        topic = nodes.topic(classes=['contents'])
        topic['classes'] += self.options.get('class', [])
        # the latex2e writer needs source and line for a warning:
        topic.source, topic.line = self.state_machine.get_source_and_line()
        topic.line -= 1
        if 'local' in self.options:
            topic['classes'].append('local')
        if title:
            name = title.astext()
            topic += title
        else:
            name = language.labels['contents']
        # Register the topic as an implicit hyperlink target, unless the
        # normalized name is already taken in this document.
        name = nodes.fully_normalize_name(name)
        if not document.has_name(name):
            topic['names'].append(name)
        document.note_implicit_target(topic)
        # The 'pending' placeholder carries the options; the parts.Contents
        # transform later replaces it with the actual table of contents.
        pending = nodes.pending(parts.Contents, rawsource=self.block_text)
        pending.details.update(self.options)
        document.note_pending(pending)
        topic += pending
        return [topic] + messages
class Sectnum(Directive):
    """Automatic section numbering.

    Emits a 'pending' placeholder that the parts.SectNum transform later
    resolves into actual section numbers.
    """
    option_spec = {'depth': int,
                   'start': int,
                   'prefix': directives.unchanged_required,
                   'suffix': directives.unchanged_required}
    def run(self):
        placeholder = nodes.pending(parts.SectNum)
        placeholder.details.update(self.options)
        self.state_machine.document.note_pending(placeholder)
        return [placeholder]
class Header(Directive):
    """Contents of document header.

    Parses the directive content directly into the document's header
    decoration node.
    """
    has_content = True
    def run(self):
        self.assert_has_content()
        decoration = self.state_machine.document.get_decoration()
        self.state.nested_parse(self.content, self.content_offset,
                                decoration.get_header())
        return []
class Footer(Directive):
    """Contents of document footer.

    Parses the directive content directly into the document's footer
    decoration node.
    """
    has_content = True
    def run(self):
        self.assert_has_content()
        decoration = self.state_machine.document.get_decoration()
        self.state.nested_parse(self.content, self.content_offset,
                                decoration.get_footer())
        return []
| apache-2.0 |
citrix-openstack-build/swift | swift/container/server.py | 2 | 23855 | # Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import os
import time
import traceback
from datetime import datetime
from swift import gettext_ as _
from xml.etree.cElementTree import Element, SubElement, tostring
from eventlet import Timeout
import swift.common.db
from swift.container.backend import ContainerBroker
from swift.common.db import DatabaseAlreadyExists
from swift.common.request_helpers import get_param, get_listing_content_type, \
split_and_validate_path
from swift.common.utils import get_logger, public, validate_sync_to, \
config_true_value, json, timing_stats, replication, \
override_bytes_from_content_type
from swift.common.ondisk import hash_path, normalize_timestamp, \
storage_directory
from swift.common.constraints import CONTAINER_LISTING_LIMIT, \
check_mount, check_float, check_utf8
from swift.common.bufferedhttp import http_connect
from swift.common.exceptions import ConnectionTimeout
from swift.common.db_replicator import ReplicatorRpc
from swift.common.http import HTTP_NOT_FOUND, is_success
from swift.common.swob import HTTPAccepted, HTTPBadRequest, HTTPConflict, \
HTTPCreated, HTTPInternalServerError, HTTPNoContent, HTTPNotFound, \
HTTPPreconditionFailed, HTTPMethodNotAllowed, Request, Response, \
HTTPInsufficientStorage, HTTPException, HeaderKeyDict
DATADIR = 'containers'
class ContainerController(object):
"""WSGI Controller for the container server."""
# Ensure these are all lowercase
save_headers = ['x-container-read', 'x-container-write',
'x-container-sync-key', 'x-container-sync-to']
    def __init__(self, conf):
        """Initialize the controller from a paste.deploy config dict.

        :param conf: WSGI configuration dictionary (string values).
        """
        self.logger = get_logger(conf, log_route='container-server')
        self.root = conf.get('devices', '/srv/node/')
        self.mount_check = config_true_value(conf.get('mount_check', 'true'))
        self.node_timeout = int(conf.get('node_timeout', 3))
        self.conn_timeout = float(conf.get('conn_timeout', 0.5))
        replication_server = conf.get('replication_server', None)
        if replication_server is not None:
            replication_server = config_true_value(replication_server)
        # None = serve all methods; True/False = restrict this server to
        # replication-only / non-replication methods (enforced in __call__).
        self.replication_server = replication_server
        self.allowed_sync_hosts = [
            h.strip()
            for h in conf.get('allowed_sync_hosts', '127.0.0.1').split(',')
            if h.strip()]
        self.replicator_rpc = ReplicatorRpc(
            self.root, DATADIR, ContainerBroker, self.mount_check,
            logger=self.logger)
        self.auto_create_account_prefix = \
            conf.get('auto_create_account_prefix') or '.'
        # NOTE(review): this appends to the class-level save_headers list,
        # so the change is shared by every instance in this process.
        if config_true_value(conf.get('allow_versions', 'f')):
            self.save_headers.append('x-versions-location')
        # Module-wide toggle, not per-instance.
        swift.common.db.DB_PREALLOCATION = \
            config_true_value(conf.get('db_preallocation', 'f'))
def _get_container_broker(self, drive, part, account, container, **kwargs):
"""
Get a DB broker for the container.
:param drive: drive that holds the container
:param part: partition the container is in
:param account: account name
:param container: container name
:returns: ContainerBroker object
"""
hsh = hash_path(account, container)
db_dir = storage_directory(DATADIR, part, hsh)
db_path = os.path.join(self.root, drive, db_dir, hsh + '.db')
kwargs.setdefault('account', account)
kwargs.setdefault('container', container)
kwargs.setdefault('logger', self.logger)
return ContainerBroker(db_path, **kwargs)
    def account_update(self, req, account, container, broker):
        """
        Update the account server(s) with latest container info.

        :param req: swob.Request object
        :param account: account name
        :param container: container name
        :param broker: container DB broker object
        :returns: if all the account requests return a 404 error code,
                  HTTPNotFound response object,
                  if the account cannot be updated due to a malformed header,
                  an HTTPBadRequest response object,
                  otherwise None.
        """
        account_hosts = [h.strip() for h in
                         req.headers.get('X-Account-Host', '').split(',')]
        account_devices = [d.strip() for d in
                           req.headers.get('X-Account-Device', '').split(',')]
        account_partition = req.headers.get('X-Account-Partition', '')
        if len(account_hosts) != len(account_devices):
            # This shouldn't happen unless there's a bug in the proxy,
            # but if there is, we want to know about it.
            self.logger.error(_('ERROR Account update failed: different '
                                'numbers of hosts and devices in request: '
                                '"%s" vs "%s"') %
                              (req.headers.get('X-Account-Host', ''),
                               req.headers.get('X-Account-Device', '')))
            return HTTPBadRequest(req=req)
        # No partition header means the proxy did not request an account
        # update for this operation.
        if account_partition:
            updates = zip(account_hosts, account_devices)
        else:
            updates = []
        account_404s = 0
        for account_host, account_device in updates:
            # rsplit so IPv6-ish host strings with colons keep the last
            # segment as the port.
            account_ip, account_port = account_host.rsplit(':', 1)
            new_path = '/' + '/'.join([account, container])
            info = broker.get_info()
            account_headers = HeaderKeyDict({
                'x-put-timestamp': info['put_timestamp'],
                'x-delete-timestamp': info['delete_timestamp'],
                'x-object-count': info['object_count'],
                'x-bytes-used': info['bytes_used'],
                'x-trans-id': req.headers.get('x-trans-id', '-'),
                'user-agent': 'container-server %s' % os.getpid(),
                'referer': req.as_referer()})
            if req.headers.get('x-account-override-deleted', 'no').lower() == \
                    'yes':
                account_headers['x-account-override-deleted'] = 'yes'
            try:
                with ConnectionTimeout(self.conn_timeout):
                    conn = http_connect(
                        account_ip, account_port, account_device,
                        account_partition, 'PUT', new_path, account_headers)
                with Timeout(self.node_timeout):
                    account_response = conn.getresponse()
                    account_response.read()
                    if account_response.status == HTTP_NOT_FOUND:
                        account_404s += 1
                    elif not is_success(account_response.status):
                        # Non-404 failures are logged but tolerated; the
                        # message says replication/updaters retry later.
                        self.logger.error(_(
                            'ERROR Account update failed '
                            'with %(ip)s:%(port)s/%(device)s (will retry '
                            'later): Response %(status)s %(reason)s'),
                            {'ip': account_ip, 'port': account_port,
                             'device': account_device,
                             'status': account_response.status,
                             'reason': account_response.reason})
            except (Exception, Timeout):
                self.logger.exception(_(
                    'ERROR account update failed with '
                    '%(ip)s:%(port)s/%(device)s (will retry later)'),
                    {'ip': account_ip, 'port': account_port,
                     'device': account_device})
        # Only report NotFound when every contacted account server 404'd.
        if updates and account_404s == len(updates):
            return HTTPNotFound(req=req)
        else:
            return None
    @public
    @timing_stats()
    def DELETE(self, req):
        """Handle HTTP DELETE request.

        With 5 path components this deletes an object record from the
        container DB; with 4 it deletes the (empty) container itself.
        """
        drive, part, account, container, obj = split_and_validate_path(
            req, 4, 5, True)
        if 'x-timestamp' not in req.headers or \
                not check_float(req.headers['x-timestamp']):
            return HTTPBadRequest(body='Missing timestamp', request=req,
                                  content_type='text/plain')
        if self.mount_check and not check_mount(self.root, drive):
            return HTTPInsufficientStorage(drive=drive, request=req)
        broker = self._get_container_broker(drive, part, account, container)
        # Auto-create the container DB for reserved accounts (prefix '.')
        # so object-row deletes don't 404 before the DB replicates here.
        if account.startswith(self.auto_create_account_prefix) and obj and \
                not os.path.exists(broker.db_file):
            try:
                broker.initialize(normalize_timestamp(
                    req.headers.get('x-timestamp') or time.time()))
            except DatabaseAlreadyExists:
                pass
        if not os.path.exists(broker.db_file):
            return HTTPNotFound()
        if obj:     # delete object
            broker.delete_object(obj, req.headers.get('x-timestamp'))
            return HTTPNoContent(request=req)
        else:
            # delete container
            if not broker.empty():
                return HTTPConflict(request=req)
            # 'existed' distinguishes 204 (was there) from 404 (never was).
            existed = float(broker.get_info()['put_timestamp']) and \
                not broker.is_deleted()
            broker.delete_db(req.headers['X-Timestamp'])
            # A newer PUT timestamp can prevent the delete from sticking.
            if not broker.is_deleted():
                return HTTPConflict(request=req)
            resp = self.account_update(req, account, container, broker)
            if resp:
                return resp
            if existed:
                return HTTPNoContent(request=req)
            return HTTPNotFound()
@public
@timing_stats()
def PUT(self, req):
"""Handle HTTP PUT request."""
drive, part, account, container, obj = split_and_validate_path(
req, 4, 5, True)
if 'x-timestamp' not in req.headers or \
not check_float(req.headers['x-timestamp']):
return HTTPBadRequest(body='Missing timestamp', request=req,
content_type='text/plain')
if 'x-container-sync-to' in req.headers:
err = validate_sync_to(req.headers['x-container-sync-to'],
self.allowed_sync_hosts)
if err:
return HTTPBadRequest(err)
if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req)
timestamp = normalize_timestamp(req.headers['x-timestamp'])
broker = self._get_container_broker(drive, part, account, container)
if obj: # put container object
if account.startswith(self.auto_create_account_prefix) and \
not os.path.exists(broker.db_file):
try:
broker.initialize(timestamp)
except DatabaseAlreadyExists:
pass
if not os.path.exists(broker.db_file):
return HTTPNotFound()
broker.put_object(obj, timestamp, int(req.headers['x-size']),
req.headers['x-content-type'],
req.headers['x-etag'])
return HTTPCreated(request=req)
else: # put container
if not os.path.exists(broker.db_file):
try:
broker.initialize(timestamp)
created = True
except DatabaseAlreadyExists:
pass
else:
created = broker.is_deleted()
broker.update_put_timestamp(timestamp)
if broker.is_deleted():
return HTTPConflict(request=req)
metadata = {}
metadata.update(
(key, (value, timestamp))
for key, value in req.headers.iteritems()
if key.lower() in self.save_headers or
key.lower().startswith('x-container-meta-'))
if metadata:
if 'X-Container-Sync-To' in metadata:
if 'X-Container-Sync-To' not in broker.metadata or \
metadata['X-Container-Sync-To'][0] != \
broker.metadata['X-Container-Sync-To'][0]:
broker.set_x_container_sync_points(-1, -1)
broker.update_metadata(metadata)
resp = self.account_update(req, account, container, broker)
if resp:
return resp
if created:
return HTTPCreated(request=req)
else:
return HTTPAccepted(request=req)
    @public
    @timing_stats(sample_rate=0.1)
    def HEAD(self, req):
        """Handle HTTP HEAD request.

        Returns container stats and saved metadata as response headers with
        an empty body.
        """
        drive, part, account, container, obj = split_and_validate_path(
            req, 4, 5, True)
        out_content_type = get_listing_content_type(req)
        if self.mount_check and not check_mount(self.root, drive):
            return HTTPInsufficientStorage(drive=drive, request=req)
        # pending_timeout/stale_reads_ok relax broker strictness for this
        # read-only path (semantics defined by ContainerBroker).
        broker = self._get_container_broker(drive, part, account, container,
                                            pending_timeout=0.1,
                                            stale_reads_ok=True)
        if broker.is_deleted():
            return HTTPNotFound(request=req)
        info = broker.get_info()
        headers = {
            'X-Container-Object-Count': info['object_count'],
            'X-Container-Bytes-Used': info['bytes_used'],
            'X-Timestamp': info['created_at'],
            'X-PUT-Timestamp': info['put_timestamp'],
        }
        # Expose saved sync/acl headers and user metadata (skip emptied
        # values, which represent removed metadata).
        headers.update(
            (key, value)
            for key, (value, timestamp) in broker.metadata.iteritems()
            if value != '' and (key.lower() in self.save_headers or
                                key.lower().startswith('x-container-meta-')))
        headers['Content-Type'] = out_content_type
        return HTTPNoContent(request=req, headers=headers, charset='utf-8')
def update_data_record(self, record):
"""
Perform any mutations to container listing records that are common to
all serialization formats, and returns it as a dict.
Converts created time to iso timestamp.
Replaces size with 'swift_bytes' content type parameter.
:params record: object entry record
:returns: modified record
"""
(name, created, size, content_type, etag) = record
if content_type is None:
return {'subdir': name}
response = {'bytes': size, 'hash': etag, 'name': name,
'content_type': content_type}
last_modified = datetime.utcfromtimestamp(float(created)).isoformat()
# python isoformat() doesn't include msecs when zero
if len(last_modified) < len("1970-01-01T00:00:00.000000"):
last_modified += ".000000"
response['last_modified'] = last_modified
override_bytes_from_content_type(response, logger=self.logger)
return response
    @public
    @timing_stats()
    def GET(self, req):
        """Handle HTTP GET request.

        Returns the container listing serialized as plain text, JSON or XML
        depending on the negotiated content type.
        """
        drive, part, account, container, obj = split_and_validate_path(
            req, 4, 5, True)
        path = get_param(req, 'path')
        prefix = get_param(req, 'prefix')
        delimiter = get_param(req, 'delimiter')
        if delimiter and (len(delimiter) > 1 or ord(delimiter) > 254):
            # delimiters can be made more flexible later
            return HTTPPreconditionFailed(body='Bad delimiter')
        marker = get_param(req, 'marker', '')
        end_marker = get_param(req, 'end_marker')
        limit = CONTAINER_LISTING_LIMIT
        given_limit = get_param(req, 'limit')
        if given_limit and given_limit.isdigit():
            limit = int(given_limit)
            if limit > CONTAINER_LISTING_LIMIT:
                return HTTPPreconditionFailed(
                    request=req,
                    body='Maximum limit is %d' % CONTAINER_LISTING_LIMIT)
        out_content_type = get_listing_content_type(req)
        if self.mount_check and not check_mount(self.root, drive):
            return HTTPInsufficientStorage(drive=drive, request=req)
        # Relaxed broker settings for this read-only path (semantics defined
        # by ContainerBroker).
        broker = self._get_container_broker(drive, part, account, container,
                                            pending_timeout=0.1,
                                            stale_reads_ok=True)
        if broker.is_deleted():
            return HTTPNotFound(request=req)
        info = broker.get_info()
        resp_headers = {
            'X-Container-Object-Count': info['object_count'],
            'X-Container-Bytes-Used': info['bytes_used'],
            'X-Timestamp': info['created_at'],
            'X-PUT-Timestamp': info['put_timestamp'],
        }
        for key, (value, timestamp) in broker.metadata.iteritems():
            if value and (key.lower() in self.save_headers or
                          key.lower().startswith('x-container-meta-')):
                resp_headers[key] = value
        ret = Response(request=req, headers=resp_headers,
                       content_type=out_content_type, charset='utf-8')
        container_list = broker.list_objects_iter(limit, marker, end_marker,
                                                  prefix, delimiter, path)
        if out_content_type == 'application/json':
            ret.body = json.dumps([self.update_data_record(record)
                                   for record in container_list])
        elif out_content_type.endswith('/xml'):
            doc = Element('container', name=container.decode('utf-8'))
            for obj in container_list:
                record = self.update_data_record(obj)
                if 'subdir' in record:
                    name = record['subdir'].decode('utf-8')
                    sub = SubElement(doc, 'subdir', name=name)
                    SubElement(sub, 'name').text = name
                else:
                    obj_element = SubElement(doc, 'object')
                    # Emit the well-known fields first, in fixed order, then
                    # any remaining fields alphabetically.
                    for field in ["name", "hash", "bytes", "content_type",
                                  "last_modified"]:
                        SubElement(obj_element, field).text = str(
                            record.pop(field)).decode('utf-8')
                    for field in sorted(record):
                        SubElement(obj_element, field).text = str(
                            record[field]).decode('utf-8')
            # Normalize cElementTree's single-quoted XML declaration to the
            # double-quoted form.
            ret.body = tostring(doc, encoding='UTF-8').replace(
                "<?xml version='1.0' encoding='UTF-8'?>",
                '<?xml version="1.0" encoding="UTF-8"?>', 1)
        else:
            # Plain-text listing: one object name per line; 204 when empty.
            if not container_list:
                return HTTPNoContent(request=req, headers=resp_headers)
            ret.body = '\n'.join(rec[0] for rec in container_list) + '\n'
        return ret
@public
@replication
@timing_stats(sample_rate=0.01)
def REPLICATE(self, req):
"""
Handle HTTP REPLICATE request (json-encoded RPC calls for replication.)
"""
post_args = split_and_validate_path(req, 3)
drive, partition, hash = post_args
if self.mount_check and not check_mount(self.root, drive):
return HTTPInsufficientStorage(drive=drive, request=req)
try:
args = json.load(req.environ['wsgi.input'])
except ValueError as err:
return HTTPBadRequest(body=str(err), content_type='text/plain')
ret = self.replicator_rpc.dispatch(post_args, args)
ret.request = req
return ret
    @public
    @timing_stats()
    def POST(self, req):
        """Handle HTTP POST request.

        Updates container metadata (sync/acl headers and x-container-meta-*)
        without touching object records.
        """
        drive, part, account, container = split_and_validate_path(req, 4)
        if 'x-timestamp' not in req.headers or \
                not check_float(req.headers['x-timestamp']):
            return HTTPBadRequest(body='Missing or bad timestamp',
                                  request=req, content_type='text/plain')
        if 'x-container-sync-to' in req.headers:
            err = validate_sync_to(req.headers['x-container-sync-to'],
                                   self.allowed_sync_hosts)
            if err:
                return HTTPBadRequest(err)
        if self.mount_check and not check_mount(self.root, drive):
            return HTTPInsufficientStorage(drive=drive, request=req)
        broker = self._get_container_broker(drive, part, account, container)
        if broker.is_deleted():
            return HTTPNotFound(request=req)
        timestamp = normalize_timestamp(req.headers['x-timestamp'])
        # Stamp each saved header with the request timestamp so newest-wins
        # merging works across replicas (same scheme as PUT).
        metadata = {}
        metadata.update(
            (key, (value, timestamp)) for key, value in req.headers.iteritems()
            if key.lower() in self.save_headers or
            key.lower().startswith('x-container-meta-'))
        if metadata:
            if 'X-Container-Sync-To' in metadata:
                if 'X-Container-Sync-To' not in broker.metadata or \
                        metadata['X-Container-Sync-To'][0] != \
                        broker.metadata['X-Container-Sync-To'][0]:
                    # Sync target changed; restart syncing from scratch.
                    broker.set_x_container_sync_points(-1, -1)
            broker.update_metadata(metadata)
        return HTTPNoContent(request=req)
    def __call__(self, env, start_response):
        """WSGI entry point: dispatch to the HTTP-verb method and log.

        :param env: WSGI environment dictionary
        :param start_response: WSGI start_response callable
        """
        start_time = time.time()
        req = Request(env)
        self.logger.txn_id = req.headers.get('x-trans-id', None)
        if not check_utf8(req.path_info):
            res = HTTPPreconditionFailed(body='Invalid UTF8 or contains NULL')
        else:
            try:
                # disallow methods which have not been marked 'public'
                try:
                    method = getattr(self, req.method)
                    # Raises AttributeError for non-@public methods.
                    getattr(method, 'publicly_accessible')
                    replication_method = getattr(method, 'replication', False)
                    # Enforce the replication_server=true/false restriction
                    # configured in __init__ (None means no restriction).
                    if (self.replication_server is not None and
                            self.replication_server != replication_method):
                        raise AttributeError('Not allowed method.')
                except AttributeError:
                    res = HTTPMethodNotAllowed()
                else:
                    res = method(req)
            except HTTPException as error_response:
                res = error_response
            except (Exception, Timeout):
                self.logger.exception(_(
                    'ERROR __call__ error with %(method)s %(path)s '),
                    {'method': req.method, 'path': req.path})
                res = HTTPInternalServerError(body=traceback.format_exc())
        # Apache-combined-style access log line with transaction id and
        # request duration appended.
        trans_time = '%.4f' % (time.time() - start_time)
        log_message = '%s - - [%s] "%s %s" %s %s "%s" "%s" "%s" %s' % (
            req.remote_addr,
            time.strftime('%d/%b/%Y:%H:%M:%S +0000',
                          time.gmtime()),
            req.method, req.path,
            res.status.split()[0], res.content_length or '-',
            req.headers.get('x-trans-id', '-'),
            req.referer or '-', req.user_agent or '-',
            trans_time)
        # Replication chatter is noisy, so it only gets debug level.
        if req.method.upper() == 'REPLICATE':
            self.logger.debug(log_message)
        else:
            self.logger.info(log_message)
        return res(env, start_response)
def app_factory(global_conf, **local_conf):
    """paste.deploy app factory for creating WSGI container server apps"""
    # Local settings take precedence over the global section.
    conf = dict(global_conf, **local_conf)
    return ContainerController(conf)
| apache-2.0 |
UnILabKAIST/slask | limbo/plugins/map.py | 10 | 1331 | # -*- coding: utf-8 -*-
"""!map <place> return a map of place. Optional "zoom" and "maptype" parameters are accepted."""
# example queries:
# !map new york city
# !map united states zoom=4
# !map united states zoom=4 maptype=satellite
try:
from urllib import quote
except ImportError:
from urllib.request import quote
import re
def makemap(query):
    """Build a Google Static Maps URL for *query*.

    Words of the form ``key=value`` (e.g. ``zoom=4``, ``maptype=satellite``)
    are treated as options; every remaining word is part of the place name.
    """
    options = {"maptype": "roadmap"}
    place_words = []
    for token in query.split(" "):
        if '=' in token:
            key, value = token.split("=")
            options[key] = value
        else:
            place_words.append(token)
    place = quote(" ".join(place_words).encode("utf8"))
    # Slack seems to ignore the size param
    #
    # To get google to auto-reasonably-zoom its map, you have to use a marker
    # instead of using a "center" parameter. I found that setting it to tiny
    # and grey makes it the least visible.
    url = "https://maps.googleapis.com/maps/api/staticmap?size=800x400&markers=size:tiny%7Ccolor:0xAAAAAA%7C{0}&maptype={1}"
    url = url.format(place, options["maptype"])
    if "zoom" in options:
        url += "&zoom={0}".format(options["zoom"])
    return url
def on_message(msg, server):
    """Respond to '!map <place>' messages with a static-map URL."""
    found = re.search(r"!map (.*)", msg.get("text", ""))
    if found is None:
        return None
    return makemap(found.group(1))
| mit |
ChrisGoedhart/Uforia | source/django/conf/locale/sr/formats.py | 655 | 1980 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
# Display formats (Django date-format syntax; see link in module header).
DATE_FORMAT = 'j. F Y.'
TIME_FORMAT = 'H:i'
DATETIME_FORMAT = 'j. F Y. H:i'
YEAR_MONTH_FORMAT = 'F Y.'
MONTH_DAY_FORMAT = 'j. F'
SHORT_DATE_FORMAT = 'j.m.Y.'
SHORT_DATETIME_FORMAT = 'j.m.Y. H:i'
FIRST_DAY_OF_WEEK = 1  # Monday (Django convention: 0 is Sunday)
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Input formats are tried in order; the first that parses wins.
DATE_INPUT_FORMATS = (
    '%d.%m.%Y.', '%d.%m.%y.', # '25.10.2006.', '25.10.06.'
    '%d. %m. %Y.', '%d. %m. %y.', # '25. 10. 2006.', '25. 10. 06.'
    '%Y-%m-%d', # '2006-10-25'
    # '%d. %b %y.', '%d. %B %y.', # '25. Oct 06.', '25. October 06.'
    # '%d. %b \'%y.', '%d. %B \'%y.', # '25. Oct '06.', '25. October '06.'
    # '%d. %b %Y.', '%d. %B %Y.', # '25. Oct 2006.', '25. October 2006.'
)
TIME_INPUT_FORMATS = (
    '%H:%M:%S', # '14:30:59'
    '%H:%M', # '14:30'
)
DATETIME_INPUT_FORMATS = (
    '%d.%m.%Y. %H:%M:%S', # '25.10.2006. 14:30:59'
    '%d.%m.%Y. %H:%M', # '25.10.2006. 14:30'
    '%d.%m.%Y.', # '25.10.2006.'
    '%d.%m.%y. %H:%M:%S', # '25.10.06. 14:30:59'
    '%d.%m.%y. %H:%M', # '25.10.06. 14:30'
    '%d.%m.%y.', # '25.10.06.'
    '%d. %m. %Y. %H:%M:%S', # '25. 10. 2006. 14:30:59'
    '%d. %m. %Y. %H:%M', # '25. 10. 2006. 14:30'
    '%d. %m. %Y.', # '25. 10. 2006.'
    '%d. %m. %y. %H:%M:%S', # '25. 10. 06. 14:30:59'
    '%d. %m. %y. %H:%M', # '25. 10. 06. 14:30'
    '%d. %m. %y.', # '25. 10. 06.'
    '%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
    '%Y-%m-%d %H:%M', # '2006-10-25 14:30'
    '%Y-%m-%d', # '2006-10-25'
)
# Number formatting for the Serbian locale: 1.234.567,89
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| gpl-2.0 |
levkar/odoo-addons | account_check/models/account_voucher.py | 2 | 5976 | # -*- coding: utf-8 -*-
from openerp import models, fields, _, api
import openerp.addons.decimal_precision as dp
import logging
from openerp.exceptions import Warning
_logger = logging.getLogger(__name__)
class account_voucher(models.Model):
_inherit = 'account.voucher'
received_third_check_ids = fields.One2many(
'account.check', 'voucher_id', 'Third Checks',
domain=[('type', '=', 'third')],
context={'default_type': 'third', 'from_voucher': True},
required=False, readonly=True, copy=False,
states={'draft': [('readonly', False)]}
)
issued_check_ids = fields.One2many(
'account.check', 'voucher_id', 'Issued Checks',
domain=[('type', '=', 'issue')],
context={'default_type': 'issue', 'from_voucher': True}, copy=False,
required=False, readonly=True, states={'draft': [('readonly', False)]}
)
delivered_third_check_ids = fields.One2many(
'account.check', 'third_handed_voucher_id',
'Third Checks', domain=[('type', '=', 'third')], copy=False,
context={'from_voucher': True}, required=False, readonly=True,
states={'draft': [('readonly', False)]}
)
validate_only_checks = fields.Boolean(
related='journal_id.validate_only_checks',
string='Validate only Checks', readonly=True,
)
check_type = fields.Selection(
related='journal_id.check_type',
string='Check Type', readonly=True,
)
dummy_journal_id = fields.Many2one(
related='journal_id', readonly=True,
string='Dummy Journa',
help='Field used for new api onchange methods over journal',
)
amount_readonly = fields.Float(
related='amount', string='Total',
digits_compute=dp.get_precision('Account'), readonly=True,
)
@api.onchange('dummy_journal_id')
def change_dummy_journal_id(self):
"""Unlink checks on journal change"""
self.delivered_third_check_ids = False
self.issued_check_ids = False
self.received_third_check_ids = False
@api.multi
def action_cancel_draft(self):
res = super(account_voucher, self).action_cancel_draft()
checks = self.env['account.check'].search(
[('voucher_id', 'in', self.ids)])
checks.action_cancel_draft()
return res
    @api.model
    def first_move_line_get(
            self, voucher_id, move_id, company_currency,
            current_currency):
        """Extend the voucher's first move line computation to also store
        each check's amount converted to company currency.

        The implicit exchange rate is derived from the move line itself
        (debit - credit over the voucher amount) rather than looked up.
        """
        vals = super(account_voucher, self).first_move_line_get(
            voucher_id, move_id, company_currency, current_currency)
        voucher = self.browse(voucher_id)
        # 'and voucher.amount' guards the division below against a zero
        # voucher amount.
        if company_currency != current_currency and voucher.amount:
            # NOTE(review): assumes super() always sets both 'debit' and
            # 'credit' in vals; .get() would yield None and break the
            # subtraction otherwise — confirm against parent implementation.
            debit = vals.get('debit')
            credit = vals.get('credit')
            total = debit - credit
            exchange_rate = total / voucher.amount
            checks = []
            if voucher.check_type == 'third':
                checks = voucher.received_third_check_ids
            elif voucher.check_type == 'issue':
                checks = voucher.issued_check_ids
            for check in checks:
                company_currency_amount = abs(check.amount * exchange_rate)
                # Side effect: writes the converted amount on each check.
                check.company_currency_amount = company_currency_amount
        return vals
@api.multi
def cancel_voucher(self):
for voucher in self:
for check in voucher.received_third_check_ids:
if check.state not in ['draft', 'holding']:
raise Warning(_(
'You can not cancel a voucher thas has received third checks in states other than "draft or "holding". First try to change check state.'))
for check in voucher.issued_check_ids:
if check.state not in ['draft', 'handed']:
raise Warning(_(
'You can not cancel a voucher thas has issue checks in states other than "draft or "handed". First try to change check state.'))
for check in voucher.delivered_third_check_ids:
if check.state not in ['handed']:
raise Warning(_(
'You can not cancel a voucher thas has delivered checks in states other than "handed". First try to change check state.'))
res = super(account_voucher, self).cancel_voucher()
checks = self.env['account.check'].search([
'|',
('voucher_id', 'in', self.ids),
('third_handed_voucher_id', 'in', self.ids)])
for check in checks:
check.signal_workflow('cancel')
return res
def proforma_voucher(self, cr, uid, ids, context=None):
res = super(account_voucher, self).proforma_voucher(
cr, uid, ids, context=None)
for voucher in self.browse(cr, uid, ids, context=context):
if voucher.type == 'payment':
for check in voucher.issued_check_ids:
check.signal_workflow('draft_router')
for check in voucher.delivered_third_check_ids:
check.signal_workflow('holding_handed')
elif voucher.type == 'receipt':
for check in voucher.received_third_check_ids:
check.signal_workflow('draft_router')
return res
    @api.one
    @api.onchange('amount_readonly')
    def onchange_amount_readonly(self):
        # Mirror the readonly display field back into the real 'amount'.
        self.amount = self.amount_readonly
@api.one
@api.onchange('received_third_check_ids', 'issued_check_ids')
def onchange_customer_checks(self):
self.amount_readonly = sum(
x.amount for x in self.received_third_check_ids)
@api.one
@api.onchange('delivered_third_check_ids', 'issued_check_ids')
def onchange_supplier_checks(self):
amount = sum(x.amount for x in self.delivered_third_check_ids)
amount += sum(x.amount for x in self.issued_check_ids)
self.amount_readonly = amount
| agpl-3.0 |
victoryckl/zxing-2.2 | cpp/scons/scons-local-2.0.0.final.0/SCons/Tool/Perforce.py | 34 | 3814 | """SCons.Tool.Perforce.py
Tool-specific initialization for Perforce Source Code Management system.
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
__revision__ = "src/engine/SCons/Tool/Perforce.py 5023 2010/06/14 22:05:46 scons"
import os
import SCons.Action
import SCons.Builder
import SCons.Node.FS
import SCons.Util
# This function should maybe be moved to SCons.Util?
from SCons.Tool.PharLapCommon import addPathIfNotExists
# Variables that we want to import from the base OS environment.
_import_env = [ 'P4PORT', 'P4CLIENT', 'P4USER', 'USER', 'USERNAME', 'P4PASSWD',
'P4CHARSET', 'P4LANGUAGE', 'SystemRoot' ]
PerforceAction = SCons.Action.Action('$P4COM', '$P4COMSTR')
def generate(env):
    """Add a Builder factory function and construction variables for
    Perforce to an Environment."""
    def PerforceFactory(env=env):
        """ """
        # Deprecated entry point kept for backward compatibility only.
        import SCons.Warnings as W
        W.warn(W.DeprecatedSourceCodeWarning, """The Perforce() factory is deprecated and there is no replacement.""")
        return SCons.Builder.Builder(action = PerforceAction, env = env)
    #setattr(env, 'Perforce', PerforceFactory)
    env.Perforce = PerforceFactory
    env['P4'] = 'p4'
    env['P4FLAGS'] = SCons.Util.CLVar('')
    env['P4COM'] = '$P4 $P4FLAGS sync $TARGET'
    try:
        environ = env['ENV']
    except KeyError:
        environ = {}
        env['ENV'] = environ
    # Perforce seems to use the PWD environment variable rather than
    # calling getcwd() for itself, which is odd. If no PWD variable
    # is present, p4 WILL call getcwd, but this seems to cause problems
    # with good ol' Windows's tilde-mangling for long file names.
    environ['PWD'] = env.Dir('#').get_abspath()
    # Forward the Perforce-related variables from the real OS environment
    # (listed in module-level _import_env) into the build environment.
    for var in _import_env:
        v = os.environ.get(var)
        if v:
            environ[var] = v
    if SCons.Util.can_read_reg:
        # If we can read the registry, add the path to Perforce to our environment.
        try:
            k=SCons.Util.RegOpenKeyEx(SCons.Util.hkey_mod.HKEY_LOCAL_MACHINE,
                                      'Software\\Perforce\\environment')
            val, tok = SCons.Util.RegQueryValueEx(k, 'P4INSTROOT')
            addPathIfNotExists(environ, 'PATH', val)
        except SCons.Util.RegError:
            # Can't detect where Perforce is, hope the user has it set in the
            # PATH.
            pass
def exists(env):
    """Return a true value if the 'p4' executable can be found."""
    return env.Detect('p4')
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| apache-2.0 |
sebp/scikit-survival | tests/test_stacking.py | 1 | 8545 | import numpy
from numpy.testing import assert_array_almost_equal
import pytest
from sklearn.base import BaseEstimator
from sklearn.datasets import load_iris
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import accuracy_score, roc_auc_score
from sklearn.svm import SVC
from sklearn.tree import DecisionTreeClassifier
from sksurv.linear_model import CoxPHSurvivalAnalysis
from sksurv.meta import MeanEstimator, Stacking
from sksurv.svm import FastSurvivalSVM
from sksurv.testing import assert_cindex_almost_equal
class _NoFitEstimator(BaseEstimator):
    """Stub estimator with neither fit nor predict; used to trigger validation errors."""
    pass
class _NoPredictDummy(BaseEstimator):
    """Stub estimator with fit but no predict; must be rejected as a base estimator."""

    def fit(self, X, y):
        """No-op fit."""
        pass
class _PredictDummy(BaseEstimator):
    """Stub estimator with fit and predict; a minimal valid base estimator."""

    def fit(self, X, y):
        """No-op fit."""
        pass

    def predict(self, X):
        """No-op predict (returns None)."""
        pass
class _PredictProbaDummy(BaseEstimator):
    """Stub estimator exposing fit and predict_proba only."""

    def fit(self, X, y):
        """No-op fit."""
        pass

    def predict_proba(self, X):
        """No-op predict_proba (returns None)."""
        pass
class TestStackingClassifier:
    """Tests for Stacking with a classification meta estimator on the iris data."""

    @staticmethod
    @pytest.mark.parametrize('estimator', [_NoFitEstimator, _NoPredictDummy])
    def test_base_estimator(estimator):
        """Base estimators lacking fit or predict/predict_proba are rejected."""
        with pytest.raises(TypeError,
                           match=r"All base estimators should implement fit and predict/predict_proba (.+) doesn't"):
            Stacking(_PredictDummy, [('m1', estimator)])

    @staticmethod
    def test_meta_no_fit():
        """The meta estimator itself must implement fit."""
        with pytest.raises(TypeError,
                           match=r"meta estimator should implement fit (.+) doesn't"):
            Stacking(_NoFitEstimator, [('m1', _PredictDummy)])

    @staticmethod
    def test_names_not_unique():
        """Duplicate base-estimator names raise a ValueError."""
        with pytest.raises(ValueError,
                           match=r"Names provided are not unique: \('m1', 'm2', 'm1'\)"):
            Stacking(_NoFitEstimator,
                     [('m1', _PredictDummy), ('m2', _PredictDummy), ('m1', _PredictDummy)])

    @staticmethod
    def test_fit():
        """Fitting stacks per-class probabilities of each base estimator."""
        data = load_iris()
        x = data["data"]
        y = data["target"]

        meta = Stacking(LogisticRegression(solver='liblinear', multi_class='ovr'),
                        [('tree', DecisionTreeClassifier(max_depth=1, random_state=0)),
                         ('svm', SVC(probability=True, gamma='auto', random_state=0))])
        assert 2 == len(meta)
        meta.fit(x, y)

        p = meta._predict_estimators(x)
        # 3 iris classes per base estimator -> 6 stacked feature columns
        assert (x.shape[0], 3 * 2) == p.shape
        assert (3, 3 * 2) == meta.meta_estimator.coef_.shape

    @staticmethod
    def test_fit_sample_weights():
        """Per-estimator sample weights are forwarded via <name>__ fit params."""
        data = load_iris()
        x = data["data"]
        y = data["target"]

        meta = Stacking(LogisticRegression(solver='liblinear', multi_class='ovr'),
                        [('tree', DecisionTreeClassifier(max_depth=1, random_state=0)),
                         ('svm', SVC(probability=True, gamma='auto', random_state=0))])
        sample_weight = numpy.random.RandomState(0).uniform(size=x.shape[0])
        meta.fit(x, y, tree__sample_weight=sample_weight, svm__sample_weight=sample_weight)

    @staticmethod
    def test_set_params():
        """set_params routes nested and top-level parameters correctly."""
        meta = Stacking(LogisticRegression(), [('tree', DecisionTreeClassifier(max_depth=1, random_state=0)),
                                               ('svm', SVC(probability=True, random_state=0))],
                        probabilities=True)
        assert 2 == len(meta)
        meta.set_params(tree__min_samples_split=7, svm__C=0.05)

        assert 7 == meta.get_params()["tree__min_samples_split"]
        assert 0.05 == meta.get_params()["svm__C"]

        assert isinstance(meta.get_params()["meta_estimator"], LogisticRegression)
        assert meta.get_params()["probabilities"]

        meta.set_params(meta_estimator=DecisionTreeClassifier(), probabilities=False)
        assert isinstance(meta.get_params()["meta_estimator"], DecisionTreeClassifier)
        assert not meta.get_params()["probabilities"]

        p = meta.get_params(deep=False)
        assert set(p.keys()) == {"meta_estimator", "base_estimators", "probabilities"}

    @staticmethod
    def test_predict():
        """Stacked predictions reach high accuracy on the training data."""
        data = load_iris()
        x = data["data"]
        y = data["target"]

        meta = Stacking(LogisticRegression(multi_class='multinomial', solver='lbfgs'),
                        [('tree', DecisionTreeClassifier(max_depth=1, random_state=0)),
                         ('svm', SVC(probability=True, gamma='auto', random_state=0))])
        assert 2 == len(meta)
        meta.fit(x, y)

        p = meta.predict(x)
        acc = accuracy_score(y, p)

        assert acc >= 0.98

    @staticmethod
    def test_predict_proba():
        """predict_proba yields near-perfect one-vs-rest ROC AUC per class."""
        data = load_iris()
        x = data["data"]
        y = data["target"]

        meta = Stacking(LogisticRegression(multi_class='multinomial', solver='lbfgs'),
                        [('tree', DecisionTreeClassifier(max_depth=1, random_state=0)),
                         ('svm', SVC(probability=True, gamma='auto', random_state=0))])
        meta.fit(x, y)

        p = meta.predict_proba(x)

        scores = numpy.empty(3)
        for i, c in enumerate(meta.meta_estimator.classes_):
            scores[i] = roc_auc_score(numpy.asarray(y == c, dtype=int), p[:, i])

        assert_array_almost_equal(numpy.array([1.0, 0.9986, 0.9986]), scores)

    @staticmethod
    def test_predict_log_proba():
        """predict_log_proba preserves the class ranking (same AUC as proba)."""
        data = load_iris()
        x = data["data"]
        y = data["target"]

        meta = Stacking(LogisticRegression(multi_class='multinomial', solver='lbfgs'),
                        [('tree', DecisionTreeClassifier(max_depth=1, random_state=0)),
                         ('svm', SVC(probability=True, gamma='auto', random_state=0))])
        meta.fit(x, y)

        p = meta.predict_log_proba(x)

        scores = numpy.empty(3)
        for i, c in enumerate(meta.meta_estimator.classes_):
            scores[i] = roc_auc_score(numpy.asarray(y == c, dtype=int), p[:, i])

        assert_array_almost_equal(numpy.array([1.0, 0.9986, 0.9986]), scores)
class TestStackingSurvivalAnalysis:
    """Tests for Stacking with survival models and a mean-risk meta estimator."""

    @staticmethod
    def test_fit(make_whas500):
        """probabilities=False stacks one raw risk column per base estimator."""
        whas500 = make_whas500(with_mean=False, with_std=False, to_numeric=True)

        meta = Stacking(MeanEstimator(),
                        [('coxph', CoxPHSurvivalAnalysis()),
                         ('svm', FastSurvivalSVM(random_state=0))],
                        probabilities=False)
        assert 2 == len(meta)
        meta.fit(whas500.x, whas500.y)

        p = meta._predict_estimators(whas500.x)
        assert (whas500.x.shape[0], 2) == p.shape

    @staticmethod
    def test_set_params():
        """set_params routes nested parameters to the named base estimators."""
        meta = Stacking(_PredictDummy(),
                        [('coxph', CoxPHSurvivalAnalysis()),
                         ('svm', FastSurvivalSVM(random_state=0))],
                        probabilities=False)

        meta.set_params(coxph__alpha=1.0, svm__alpha=0.4132)

        assert 1.0 == meta.get_params()["coxph__alpha"]
        assert 0.4132 == meta.get_params()["svm__alpha"]

    @staticmethod
    def test_predict(make_whas500):
        """Averaged risk scores reach the expected concordance index."""
        whas500 = make_whas500(with_mean=False, with_std=False, to_numeric=True)
        meta = Stacking(MeanEstimator(),
                        [('coxph', CoxPHSurvivalAnalysis()),
                         ('svm', FastSurvivalSVM(random_state=0))],
                        probabilities=False)

        meta.fit(whas500.x, whas500.y)

        # result is different if randomForestSRC has not been compiled with OpenMP support
        p = meta.predict(whas500.x)

        assert_cindex_almost_equal(whas500.y['fstat'], whas500.y['lenfol'], p,
                                   (0.7848807, 58983, 16166, 0, 14))

    @staticmethod
    def test_predict_proba():
        """predict_proba is unavailable when the meta estimator lacks it."""
        meta = Stacking(_PredictDummy(),
                        [('coxph', CoxPHSurvivalAnalysis()),
                         ('svm', FastSurvivalSVM(random_state=0))],
                        probabilities=False)

        with pytest.raises(AttributeError,
                           match="'_PredictDummy' object has no attribute 'predict_proba'"):
            meta.predict_proba  # pylint: disable=pointless-statement

    @staticmethod
    def test_score(make_whas500):
        """score returns the concordance index on the given data."""
        whas500 = make_whas500(with_mean=False, with_std=False, to_numeric=True)
        meta = Stacking(MeanEstimator(),
                        [('coxph', CoxPHSurvivalAnalysis()),
                         ('svm', FastSurvivalSVM(random_state=0))],
                        probabilities=False)
        meta.fit(whas500.x, whas500.y)

        c_index = meta.score(whas500.x, whas500.y)

        assert round(abs(c_index - 0.7848807), 5) == 0
| gpl-3.0 |
vrutkovs/atomic-reactor | atomic_reactor/plugins/post_tag_from_config.py | 2 | 4662 | """
Copyright (c) 2016 Red Hat, Inc
All rights reserved.
This software may be modified and distributed under the terms
of the BSD license. See the LICENSE file for details.
"""
import os
import re
from atomic_reactor.plugin import PostBuildPlugin
from atomic_reactor.constants import INSPECT_CONFIG, TAG_NAME_REGEX
from atomic_reactor.util import get_preferred_label_key, df_parser, LabelFormatter
class TagFromConfigPlugin(PostBuildPlugin):
    """
    Tags image with additional tags found in configuration file

    Configuration file must be named "additional-tags" and it must
    reside in repository as a sibling of Dockerfile. Each line in file
    is considered as a different tag to be applied. Empty lines and
    tag names containing hyphens are ignored. Tags will be prefixed by
    the value of Name label.

    For example, using the following configuration file:

        v1.0
        v1.0.1

    And assuming the Name label in Dockerfile is set to "fedora", the
    image will be tagged as:

        fedora:v1.0
        fedora:v1.0.1

    If configuration file is not found, this plugin takes no action.
    """
    key = 'tag_from_config'
    is_allowed_to_fail = False

    TAGS_FILENAME = 'additional-tags'

    def __init__(self, tasker, workflow, tag_suffixes=None):
        """
        :param tasker: DockerTasker instance
        :param workflow: DockerBuildWorkflow instance
        :param tag_suffixes: dict with optional 'unique' and 'primary' lists
            of tag suffixes; when None, tags are read from TAGS_FILENAME
            in the repository instead
        """
        super(TagFromConfigPlugin, self).__init__(tasker, workflow)
        self.tag_suffixes = tag_suffixes
        self.labels = None  # populated by lookup_labels() before tagging

    def parse_and_add_tags(self):
        """Register tags built from the configured tag suffixes.

        Unique suffixes are used verbatim; primary suffixes may contain
        "{label}" placeholders that are expanded from the image labels.

        :return: list of str, the fully qualified tags that were added
        """
        tags = []
        name = self.get_component_name()

        for tag_suffix in self.tag_suffixes.get('unique', []):
            tag = '{}:{}'.format(name, tag_suffix)
            self.log.debug('Using additional unique tag %s', tag)
            self.workflow.tag_conf.add_unique_image(tag)
            tags.append(tag)

        for tag_suffix in self.tag_suffixes.get('primary', []):
            # expand placeholders such as "{version}-{release}" from labels
            p_suffix = LabelFormatter().vformat(tag_suffix, [], self.labels)
            p_tag = '{}:{}'.format(name, p_suffix)
            self.log.debug('Using additional primary tag %s', p_tag)
            self.workflow.tag_conf.add_primary_image(p_tag)
            tags.append(p_tag)

        return tags

    def get_and_add_tags(self):
        """Read TAGS_FILENAME next to the build file and register its tags.

        Empty lines are skipped silently (as documented on the class);
        lines that fail TAG_NAME_REGEX or contain a dash are skipped
        with a warning.

        :return: list of str, the fully qualified tags that were added
        """
        tags = []

        build_file_dir = self.workflow.source.get_build_file_path()[1]
        tags_filename = os.path.join(build_file_dir, self.TAGS_FILENAME)
        if not os.path.exists(tags_filename):
            self.log.debug('"%s" not found. '
                           'No additional tags will be applied.',
                           tags_filename)
            return tags

        with open(tags_filename) as tags_file:
            for tag in tags_file:
                tag = tag.strip()
                if not tag:
                    # empty lines are documented as ignored; do not warn
                    continue
                tag_name_is_valid = re.match(TAG_NAME_REGEX, tag) is not None
                if tag_name_is_valid and '-' not in tag:
                    tags.append(tag)
                else:
                    # note the trailing space inside the first literal; without
                    # it the message would read "...'%s'or includes dashes..."
                    self.log.warning("tag '%s' does not match '%s' "
                                     "or includes dashes, ignoring", tag, TAG_NAME_REGEX)

        if tags:
            name = self.get_component_name()
            for i, tag_suffix in enumerate(tags):
                tag = '{}:{}'.format(name, tag_suffix)
                self.log.debug('Using additional tag: %s', tag)
                self.workflow.tag_conf.add_primary_image(tag)
                # Store modified name.
                tags[i] = tag

        return tags

    def get_component_name(self):
        """Return the image name taken from the preferred "name" label.

        :raises KeyError: if no name label can be found
        """
        try:
            name_label = str(get_preferred_label_key(self.labels, "name"))
            name = self.labels[name_label]
        except KeyError:
            self.log.error('Unable to determine component from "Labels"')
            raise
        return name

    def run(self):
        """Plugin entry point.

        :return: list of str, the tags that were applied
        """
        self.lookup_labels()

        if self.tag_suffixes is not None:
            tags = self.parse_and_add_tags()
        else:
            tags = self.get_and_add_tags()

        return tags

    def lookup_labels(self):
        """Populate self.labels from the built image or, failing that, the Dockerfile."""
        if self.workflow.build_result.is_image_available():
            if not self.workflow.built_image_inspect:
                raise RuntimeError('There is no inspect data for built image. '
                                   'Has the build succeeded?')
            try:
                self.labels = self.workflow.built_image_inspect[INSPECT_CONFIG]['Labels']
            except (TypeError, KeyError):
                self.log.error('Unable to determine "Labels" from built image')
                raise
        else:
            self.labels = df_parser(self.workflow.builder.df_path, workflow=self.workflow,
                                    env_replace=True).labels
| bsd-3-clause |
RAtechntukan/Sick-Beard | sickbeard/providers/ezrss.py | 35 | 5983 | # Author: Nic Wolfe <nic@wolfeden.ca>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import urllib
import re
from xml.dom.minidom import parseString
import sickbeard
import generic
from sickbeard.common import Quality
from sickbeard import logger
from sickbeard import tvcache
from sickbeard.helpers import sanitizeSceneName, get_xml_text
from sickbeard.exceptions import ex
class EZRSSProvider(generic.TorrentProvider):
    """Torrent provider backed by the EZRSS search and RSS feeds (Python 2)."""

    def __init__(self):

        generic.TorrentProvider.__init__(self, "EZRSS")

        # Backlog searches are supported through the site's search endpoint.
        self.supportsBacklog = True

        self.cache = EZRSSCache(self)

        self.url = 'https://www.ezrss.it/'

    def isEnabled(self):
        # Provider on/off toggle comes from global Sick Beard configuration.
        return sickbeard.EZRSS

    def imageName(self):
        return 'ezrss.png'

    def getQuality(self, item):
        """Derive a Quality value from the torrent file name embedded in an RSS item."""

        torrent_node = item.getElementsByTagName('torrent')[0]
        filename_node = torrent_node.getElementsByTagName('fileName')[0]
        filename = get_xml_text(filename_node)

        quality = Quality.nameQuality(filename)

        return quality

    def findSeasonResults(self, show, season):
        """Search a whole season; air-by-date shows are not supported by EZRSS."""

        results = {}

        if show.air_by_date:
            logger.log(u"EZRSS doesn't support air-by-date backlog because of limitations on their RSS search.", logger.WARNING)
            return results

        results = generic.TorrentProvider.findSeasonResults(self, show, season)

        return results

    def _get_season_search_strings(self, show, season=None):
        """Build the query parameter dict(s) for a season search."""

        params = {}

        if not show:
            return params

        params['show_name'] = sanitizeSceneName(show.name, ezrss=True).replace('.',' ').encode('utf-8')

        if season != None:
            params['season'] = season

        return [params]

    def _get_episode_search_strings(self, ep_obj):
        """Build the query parameter dict(s) for a single-episode search."""

        params = {}

        if not ep_obj:
            return params

        params['show_name'] = sanitizeSceneName(ep_obj.show.name, ezrss=True).replace('.',' ').encode('utf-8')

        if ep_obj.show.air_by_date:
            params['date'] = str(ep_obj.airdate)
        else:
            params['season'] = ep_obj.season
            params['episode'] = ep_obj.episode

        return [params]

    def _doSearch(self, search_params, show=None, season=None):
        """Run a search against EZRSS and return the usable RSS <item> nodes."""

        params = {"mode": "rss"}

        if search_params:
            params.update(search_params)

        searchURL = self.url + 'search/index.php?' + urllib.urlencode(params)

        logger.log(u"Search string: " + searchURL, logger.DEBUG)

        data = self.getURL(searchURL)

        if not data:
            return []

        try:
            parsedXML = parseString(data)
            items = parsedXML.getElementsByTagName('item')
        except Exception, e:
            logger.log(u"Error trying to load EZRSS RSS feed: "+ex(e), logger.ERROR)
            logger.log(u"RSS data: "+data, logger.DEBUG)
            return []

        results = []

        for curItem in items:

            (title, url) = self._get_title_and_url(curItem)

            if not title or not url:
                logger.log(u"The XML returned from the EZRSS RSS feed is incomplete, this result is unusable: "+data, logger.ERROR)
                continue

            results.append(curItem)

        return results

    def _get_title_and_url(self, item):
        """Return (title, url), preferring the name embedded in the torrent file name."""
        (title, url) = generic.TorrentProvider._get_title_and_url(self, item)

        torrent_node = item.getElementsByTagName('torrent')[0]
        filename_node = torrent_node.getElementsByTagName('fileName')[0]
        filename = get_xml_text(filename_node)

        new_title = self._extract_name_from_filename(filename)
        if new_title:
            title = new_title
            logger.log(u"Extracted the name "+title+" from the torrent link", logger.DEBUG)

        return (title, url)

    def _extract_name_from_filename(self, filename):
        """Strip the trailing "[group]"/"<id>.TPB" marker and ".torrent" suffix."""
        name_regex = '(.*?)\.?(\[.*]|\d+\.TPB)\.torrent$'
        logger.log(u"Comparing "+name_regex+" against "+filename, logger.DEBUG)
        match = re.match(name_regex, filename, re.I)
        if match:
            return match.group(1)
        return None
class EZRSSCache(tvcache.TVCache):
    """RSS-feed cache for the EZRSS provider."""

    def __init__(self, provider):

        tvcache.TVCache.__init__(self, provider)

        # only poll EZRSS every 15 minutes max
        self.minTime = 15

    def _getRSSData(self):
        """Fetch the raw RSS feed used to refresh the cache."""
        url = self.provider.url + 'feed/'

        logger.log(u"EZRSS cache update URL: "+ url, logger.DEBUG)

        data = self.provider.getURL(url)

        return data

    def _parseItem(self, item):
        """Validate one RSS item and add it to the cache; skip incomplete items."""

        (title, url) = self.provider._get_title_and_url(item)

        if not title or not url:
            logger.log(u"The XML returned from the EZRSS RSS feed is incomplete, this result is unusable", logger.ERROR)
            return

        logger.log(u"Adding item from RSS to cache: "+title, logger.DEBUG)

        self._addCacheEntry(title, url)


# Module-level provider instance picked up by Sick Beard's provider registry.
provider = EZRSSProvider()
jalexvig/tensorflow | tensorflow/contrib/kfac/python/ops/layer_collection_lib.py | 17 | 1810 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Registry for layers and their parameters/variables.
This represents the collection of all layers in the approximate Fisher
information matrix to which a particular FisherBlock may belong. That is, we
might have several layer collections for one TF graph (if we have multiple K-FAC
optimizers being used, for example.)
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.contrib.kfac.python.ops.layer_collection import *
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import
# Names that remain part of this module's public API; every other name pulled
# in by the wildcard import above is stripped by remove_undocumented() below.
_allowed_symbols = [
    "get_default_layer_collection",
    "set_default_layer_collection",
    "LayerParametersDict",
    "LayerCollection",
    "APPROX_KRONECKER_NAME",
    "APPROX_DIAGONAL_NAME",
    "APPROX_FULL_NAME",
    "VARIABLE_SCOPE",
    "APPROX_KRONECKER_INDEP_NAME",
    "APPROX_KRONECKER_SERIES_1_NAME",
    "APPROX_KRONECKER_SERIES_2_NAME"
]

remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
| apache-2.0 |
avati/samba | examples/logon/ntlogon/ntlogon.py | 146 | 12672 | #!/usr/bin/env python
"""
ntlogon.py written by Timothy (rhacer) Grant
Copyright 1999 - 2002 by Timothy Grant
is distributed under the terms of the GNU Public License.
The format for the configuration file is as follows:
While there is some room for confusion, we attempt to process things in
order of specificity: Global first, Group second, User third, OS Type
forth. This order can be debated forever, but it seems to make the most
sense.
# Everything in the Global section applies to all users logging on to the
# network
[Global]
@ECHO "Welcome to our network!!!"
NET TIME \\\\servername /SET /YES
NET USE F: \\\\servername\\globalshare /YES
# Map the private user area in the global section so we don't have to
# create individual user entries for each user!
NET USE U: \\\\servername\\%U /YES
# Group entries, User entries and OS entries each start with the
# keyword followed by a dash followed by--appropriately enough the Group
# name, the User name, or the OS name.
[Group-admin]
@ECHO "Welcome administrators!"
NET USE G: \\\\servername\\adminshare1 /YES
NET USE I: \\\\servername\\adminshare2 /YES
[Group-peons]
@ECHO "Be grateful we let you use computers!"
NET USE G: \\\\servername\\peonshare1 /YES
[Group-hackers]
@ECHO "What can I do for you today great one?"
NET USE G: \\\\servername\\hackershare1 /YES
NET USE I: \\\\servername\\adminshare2 /YES
[User-fred]
@ECHO "Hello there Fred!"
NET USE F: \\\\servername\\fredsspecialshare /YES
[OS-WfWg]
@ECHO "Time to upgrade it?"
# End configuration file
usage: ntlogon [-g | --group=groupname]
[-u | --user=username]
[-o | --os=osname]
[-m | --machine=netbiosname]
[-f | --templatefile=filename]
[-d | --dir=netlogon directory]
[-v | --version]
[-h | --help]
[--pause]
[--debug]
"""
#
#" This quote mark is an artifact of the inability of my editor to
# correctly colour code anything after the triple-quoted docstring.
# if your editor does not have this flaw, feel free to remove it.
import sys
import getopt
import re
import string
import os
version = "ntlogon.py v0.8"
def buildScript(buf, sections, group, user, ostype, machine, debug, pause):
    """
    buildScript() Takes the contents of the template file and builds
    a DOS batch file to be executed as an NT logon script. It does this
    by determining which sections of the configuration file should be included
    and creating a list object that contains each line contained in each
    included section. The list object is then returned to the calling
    routine.

    All comments (#) are removed. A REM is inserted to show
    which section of the configuration file each line comes from.
    We leave blanklines as they are sometimes useful for debugging

    We also replace all of the Samba macros (e.g., %U, %G, %a, %m) with their
    expanded versions which have been passed to us by smbd
    """
    hdrstring = ''
    script = []

    #
    # These are the Samba macros that we currently know about.
    # any user defined macros will also be added to this dictionary.
    # We do not store the % sign as part of the macro name.
    # The replace routine will prepend the % sign to all possible
    # replacements.
    #
    macros = {
        'U': user,
        'G': group,
        'a': ostype,
        'm': machine
    }

    #
    # Process each section defined in the list sections
    #
    for s in sections:
        # print 'searching for: ' + s
        idx = 0
        while idx < len(buf):
            ln = buf[idx]
            #
            # We need to set up a regex for each possible section we
            # know about. This is slightly complicated due to the fact
            # that section headers contain user defined text.
            #
            if s == 'Global':
                hdrstring = '\[ *' + s + ' *\]'
            elif s == 'Group':
                hdrstring = '\[ *' + s + ' *- *' + group + ' *\]'
            elif s == 'User':
                hdrstring = '\[ *' + s + ' *- *' + user + ' *\]'
            elif s == 'OS':
                hdrstring = '\[ *' + s + ' *- *' + ostype + ' *\]'
            elif s == 'Machine':
                hdrstring = '\[ *' + s + ' *- *' + machine + ' *\]'
            #
            # See if we have found a section header
            #
            if re.search(r'(?i)' + hdrstring, ln):
                idx = idx + 1   # increment the counter to move to the next
                                # line.
                x = re.match(r'([^#\r\n]*)', ln)    # Determine the section
                                                    # name and strip out CR/LF
                                                    # and comment information
                if debug:
                    print 'rem ' + x.group(1) + ' commands'
                else:
                    # create the rem at the beginning of each section of the
                    # logon script.
                    script.append('rem ' + x.group(1) + ' commands')
                #
                # process each line until we have found another section
                # header
                #
                while not re.search(r'.*\[.*\].*', buf[idx]):
                    #
                    # strip comments and line endings
                    #
                    x = re.match(r'([^#\r\n]*)', buf[idx])
                    if string.strip(x.group(1)) != '' :
                        # if there is still content after stripping comments and
                        # line endings then this is a line to process
                        line = x.group(1)
                        #
                        # Check to see if this is a macro definition line
                        #
                        vardef = re.match(r'(.*)=(.*)', line)
                        if vardef:
                            varname = string.strip(vardef.group(1))  # Strip leading and
                            varsub = string.strip(vardef.group(2))   # and trailing spaces
                            if varname == '':
                                print "Error: No substition name specified line: %d" % idx
                                sys.exit(1)
                            if varsub == '':
                                print "Error: No substitution text provided line: %d" % idx
                                sys.exit(1)
                            if macros.has_key(varname):
                                print "Warning: macro %s redefined line: %d" % (varname, idx)
                            macros[varname] = varsub
                            idx = idx + 1
                            continue
                        #
                        # Replace all the macros that we currently
                        # know about.
                        #
                        # Iterate over the dictionary that contains all known
                        # macro substitutions.
                        #
                        # We test for a macro name by prepending % to each dictionary
                        # key.
                        #
                        for varname in macros.keys():
                            line = re.sub(r'%' + varname + r'(\W)',
                                          macros[varname] + r'\1', line)
                        if debug:
                            print line
                            if pause:
                                print 'pause'
                        else:
                            script.append(line)
                    idx = idx + 1
                    if idx == len(buf):
                        break   # if we have reached the end of the file
                                # stop processing.
            idx = idx + 1   # increment the line counter
        # NOTE(review): indentation reconstructed — blank separator emitted
        # once per processed section; confirm against upstream layout.
        if debug:
            print ''
        else:
            script.append('')
    return script
# End buildScript()
def run():
    """
    run() everything starts here. The main routine reads the command line
    arguments, opens and reads the configuration file.
    """
    configfile = '/etc/ntlogon.conf'    # Default configuration file
    group = ''                          # Default group
    user = ''                           # Default user
    ostype = ''                         # Default os
    machine = ''                        # Default machine type
    outfile = 'logon.bat'               # Default batch file name
                                        # this file name WILL take on the form
                                        # username.bat if a username is specified
    debug = 0                           # Default debugging mode
    pause = 0                           # Default pause mode
    outdir = '/usr/local/samba/netlogon/'   # Default netlogon directory
    sections = ['Global', 'Machine', 'OS', 'Group', 'User']  # Currently supported
                                                             # configuration file
                                                             # sections

    options, args = getopt.getopt(sys.argv[1:], 'd:f:g:ho:u:m:v',
                                  ['templatefile=',
                                   'group=',
                                   'help',
                                   'os=',
                                   'user=',
                                   'machine=',
                                   'dir=',
                                   'version',
                                   'pause',
                                   'debug'])

    #
    # Process the command line arguments
    #
    for i in options:
        # template file to process
        if (i[0] == '-f') or (i[0] == '--templatefile'):
            configfile = i[1]
            # print 'configfile = ' + configfile
        # define the group to be used
        elif (i[0] == '-g') or (i[0] == '--group'):
            group = i[1]
            # print 'group = ' + group
        # define the os type
        elif (i[0] == '-o') or (i[0] == '--os'):
            ostype = i[1]
            # print 'os = ' + os
        # define the user
        elif (i[0] == '-u') or (i[0] == '--user'):
            user = i[1]
            outfile = user + '.bat'  # Setup the output file name
            # print 'user = ' + user
        # define the machine
        elif (i[0] == '-m') or (i[0] == '--machine'):
            machine = i[1]
        # define the netlogon directory
        elif (i[0] == '-d') or (i[0] == '--dir'):
            outdir = i[1]
            # print 'outdir = ' + outdir
        # if we are asked to turn on debug info, do so.
        elif (i[0] == '--debug'):
            debug = 1
            # print 'debug = ' + debug
        # if we are asked to turn on the automatic pause functionality, do so
        elif (i[0] == '--pause'):
            pause = 1
            # print 'pause = ' + pause
        # if we are asked for the version number, print it.
        elif (i[0] == '-v') or (i[0] == '--version'):
            print version
            sys.exit(0)
        # if we are asked for help print the docstring.
        elif (i[0] == '-h') or (i[0] == '--help'):
            print __doc__
            sys.exit(0)

    #
    # open the configuration file
    #
    try:
        iFile = open(configfile, 'r')
    except IOError:
        print 'Unable to open configuration file: ' + configfile
        sys.exit(1)

    #
    # open the output file
    #
    if not debug:
        try:
            oFile = open(outdir + outfile, 'w')
        except IOError:
            print 'Unable to open logon script file: ' + outdir + outfile
            sys.exit(1)

    buf = iFile.readlines()  # read in the entire configuration file

    #
    # call the script building routine
    #
    script = buildScript(buf, sections, group, user, ostype, machine, debug, pause)

    #
    # write out the script file
    #
    if not debug:
        for ln in script:
            oFile.write(ln + '\r\n')
            if pause:
                if string.strip(ln) != '':          # Because whitespace
                    oFile.write('pause' + '\r\n')   # is a useful tool, we
                                                    # don't put pauses after
                                                    # an empty line.
# End run()
#
# immediate-mode commands, for drag-and-drop or execfile() execution
#
if __name__ == '__main__':
    run()
else:
    # When imported as a module, print usage hints instead of running.
    print "Module ntlogon.py imported."
    print "To run, type: ntlogon.run()"
    print "To reload after changes to the source, type: reload(ntlogon)"
#
# End NTLogon.py
#
| gpl-3.0 |
google/apitools | apitools/base/py/list_pager_test.py | 2 | 13398 | #
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for list_pager."""
import unittest
from apitools.base.py import list_pager
from apitools.base.py.testing import mock
from samples.fusiontables_sample.fusiontables_v1 \
import fusiontables_v1_client as fusiontables
from samples.fusiontables_sample.fusiontables_v1 \
import fusiontables_v1_messages as messages
from samples.iam_sample.iam_v1 import iam_v1_client as iam_client
from samples.iam_sample.iam_v1 import iam_v1_messages as iam_messages
class Example(object):
    """Simple attribute holder used to exercise nested getattr/setattr helpers."""

    def __init__(self):
        # a -> 'aaa', b -> 'bbb', c -> 'ccc'
        for attr in ('a', 'b', 'c'):
            setattr(self, attr, attr * 3)
class GetterSetterTest(unittest.TestCase):
    """Tests for the nested attribute getter/setter helpers in list_pager."""

    def testGetattrNested(self):
        """A plain name and an equivalent tuple path resolve identically."""
        obj = Example()
        self.assertEqual(list_pager._GetattrNested(obj, 'a'), 'aaa')
        self.assertEqual(list_pager._GetattrNested(obj, ('a',)), 'aaa')
        obj.b = Example()
        self.assertEqual(list_pager._GetattrNested(obj, ('b', 'c')), 'ccc')

    def testSetattrNested(self):
        """Setting through a name or tuple path overwrites the leaf value."""
        obj = Example()
        list_pager._SetattrNested(obj, 'b', Example())
        self.assertEqual(obj.b.a, 'aaa')
        list_pager._SetattrNested(obj, ('b', 'a'), 'AAA')
        self.assertEqual(obj.b.a, 'AAA')
        list_pager._SetattrNested(obj, ('c',), 'CCC')
        self.assertEqual(obj.c, 'CCC')
class ListPagerTest(unittest.TestCase):
def _AssertInstanceSequence(self, results, n):
counter = 0
for instance in results:
self.assertEqual(instance.name, 'c' + str(counter))
counter += 1
self.assertEqual(counter, n)
def setUp(self):
self.mocked_client = mock.Client(fusiontables.FusiontablesV1)
self.mocked_client.Mock()
self.addCleanup(self.mocked_client.Unmock)
def testYieldFromList(self):
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
maxResults=100,
pageToken=None,
tableId='mytable',
),
messages.ColumnList(
items=[
messages.Column(name='c0'),
messages.Column(name='c1'),
messages.Column(name='c2'),
messages.Column(name='c3'),
],
nextPageToken='x',
))
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
maxResults=100,
pageToken='x',
tableId='mytable',
),
messages.ColumnList(
items=[
messages.Column(name='c4'),
messages.Column(name='c5'),
messages.Column(name='c6'),
messages.Column(name='c7'),
],
))
client = fusiontables.FusiontablesV1(get_credentials=False)
request = messages.FusiontablesColumnListRequest(tableId='mytable')
results = list_pager.YieldFromList(client.column, request)
self._AssertInstanceSequence(results, 8)
def testYieldNoRecords(self):
client = fusiontables.FusiontablesV1(get_credentials=False)
request = messages.FusiontablesColumnListRequest(tableId='mytable')
results = list_pager.YieldFromList(client.column, request, limit=False)
self.assertEqual(0, len(list(results)))
def testYieldFromListPartial(self):
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
maxResults=6,
pageToken=None,
tableId='mytable',
),
messages.ColumnList(
items=[
messages.Column(name='c0'),
messages.Column(name='c1'),
messages.Column(name='c2'),
messages.Column(name='c3'),
],
nextPageToken='x',
))
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
maxResults=2,
pageToken='x',
tableId='mytable',
),
messages.ColumnList(
items=[
messages.Column(name='c4'),
messages.Column(name='c5'),
messages.Column(name='c6'),
messages.Column(name='c7'),
],
))
client = fusiontables.FusiontablesV1(get_credentials=False)
request = messages.FusiontablesColumnListRequest(tableId='mytable')
results = list_pager.YieldFromList(client.column, request, limit=6)
self._AssertInstanceSequence(results, 6)
def testYieldFromListPaging(self):
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
maxResults=5,
pageToken=None,
tableId='mytable',
),
messages.ColumnList(
items=[
messages.Column(name='c0'),
messages.Column(name='c1'),
messages.Column(name='c2'),
messages.Column(name='c3'),
messages.Column(name='c4'),
],
nextPageToken='x',
))
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
maxResults=4,
pageToken='x',
tableId='mytable',
),
messages.ColumnList(
items=[
messages.Column(name='c5'),
messages.Column(name='c6'),
messages.Column(name='c7'),
messages.Column(name='c8'),
],
))
client = fusiontables.FusiontablesV1(get_credentials=False)
request = messages.FusiontablesColumnListRequest(tableId='mytable')
results = list_pager.YieldFromList(client.column,
request,
limit=9,
batch_size=5)
self._AssertInstanceSequence(results, 9)
def testYieldFromListBatchSizeNone(self):
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
maxResults=None,
pageToken=None,
tableId='mytable',
),
messages.ColumnList(
items=[
messages.Column(name='c0'),
messages.Column(name='c1'),
messages.Column(name='c2'),
messages.Column(name='c3'),
messages.Column(name='c4'),
messages.Column(name='c5'),
messages.Column(name='c6'),
],
nextPageToken='x',
))
client = fusiontables.FusiontablesV1(get_credentials=False)
request = messages.FusiontablesColumnListRequest(tableId='mytable')
results = list_pager.YieldFromList(client.column,
request,
limit=5,
batch_size=None)
self._AssertInstanceSequence(results, 5)
def testYieldFromListEmpty(self):
self.mocked_client.column.List.Expect(
messages.FusiontablesColumnListRequest(
maxResults=6,
pageToken=None,
tableId='mytable',
),
messages.ColumnList())
client = fusiontables.FusiontablesV1(get_credentials=False)
request = messages.FusiontablesColumnListRequest(tableId='mytable')
results = list_pager.YieldFromList(client.column, request, limit=6)
self._AssertInstanceSequence(results, 0)
def testYieldFromListWithPredicate(self):
    """Only items matching the predicate are yielded; the 'bad*' items
    are fetched but filtered out.
    """
    self.mocked_client.column.List.Expect(
        messages.FusiontablesColumnListRequest(
            maxResults=100,
            pageToken=None,
            tableId='mytable',
        ),
        messages.ColumnList(
            items=[
                messages.Column(name='c0'),
                messages.Column(name='bad0'),
                messages.Column(name='c1'),
                messages.Column(name='bad1'),
            ],
            nextPageToken='x',
        ))
    self.mocked_client.column.List.Expect(
        messages.FusiontablesColumnListRequest(
            maxResults=100,
            pageToken='x',
            tableId='mytable',
        ),
        messages.ColumnList(
            items=[
                messages.Column(name='c2'),
            ],
        ))
    client = fusiontables.FusiontablesV1(get_credentials=False)
    request = messages.FusiontablesColumnListRequest(tableId='mytable')
    # 'bad0'/'bad1' contain no 'c', so only c0, c1, c2 survive.
    results = list_pager.YieldFromList(
        client.column, request, predicate=lambda x: 'c' in x.name)
    self._AssertInstanceSequence(results, 3)
def testYieldFromListWithCustomGetFieldFunction(self):
    """YieldFromList uses a caller-supplied get_field_func to read the
    repeated field off each response message; we record invocations to
    prove the custom getter was actually called.
    """
    self.mocked_client.column.List.Expect(
        messages.FusiontablesColumnListRequest(
            maxResults=100,
            pageToken=None,
            tableId='mytable',
        ),
        messages.ColumnList(
            items=[
                messages.Column(name='c0')
            ]
        ))
    # Side-channel recording how many times the getter ran.
    custom_getter_called = []

    def Custom_Getter(message, attribute):
        custom_getter_called.append(True)
        return getattr(message, attribute)

    client = fusiontables.FusiontablesV1(get_credentials=False)
    request = messages.FusiontablesColumnListRequest(tableId='mytable')
    results = list_pager.YieldFromList(
        client.column, request, get_field_func=Custom_Getter)
    self._AssertInstanceSequence(results, 1)
    # assertEquals is a deprecated alias; assertEqual is the supported name.
    self.assertEqual(1, len(custom_getter_called))
class ListPagerAttributeTest(unittest.TestCase):
    """Tests for YieldFromList with non-default paging attribute names,
    using the IAM GetPolicyDetails method (pageSize/pageToken style).

    Only change from the original: the deprecated ``assertEquals`` alias
    is replaced by the supported ``assertEqual``.
    """

    def setUp(self):
        self.mocked_client = mock.Client(iam_client.IamV1)
        self.mocked_client.Mock()
        self.addCleanup(self.mocked_client.Unmock)

    def testYieldFromListWithAttributes(self):
        """batch_size_attribute='pageSize' plus explicit method/field
        overrides drive paging across two responses."""
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageSize=100,
                pageToken=None,
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c0'),
                    iam_messages.PolicyDetail(fullResourcePath='c1'),
                ],
                nextPageToken='x',
            ))
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageSize=100,
                pageToken='x',
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c2'),
                ],
            ))
        client = iam_client.IamV1(get_credentials=False)
        request = iam_messages.GetPolicyDetailsRequest(
            fullResourcePath='myresource')
        results = list_pager.YieldFromList(
            client.iamPolicies, request,
            batch_size_attribute='pageSize',
            method='GetPolicyDetails', field='policies')
        i = 0
        for i, instance in enumerate(results):
            self.assertEqual('c{0}'.format(i), instance.fullResourcePath)
        self.assertEqual(2, i)

    def testYieldFromListWithNoBatchSizeAttribute(self):
        """batch_size_attribute=None omits the page size from the request;
        a tokenless response ends the iteration."""
        self.mocked_client.iamPolicies.GetPolicyDetails.Expect(
            iam_messages.GetPolicyDetailsRequest(
                pageToken=None,
                fullResourcePath='myresource',
            ),
            iam_messages.GetPolicyDetailsResponse(
                policies=[
                    iam_messages.PolicyDetail(fullResourcePath='c0'),
                    iam_messages.PolicyDetail(fullResourcePath='c1'),
                ],
            ))
        client = iam_client.IamV1(get_credentials=False)
        request = iam_messages.GetPolicyDetailsRequest(
            fullResourcePath='myresource')
        results = list_pager.YieldFromList(
            client.iamPolicies, request,
            batch_size_attribute=None,
            method='GetPolicyDetails', field='policies')
        i = 0
        for i, instance in enumerate(results):
            self.assertEqual('c{0}'.format(i), instance.fullResourcePath)
        self.assertEqual(1, i)
| apache-2.0 |
jeromekelleher/msprime | msprime/species_trees.py | 1 | 22239 | #
# Copyright (C) 2020 University of Oxford
#
# This file is part of msprime.
#
# msprime is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# msprime is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with msprime. If not, see <http://www.gnu.org/licenses/>.
#
"""
Module responsible for parsing species trees.
"""
import collections
import re
# newick is an optional dependency: record whether the import succeeded
# so that check_newick_import() can raise a helpful error lazily, at the
# point where species-tree parsing is actually requested.
try:
    _newick_imported = False
    import newick
    _newick_imported = True
except ImportError:  # pragma: no cover
    pass
from . import demography as demog
def check_newick_import():
    """Raise an informative ImportError if the optional ``newick``
    dependency could not be imported at module load time."""
    if _newick_imported:
        return
    raise ImportError(
        "The 'newick' module is required for species tree parsing. "
        "If you installed msprime using conda, please install the "
        "newick module using `conda install -c bioconda newick` or "
        "'pip install newick'. If you installed msprime using pip "
        "newick should have been automatically installed; please "
        "open an issue on GitHub with details of your installation."
    )
def parse_starbeast(tree, generation_time, time_units="myr"):
    """
    Parse a nexus encoded species tree into a Demography object. See the
    documentation of :class:`.Demography.from_starbeast` (the public interface)
    for details.
    """
    check_newick_import()
    # Only "myr" and "yr" are meaningful here; starbeast trees never use
    # branch lengths in generations.
    if time_units not in ("myr", "yr"):
        raise ValueError(
            "The specified units for branch lengths ("
            f'"{time_units}") are not accepted. '
            'Accepted units are "myr" (millions of years) or "yr" (years).'
        )
    generation_time = check_generation_time(generation_time)
    # Convert branch length units into numbers of generations.
    scale = get_generations_per_branch_length_unit(time_units, generation_time)
    translate_string, tree_string = parse_nexus(tree)
    species_name_map = parse_translate_command(translate_string)
    return process_starbeast_tree(
        strip_extra_annotations(tree_string), scale, species_name_map
    )
def parse_number_or_mapping(value, message):
    """
    Interpret ``value`` as either a single floating point number or a
    mapping, and return a mapping in either case.

    A number becomes a defaultdict returning that number for every key;
    a Mapping is returned unchanged. Anything else raises TypeError with
    the supplied ``message``.
    """
    try:
        constant = float(value)
    except TypeError:
        # Not coercible to float: accept it only if it is a mapping.
        if not isinstance(value, collections.abc.Mapping):
            raise TypeError(message)
        return value
    return collections.defaultdict(lambda: constant)
def parse_initial_size(initial_size):
    """Normalise the ``initial_size`` argument into a mapping from species
    name to population size (see parse_number_or_mapping)."""
    return parse_number_or_mapping(
        initial_size,
        "initial_size argument must be a single number or a mapping from "
        "species names to their population sizes.",
    )
def parse_growth_rate(growth_rate):
    """Normalise the ``growth_rate`` argument into a mapping from species
    name to exponential growth rate (see parse_number_or_mapping)."""
    return parse_number_or_mapping(
        growth_rate,
        "growth_rate argument must be a single number or a mapping from "
        "species names to their exponential growth rates.",
    )
def parse_species_tree(
    tree,
    initial_size,
    *,
    time_units="gen",
    generation_time=None,
    growth_rate=None,
):
    """
    Parse a newick encoded species tree into a Demography object. See the
    documentation of :class:`.Demography.from_species_tree` (the public interface)
    for details.

    :param tree: a newick string describing the species tree.
    :param initial_size: a single number or a mapping from species name to
        population size.
    :param time_units: units of the branch lengths: "gen", "myr" or "yr".
    :param generation_time: generation time; must be given if and only if
        time_units is not "gen".
    :param growth_rate: a single number or a mapping from species name to
        exponential growth rate; defaults to 0.
    :raises ValueError: for unknown units or an inconsistent
        units/generation_time combination.
    """
    check_newick_import()
    # Make sure that branch length units are either "myr", "yr", or "gen".
    allowed_branch_lenth_units = ["myr", "yr", "gen"]
    if time_units not in allowed_branch_lenth_units:
        err = "The specified units for branch lengths ("
        err += f'"{time_units}") are not accepted. '
        err += 'Accepted units are "myr" (millions of years), "yr" (years), '
        err += 'and "gen" (generations).'
        raise ValueError(err)
    initial_size = parse_initial_size(initial_size)
    if growth_rate is None:
        growth_rate = 0
    growth_rate = parse_growth_rate(growth_rate)
    # Make sure that the generation time is either None or positive.
    if generation_time is not None:
        generation_time = check_generation_time(generation_time)
    # Make sure that the generation time is specified if and only if
    # branch lengths are not in units of generations.
    if time_units == "gen":
        if generation_time is not None:
            err = 'With branch lengths in units of generations ("gen"), '
            err += "a generation time should not be specified additionally."
            raise ValueError(err)
    else:
        if generation_time is None:
            err = "With branch lengths in units of "
            err += f'"{time_units}", a generation time must be '
            err += "specified additionally."
            raise ValueError(err)
    # Get the number of generations per branch length unit.
    generations_per_branch_length_unit = get_generations_per_branch_length_unit(
        time_units, generation_time
    )
    # Parse the tree with the newick library.
    root = parse_newick(tree, generations_per_branch_length_unit)
    # Define populations and demographic events according to the
    # specified population size and the divergence times in the species tree.
    # Each divergence event (node in the tree) corresponds to an ancestral
    # population, and mass migration events with proportion 1 move all lineages
    # from the child populations into this new populationl
    population_id_map = {}
    demography = demog.Demography()

    def add_population(node):
        # The (stripped) newick node label, if present, becomes the
        # population name; unnamed nodes get name=None.
        name = None
        if node.name is not None:
            stripped = node.name.strip()
            if len(stripped) > 0:
                name = stripped
        population = demography.add_population(
            initial_size=initial_size[name],
            growth_rate=growth_rate[name],
            name=name,
        )
        population_id_map[node] = population.name
        return population.name

    # Add in the leaf populations first so that they get IDs 0..n - 1
    for node in root.walk():
        if len(node.descendants) == 0:
            add_population(node)
    # Now add in the internal node populations and the mass migration events
    # joining them.
    for node in root.walk("postorder"):
        if len(node.descendants) > 0:
            population_id = add_population(node)
            child_pops = [population_id_map[child] for child in node.descendants]
            demography.add_population_split(
                time=node.time, derived=child_pops, ancestral=population_id
            )
    demography.sort_events()
    demography.validate()
    return demography
def process_starbeast_tree(
    tree_string, generations_per_branch_length_unit, species_name_map
):
    """
    Process the specified starbeast newick string with embedded dmv annotations
    (but no others) and return the resulting population_configurations and
    demographic_events.

    Each node's dmv value is multiplied by
    ``generations_per_branch_length_unit`` to obtain that population's
    initial size.
    """
    root = parse_newick(tree_string, generations_per_branch_length_unit)
    demography = demog.Demography()
    population_size_map = {}
    population_id_map = {}
    # The process here follows the same basic logic as parse_species_tree above
    # but with some extra elaborations to account for changing population sizes
    # and details of the extended newick annotations.

    def add_population(node):
        name = None
        # The species label is everything before the "[" of the annotation.
        newick_id = node.name.strip().split("[")[0]
        if len(newick_id) > 0:
            name = species_name_map[newick_id]
        population = demography.add_population(
            initial_size=population_size_map[node], name=name
        )
        population_id_map[node] = population.name
        return population.name

    for node in root.walk():
        if node.name is None:
            raise ValueError("Annotation missing for one or more nodes.")
        # Extract the numeric value from the "&dmv={...}" annotation.
        find_pattern = "\\&dmv=\\{([\\d\\.]+?)\\}"
        dmv_patterns = re.search(find_pattern, node.name)
        if dmv_patterns is None:
            raise ValueError("No dmv annotation for node")
        # Rescale from branch length units into generations.
        pop_size = float(dmv_patterns.group(1)) * generations_per_branch_length_unit
        population_size_map[node] = pop_size
        if len(node.descendants) == 0:
            add_population(node)
    for node in root.walk("postorder"):
        if len(node.descendants) > 0:
            population_id = add_population(node)
            demography.add_population_split(
                time=node.time,
                ancestral=population_id,
                derived=[population_id_map[child] for child in node.descendants],
            )
    demography.sort_events()
    demography.validate()
    return demography
def is_number(s):
    """Return True if ``s`` can be converted to a float, else False."""
    try:
        float(s)
    except ValueError:
        return False
    return True
def check_generation_time(generation_time):
    """
    Validate a user-supplied generation time and return it as a float.

    :raises ValueError: if the value cannot be interpreted as a number
        (including None) or is not strictly positive.
    """
    try:
        generation_time = float(generation_time)
    except (TypeError, ValueError):
        # float() raises TypeError for None and other non-numeric types,
        # and ValueError for unparseable strings; the original code only
        # caught ValueError, letting a raw TypeError escape for None.
        raise ValueError("Generation time must be numeric.")
    if generation_time <= 0:
        raise ValueError("Generation time must be > 0.")
    return generation_time
def get_generations_per_branch_length_unit(time_units, generation_time):
    """
    Method to calculate the number of generations per branch length
    unit, given the branch length unit and a generation time.
    """
    if time_units == "gen":
        # Branch lengths are already expressed in generations.
        return 1
    if time_units == "myr":
        # Millions of years: one million years per generation_time years.
        return 10 ** 6 / generation_time
    # Remaining case: branch lengths in years.
    return 1 / generation_time
def parse_newick(tree, branch_length_multiplier):
    """
    Parses the newick tree and annotates the resulting nodes with their
    time values, appropriately scaled.

    :param tree: the newick string to parse.
    :param branch_length_multiplier: factor applied to branch-length
        depths when computing node times (generations per unit).
    :raises ValueError: for an unparseable string, a tree with fewer
        than three nodes, or a non-ultrametric tree.
    """
    # Parse the newick tree string.
    parsed = newick.loads(tree)
    if len(parsed) == 0:
        raise ValueError(f"Not a valid newick tree: '{tree}'")
    root = parsed[0]
    # Set node depths (distances from root) with an explicit DFS stack.
    stack = [(root, 0)]
    num_nodes = 0
    max_depth = 0
    while len(stack) > 0:
        node, depth = stack.pop()
        if depth > max_depth:
            max_depth = depth
        num_nodes += 1
        node.depth = depth
        for child in node.descendants:
            stack.append((child, depth + child.length))
    if num_nodes < 3:
        raise ValueError("Newick tree must have at least three nodes")
    # Set node times (distances from present).
    for node in root.walk():
        node.time = (max_depth - node.depth) * branch_length_multiplier
        # We don't allow non ultrametric trees for now because it's unclear
        # how we should deal with taking samples in this case. The code
        # all works perfectly well other than this, though.
        if node.is_leaf:
            if abs(node.time) > 1e-8:  # Arbitrary cutoff
                raise ValueError(
                    f"All leaf populations must be at time 0: time={node.time}"
                )
    return root
def strip_extra_annotations(tree_string):
    """
    Takes the input newick string and strips all extended newick annotations
    other than the dmv attribute, returning the simplified newick string.

    Three character-by-character passes over the string:
    1) validate every [...] annotation (exactly one well-formed dmv tag);
    2) rebuild the string keeping only the "[&dmv={...}]" parts;
    3) re-validate the cleaned string.
    """
    if "[" not in tree_string:
        raise ValueError("No annotation in tree string.")
    if tree_string.count("[") != tree_string.count("]"):
        raise ValueError("Unbalanced square brackets in annotation.")
    if "&dmv={" not in tree_string:
        raise ValueError("No dmv tag in annotation.")
    if "}" not in tree_string:
        raise ValueError("No closing curly brackets in annotation.")
    # Make sure that each substring that begins with an opening square bracket and ends
    # with a closing square bracket does not contain any further square or round brackets
    # in it and that it does include the dmv tag.
    in_annotation = False
    annotation_string = ""
    for x in range(len(tree_string)):
        if tree_string[x] == "[":
            in_annotation = True
            annotation_string += tree_string[x]
        elif tree_string[x] == "]":
            # End of one annotation: validate the accumulated substring.
            in_annotation = False
            annotation_string += tree_string[x]
            assert "[" not in annotation_string[1:-1], "Square bracket in annotation"
            assert "]" not in annotation_string[1:-1], "Square bracket in annotation"
            assert annotation_string.count("&dmv=") == 1, "Multiple or no dmv tags"
            # Make sure that the dmv tag is followed by a closing curly bracket.
            # Also ensure that the dmv tag is the first in the annotation.
            dmv_string = ""
            in_dmv = False
            for y in range(len(annotation_string)):
                dmv_string += annotation_string[y]
                if annotation_string[y] == "}":
                    break
            err = "Uneven curly parentheses in dmv annotation"
            assert dmv_string.count("{") == dmv_string.count("}"), err
            assert dmv_string.count("{") == 1, "Multiple or no values in dmv annotation"
            annotation_string = ""
            # Make sure that a positive number is found between curly brackets.
            clean_dmv_string = dmv_string.split("{")[1][0:-1]
            assert is_number(clean_dmv_string), "dmv annotation is not a number"
            assert float(clean_dmv_string) >= 0, "dmv annotation is negative"
        elif in_annotation:
            annotation_string += tree_string[x]
    # Because the newick module doesn't support parsing extended newick attributes
    # in general, we have to clean thing up manually before parsing the tree. Here,
    # we get rid of all annotations except for dmv.
    clean_tree_string = ""
    in_annotation = False
    in_dmv = False
    for x in range(len(tree_string)):
        if tree_string[x] == "[":
            in_annotation = True
            clean_tree_string += tree_string[x]
        elif tree_string[x] == "]":
            in_annotation = False
            clean_tree_string += tree_string[x]
        elif in_annotation:
            # Inside an annotation keep only "&", "dmv={" and the value
            # up to (and including) the closing curly bracket.
            if tree_string[x - 1] == "[" and tree_string[x] == "&":
                clean_tree_string += "&"
            if tree_string[x - 5 : x] == "dmv={":
                in_dmv = True
                clean_tree_string += "dmv={"
            if in_dmv:
                clean_tree_string += tree_string[x]
            if tree_string[x] == "}":
                in_dmv = False
        else:
            clean_tree_string += tree_string[x]
    # Make sure that only dmv annotation remains in the tree string.
    in_annotation = False
    annotation_string = ""
    for x in range(len(clean_tree_string)):
        if clean_tree_string[x] == "[":
            in_annotation = True
            annotation_string += clean_tree_string[x]
        elif clean_tree_string[x] == "]":
            in_annotation = False
            annotation_string += clean_tree_string[x]
            assert annotation_string[0:7] == "[&dmv={", "Annotation could not be read"
            assert annotation_string[-2:] == "}]", "Annotation could not be read"
            assert is_number(annotation_string[7:-2]), "dmv annotation is not a number"
            assert float(annotation_string[7:-2]) >= 0, "dmv annotation is negative"
            annotation_string = ""
        elif in_annotation:
            annotation_string += clean_tree_string[x]
    return clean_tree_string
def parse_translate_command(translate_command):
    """
    Parses the species IDs used in a nexus newick string to their
    more verbose species names. Returns a dictionary mapping the newick
    values to the species names.

    Only comma-separated two-token pairs are supported, with the ID used
    in the tree string on the left and its translation on the right,
    e.g. "translate 1 spc1, 2 spc2, 3 spc3".
    """
    # Trim the leading "translate " tag before splitting into pairs.
    assert translate_command.startswith("translate ")
    body = translate_command[len("translate "):]
    mapping = {}
    for pair in body.split(","):
        tokens = pair.split()
        if len(tokens) <= 1:
            raise ValueError("Missing translation in translation block.")
        if len(tokens) != 2:
            raise ValueError(
                "Species IDs in the translation block appear to include "
                "whitespace. This is not supported."
            )
        newick_id, species_name = tokens
        if newick_id in mapping:
            raise ValueError(
                f"Newick ID {newick_id} defined multiple times in translation"
            )
        mapping[newick_id] = species_name
    if len(mapping) != len(set(mapping.values())):
        raise ValueError("Duplicate species names in translation")
    return mapping
def parse_nexus(nexus):
    """
    Parse the specified nexus string, returning translate and tree comand
    strings.

    NOTE because we're assuming that the data is generated by starbeast we
    aren't exhaustive in checking for malformed input. We try to catch
    a lot of errors and to give good error messages in these cases. We also
    put a lot of assertions to make sure that we're not silently
    accepting malformed input data. Nonetheless, this is definitely not
    a general purpose Nexus parser and should not be expected to work on
    anything other than input that closely resembles starbeast output.
    """
    # From the Nexus format definition (Maddison et al. 1997):
    # "For the most part, whitespace, inluding newline characters, is ignored,
    # with two exceptions: (1) whitespace indicates boundaries between words;
    # (2) in interleaved matrices, newline characters indicate the boundary
    # between data of different taxa."
    # As we do not parse matrices (we're only interested in taxa and trees
    # blocks), we ignore (2), replace newline characters with spaces and
    # replace multiple whitespaces with a single one.
    nexus_string = nexus.replace("\n", " ")
    nexus_string = " ".join(nexus_string.split())
    # From the Nexus format definition (Maddison et al. 1997):
    # "Commands or subcommands that differ only in case are homonymous."
    # We turn the whole nexus string into lowercase.
    nexus_string = nexus_string.lower()
    # Make sure that the string is in Nexus format.
    if nexus_string[0:6] != "#nexus":
        raise ValueError("The species tree does not appear to be in Nexus format.")
    # From the Nexus format definition (Maddison et al. 1997):
    # "Blocks are series of commands, beginning with a Begin command and ending
    # with an End command."
    # As semicolons are used only to terminate commands, potentially present
    # whitespace before semicolons has no meaning; we remove it for easier
    # parsing.
    # Then we identify the trees block and raise a ValueError if none is found.
    # This could be done with a regexp instead.
    nexus_string = nexus_string.replace(" ;", ";")
    tree_block_string = ""
    in_tree_block = False
    # Character scan copying everything from "begin trees;" up to and
    # including the next "end;".
    for x in range(len(nexus_string)):
        if nexus_string[x : x + 12] == "begin trees;":
            in_tree_block = True
            tree_block_string += nexus_string[x]
        elif in_tree_block and nexus_string[x : x + 4] == "end;":
            tree_block_string += nexus_string[x : x + 4]
            break
        elif in_tree_block:
            tree_block_string += nexus_string[x]
    if tree_block_string == "" or tree_block_string[-4:] != "end;":
        raise ValueError("The Nexus string does not include a complete trees block.")
    # From the Nexus format definition (Maddison et al. 1997):
    # "Commands follow a simple format: the first token in the command
    # is the command name, which is followed by a series of tokens and
    # whitespace; the command is terminated by a semicolon."
    # Get the commands from the tree block, ignoring the begin and end
    # statements of the block.
    tree_block_commands = tree_block_string.split(";")
    tree_block_commands = [c.strip() for c in tree_block_commands]
    assert tree_block_commands[0] == "begin trees", "Tree block malformed"
    assert tree_block_commands[-1] == "", "Tree block malformed"
    assert tree_block_commands[-2] == "end", "Tree block malformed"
    assert len(tree_block_commands) > 3, "Tree block malformed"
    tree_block_commands = tree_block_commands[1:-2]
    # Ensure that exactly one of the commands is a translate command and
    # exactly one is a tree command, which is the case when the Nexus file
    # is written with TreeAnnotator based on a posterior tree distribution
    # generated with StarBEAST.
    translate_commands = []
    tree_commands = []
    for command in tree_block_commands:
        command_list = command.split()
        if command_list[0] == "translate":
            translate_commands.append(command)
        elif command_list[0] == "tree":
            tree_commands.append(command)
    if len(translate_commands) != 1:
        err = "The Nexus string does not contain exactly one translate command."
        raise ValueError(err)
    if len(tree_commands) != 1:
        err = "The Nexus string does not contain exactly one tree command."
        raise ValueError(err)
    translate_command = translate_commands[0]
    tree_command = tree_commands[0]
    assert "(" in tree_command, "No parentheses in tree string"
    # The newick tree itself starts at the first opening parenthesis.
    tree_string = tree_command[tree_command.find("(") :]
    return translate_command, tree_string
| gpl-3.0 |
Kilhog/odoo | addons/sale_journal/__init__.py | 443 | 1067 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import sale_journal
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
nathanial/lettuce | tests/integration/lib/Django-1.2.5/tests/regressiontests/templates/nodelist.py | 50 | 1121 | from unittest import TestCase
from django.template.loader import get_template_from_string
from django.template import VariableNode
class NodelistTest(TestCase):
    """Check that get_nodes_by_type finds the single VariableNode nested
    inside each of the block tags under test.

    The four original tests were copy-paste identical apart from the
    template source; the shared body is factored into a private helper
    (which also avoids shadowing the ``vars`` builtin).
    """

    def _assert_single_variable_node(self, source):
        # Compile the template and assert exactly one VariableNode is
        # found inside the enclosing block tag.
        template = get_template_from_string(source)
        variable_nodes = template.nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(variable_nodes), 1)

    def test_for(self):
        self._assert_single_variable_node('{% for i in 1 %}{{ a }}{% endfor %}')

    def test_if(self):
        self._assert_single_variable_node('{% if x %}{{ a }}{% endif %}')

    def test_ifequal(self):
        self._assert_single_variable_node(
            '{% ifequal x y %}{{ a }}{% endifequal %}')

    def test_ifchanged(self):
        self._assert_single_variable_node(
            '{% ifchanged x %}{{ a }}{% endifchanged %}')
| gpl-3.0 |
charukiewicz/beer-manager | venv/lib/python3.4/site-packages/flask/testsuite/config.py | 556 | 11820 | # -*- coding: utf-8 -*-
"""
flask.testsuite.config
~~~~~~~~~~~~~~~~~~~~~~
Configuration and instances.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import flask
import pkgutil
import unittest
from contextlib import contextmanager
from flask.testsuite import FlaskTestCase
# config keys used for the ConfigTestCase
# NOTE: this module doubles as a config source -- from_object(__name__)
# and from_pyfile(__file__) in the tests below pick up these names.
TEST_KEY = 'foo'
SECRET_KEY = 'devkey'
class ConfigTestCase(FlaskTestCase):
    """Tests for Flask's config loading: from_pyfile, from_object,
    from_envvar, and the error paths for missing sources."""

    def common_object_test(self, app):
        # Shared assertions: the upper-case keys defined at module level
        # must be imported, and non-config names (like this class) must not.
        self.assert_equal(app.secret_key, 'devkey')
        self.assert_equal(app.config['TEST_KEY'], 'foo')
        self.assert_not_in('ConfigTestCase', app.config)

    def test_config_from_file(self):
        app = flask.Flask(__name__)
        # Load this very module as a python config file.
        app.config.from_pyfile(__file__.rsplit('.', 1)[0] + '.py')
        self.common_object_test(app)

    def test_config_from_object(self):
        app = flask.Flask(__name__)
        app.config.from_object(__name__)
        self.common_object_test(app)

    def test_config_from_class(self):
        # Config values may be inherited from base classes.
        class Base(object):
            TEST_KEY = 'foo'

        class Test(Base):
            SECRET_KEY = 'devkey'
        app = flask.Flask(__name__)
        app.config.from_object(Test)
        self.common_object_test(app)

    def test_config_from_envvar(self):
        env = os.environ
        try:
            # Replace the whole environ mapping so the variable is unset.
            os.environ = {}
            app = flask.Flask(__name__)
            try:
                app.config.from_envvar('FOO_SETTINGS')
            except RuntimeError as e:
                self.assert_true("'FOO_SETTINGS' is not set" in str(e))
            else:
                self.assert_true(0, 'expected exception')
            self.assert_false(app.config.from_envvar('FOO_SETTINGS', silent=True))
            os.environ = {'FOO_SETTINGS': __file__.rsplit('.', 1)[0] + '.py'}
            self.assert_true(app.config.from_envvar('FOO_SETTINGS'))
            self.common_object_test(app)
        finally:
            # Always restore the real environment mapping.
            os.environ = env

    def test_config_from_envvar_missing(self):
        env = os.environ
        try:
            os.environ = {'FOO_SETTINGS': 'missing.cfg'}
            try:
                app = flask.Flask(__name__)
                app.config.from_envvar('FOO_SETTINGS')
            except IOError as e:
                msg = str(e)
                self.assert_true(msg.startswith('[Errno 2] Unable to load configuration '
                                                'file (No such file or directory):'))
                self.assert_true(msg.endswith("missing.cfg'"))
            else:
                self.fail('expected IOError')
            self.assertFalse(app.config.from_envvar('FOO_SETTINGS', silent=True))
        finally:
            os.environ = env

    def test_config_missing(self):
        app = flask.Flask(__name__)
        try:
            app.config.from_pyfile('missing.cfg')
        except IOError as e:
            msg = str(e)
            self.assert_true(msg.startswith('[Errno 2] Unable to load configuration '
                                            'file (No such file or directory):'))
            self.assert_true(msg.endswith("missing.cfg'"))
        else:
            self.assert_true(0, 'expected config')
        # silent=True turns the missing file into a False return value.
        self.assert_false(app.config.from_pyfile('missing.cfg', silent=True))

    def test_session_lifetime(self):
        app = flask.Flask(__name__)
        app.config['PERMANENT_SESSION_LIFETIME'] = 42
        # The integer seconds value is exposed back via the timedelta property.
        self.assert_equal(app.permanent_session_lifetime.seconds, 42)
class LimitedLoaderMockWrapper(object):
    """Wrap a loader while hiding its ``archive`` attribute and
    ``get_filename`` method, emulating restricted loaders such as Google
    App Engine's HardenedModulesHook."""

    def __init__(self, loader):
        self.loader = loader

    def __getattr__(self, name):
        # Pretend these two attributes do not exist on the wrapped loader;
        # everything else is delegated unchanged.
        if name in ('archive', 'get_filename'):
            raise AttributeError(
                'Mocking a loader which does not have `%s.`' % name)
        return getattr(self.loader, name)
@contextmanager
def patch_pkgutil_get_loader(wrapper_class=LimitedLoaderMockWrapper):
    """Patch pkgutil.get_loader to give loader without get_filename or archive.

    This provides for tests where a system has custom loaders, e.g. Google App
    Engine's HardenedModulesHook, which have neither the `get_filename` method
    nor the `archive` attribute.
    """
    old_get_loader = pkgutil.get_loader

    def get_loader(*args, **kwargs):
        # Every loader handed out while patched is wrapped so the
        # restricted attributes raise AttributeError.
        return wrapper_class(old_get_loader(*args, **kwargs))
    try:
        pkgutil.get_loader = get_loader
        yield
    finally:
        # Always restore the real implementation, even if the with-body raises.
        pkgutil.get_loader = old_get_loader
class InstanceTestCase(FlaskTestCase):
    """Tests for how Flask derives app.instance_path depending on how the
    application is installed (uninstalled module/package, site-packages,
    egg, with and without a restricted loader).

    The sys.prefix / sys.path patching below must be undone in the
    ``finally`` blocks, and imported test apps removed from sys.modules,
    so tests stay independent of each other.
    """

    def test_explicit_instance_paths(self):
        here = os.path.abspath(os.path.dirname(__file__))
        try:
            # Relative instance paths are rejected with a ValueError.
            flask.Flask(__name__, instance_path='instance')
        except ValueError as e:
            self.assert_in('must be absolute', str(e))
        else:
            self.fail('Expected value error')
        app = flask.Flask(__name__, instance_path=here)
        self.assert_equal(app.instance_path, here)

    def test_main_module_paths(self):
        # Test an app with '__main__' as the import name, uses cwd.
        from main_app import app
        here = os.path.abspath(os.getcwd())
        self.assert_equal(app.instance_path, os.path.join(here, 'instance'))
        if 'main_app' in sys.modules:
            del sys.modules['main_app']

    def test_uninstalled_module_paths(self):
        from config_module_app import app
        here = os.path.abspath(os.path.dirname(__file__))
        self.assert_equal(app.instance_path, os.path.join(here, 'test_apps', 'instance'))

    def test_uninstalled_package_paths(self):
        from config_package_app import app
        here = os.path.abspath(os.path.dirname(__file__))
        self.assert_equal(app.instance_path, os.path.join(here, 'test_apps', 'instance'))

    def test_installed_module_paths(self):
        here = os.path.abspath(os.path.dirname(__file__))
        expected_prefix = os.path.join(here, 'test_apps')
        # Temporarily point sys.prefix at the fixture tree and put its
        # site-packages on sys.path so site_app imports as "installed".
        real_prefix, sys.prefix = sys.prefix, expected_prefix
        site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
        sys.path.append(site_packages)
        try:
            import site_app
            self.assert_equal(site_app.app.instance_path,
                              os.path.join(expected_prefix, 'var',
                                           'site_app-instance'))
        finally:
            sys.prefix = real_prefix
            sys.path.remove(site_packages)
            if 'site_app' in sys.modules:
                del sys.modules['site_app']

    def test_installed_module_paths_with_limited_loader(self):
        here = os.path.abspath(os.path.dirname(__file__))
        expected_prefix = os.path.join(here, 'test_apps')
        real_prefix, sys.prefix = sys.prefix, expected_prefix
        site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
        sys.path.append(site_packages)
        # Same as above, but with loaders lacking get_filename/archive.
        with patch_pkgutil_get_loader():
            try:
                import site_app
                self.assert_equal(site_app.app.instance_path,
                                  os.path.join(expected_prefix, 'var',
                                               'site_app-instance'))
            finally:
                sys.prefix = real_prefix
                sys.path.remove(site_packages)
                if 'site_app' in sys.modules:
                    del sys.modules['site_app']

    def test_installed_package_paths(self):
        here = os.path.abspath(os.path.dirname(__file__))
        expected_prefix = os.path.join(here, 'test_apps')
        real_prefix, sys.prefix = sys.prefix, expected_prefix
        installed_path = os.path.join(expected_prefix, 'path')
        sys.path.append(installed_path)
        try:
            import installed_package
            self.assert_equal(installed_package.app.instance_path,
                              os.path.join(expected_prefix, 'var',
                                           'installed_package-instance'))
        finally:
            sys.prefix = real_prefix
            sys.path.remove(installed_path)
            if 'installed_package' in sys.modules:
                del sys.modules['installed_package']

    def test_installed_package_paths_with_limited_loader(self):
        here = os.path.abspath(os.path.dirname(__file__))
        expected_prefix = os.path.join(here, 'test_apps')
        real_prefix, sys.prefix = sys.prefix, expected_prefix
        installed_path = os.path.join(expected_prefix, 'path')
        sys.path.append(installed_path)
        with patch_pkgutil_get_loader():
            try:
                import installed_package
                self.assert_equal(installed_package.app.instance_path,
                                  os.path.join(expected_prefix, 'var',
                                               'installed_package-instance'))
            finally:
                sys.prefix = real_prefix
                sys.path.remove(installed_path)
                if 'installed_package' in sys.modules:
                    del sys.modules['installed_package']

    def test_prefix_package_paths(self):
        here = os.path.abspath(os.path.dirname(__file__))
        expected_prefix = os.path.join(here, 'test_apps')
        real_prefix, sys.prefix = sys.prefix, expected_prefix
        site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
        sys.path.append(site_packages)
        try:
            import site_package
            self.assert_equal(site_package.app.instance_path,
                              os.path.join(expected_prefix, 'var',
                                           'site_package-instance'))
        finally:
            sys.prefix = real_prefix
            sys.path.remove(site_packages)
            if 'site_package' in sys.modules:
                del sys.modules['site_package']

    def test_prefix_package_paths_with_limited_loader(self):
        here = os.path.abspath(os.path.dirname(__file__))
        expected_prefix = os.path.join(here, 'test_apps')
        real_prefix, sys.prefix = sys.prefix, expected_prefix
        site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
        sys.path.append(site_packages)
        with patch_pkgutil_get_loader():
            try:
                import site_package
                self.assert_equal(site_package.app.instance_path,
                                  os.path.join(expected_prefix, 'var',
                                               'site_package-instance'))
            finally:
                sys.prefix = real_prefix
                sys.path.remove(site_packages)
                if 'site_package' in sys.modules:
                    del sys.modules['site_package']

    def test_egg_installed_paths(self):
        here = os.path.abspath(os.path.dirname(__file__))
        expected_prefix = os.path.join(here, 'test_apps')
        real_prefix, sys.prefix = sys.prefix, expected_prefix
        site_packages = os.path.join(expected_prefix, 'lib', 'python2.5', 'site-packages')
        egg_path = os.path.join(site_packages, 'SiteEgg.egg')
        sys.path.append(site_packages)
        sys.path.append(egg_path)
        try:
            import site_egg  # in SiteEgg.egg
            self.assert_equal(site_egg.app.instance_path,
                              os.path.join(expected_prefix, 'var',
                                           'site_egg-instance'))
        finally:
            sys.prefix = real_prefix
            sys.path.remove(site_packages)
            sys.path.remove(egg_path)
            if 'site_egg' in sys.modules:
                del sys.modules['site_egg']
def suite():
    """Collect the config and instance-path test cases into one suite."""
    tests = unittest.TestSuite()
    for case in (ConfigTestCase, InstanceTestCase):
        tests.addTest(unittest.makeSuite(case))
    return tests
| mit |
eric-dowty/JavaScript-2048 | node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/MSVSUtil.py | 566 | 9386 | # Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Utility functions shared amongst the Windows generators."""
import copy
import os
# Maps a GYP target type to the file extension of its primary linker output;
# used by _GetPdbPath to derive default PDB file names.
_TARGET_TYPE_EXT = {
  'executable': '.exe',
  'loadable_module': '.dll',
  'shared_library': '.dll',
}
def _GetLargePdbShimCcPath():
"""Returns the path of the large_pdb_shim.cc file."""
this_dir = os.path.abspath(os.path.dirname(__file__))
src_dir = os.path.abspath(os.path.join(this_dir, '..', '..'))
win_data_dir = os.path.join(src_dir, 'data', 'win')
large_pdb_shim_cc = os.path.join(win_data_dir, 'large-pdb-shim.cc')
return large_pdb_shim_cc
def _DeepCopySomeKeys(in_dict, keys):
"""Performs a partial deep-copy on |in_dict|, only copying the keys in |keys|.
Arguments:
in_dict: The dictionary to copy.
keys: The keys to be copied. If a key is in this list and doesn't exist in
|in_dict| this is not an error.
Returns:
The partially deep-copied dictionary.
"""
d = {}
for key in keys:
if key not in in_dict:
continue
d[key] = copy.deepcopy(in_dict[key])
return d
def _SuffixName(name, suffix):
"""Add a suffix to the end of a target.
Arguments:
name: name of the target (foo#target)
suffix: the suffix to be added
Returns:
Target name with suffix added (foo_suffix#target)
"""
parts = name.rsplit('#', 1)
parts[0] = '%s_%s' % (parts[0], suffix)
return '#'.join(parts)
def _ShardName(name, number):
"""Add a shard number to the end of a target.
Arguments:
name: name of the target (foo#target)
number: shard number
Returns:
Target name with shard added (foo_1#target)
"""
return _SuffixName(name, str(number))
def ShardTargets(target_list, target_dicts):
  """Shard some targets apart to work around the linkers limits.

  Targets opt in via an integer 'msvs_shard' property giving the number of
  pieces; each shard gets a round-robin slice of the sources, and any
  dependency on a sharded target is rewritten to depend on every shard.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
  Returns:
    Tuple of the new sharded versions of the inputs.
  """
  # Gather the targets to shard, and how many pieces.
  targets_to_shard = {}
  for t in target_dicts:
    shards = int(target_dicts[t].get('msvs_shard', 0))
    if shards:
      targets_to_shard[t] = shards
  # Shard target_list.
  new_target_list = []
  for t in target_list:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        new_target_list.append(_ShardName(t, i))
    else:
      new_target_list.append(t)
  # Shard target_dict.
  new_target_dicts = {}
  for t in target_dicts:
    if t in targets_to_shard:
      for i in range(targets_to_shard[t]):
        name = _ShardName(t, i)
        # Shallow copy is enough here: 'sources' is replaced below and
        # 'target_name'/'dependencies' are reassigned, not mutated in place.
        new_target_dicts[name] = copy.copy(target_dicts[t])
        new_target_dicts[name]['target_name'] = _ShardName(
            new_target_dicts[name]['target_name'], i)
        sources = new_target_dicts[name].get('sources', [])
        new_sources = []
        # Round-robin the sources so each shard gets an (almost) equal share.
        for pos in range(i, len(sources), targets_to_shard[t]):
          new_sources.append(sources[pos])
        new_target_dicts[name]['sources'] = new_sources
    else:
      new_target_dicts[t] = target_dicts[t]
  # Shard dependencies.
  for t in new_target_dicts:
    dependencies = copy.copy(new_target_dicts[t].get('dependencies', []))
    new_dependencies = []
    for d in dependencies:
      # A dependency on a sharded target becomes a dependency on every shard.
      if d in targets_to_shard:
        for i in range(targets_to_shard[d]):
          new_dependencies.append(_ShardName(d, i))
      else:
        new_dependencies.append(d)
    new_target_dicts[t]['dependencies'] = new_dependencies
  return (new_target_list, new_target_dicts)
def _GetPdbPath(target_dict, config_name, vars):
"""Returns the path to the PDB file that will be generated by a given
configuration.
The lookup proceeds as follows:
- Look for an explicit path in the VCLinkerTool configuration block.
- Look for an 'msvs_large_pdb_path' variable.
- Use '<(PRODUCT_DIR)/<(product_name).(exe|dll).pdb' if 'product_name' is
specified.
- Use '<(PRODUCT_DIR)/<(target_name).(exe|dll).pdb'.
Arguments:
target_dict: The target dictionary to be searched.
config_name: The name of the configuration of interest.
vars: A dictionary of common GYP variables with generator-specific values.
Returns:
The path of the corresponding PDB file.
"""
config = target_dict['configurations'][config_name]
msvs = config.setdefault('msvs_settings', {})
linker = msvs.get('VCLinkerTool', {})
pdb_path = linker.get('ProgramDatabaseFile')
if pdb_path:
return pdb_path
variables = target_dict.get('variables', {})
pdb_path = variables.get('msvs_large_pdb_path', None)
if pdb_path:
return pdb_path
pdb_base = target_dict.get('product_name', target_dict['target_name'])
pdb_base = '%s%s.pdb' % (pdb_base, _TARGET_TYPE_EXT[target_dict['type']])
pdb_path = vars['PRODUCT_DIR'] + '/' + pdb_base
return pdb_path
def InsertLargePdbShims(target_list, target_dicts, vars):
  """Insert a shim target that forces the linker to use 4KB pagesize PDBs.

  This is a workaround for targets with PDBs greater than 1GB in size, the
  limit for the 1KB pagesize PDBs created by the linker by default.

  For every target with 'msvs_large_pdb' set, two helper targets are added:
  a 'none' target that copies large-pdb-shim.cc into the intermediate dir,
  and a static_library compiling that copy with compiler settings that make
  the linker create the PDB (at the original target's PDB path) with 4KB
  pages before the real link happens.

  Arguments:
    target_list: List of target pairs: 'base/base.gyp:base'.
    target_dicts: Dict of target properties keyed on target pair.
    vars: A dictionary of common GYP variables with generator-specific values.
  Returns:
    Tuple of the shimmed version of the inputs.
  """
  # Determine which targets need shimming.
  targets_to_shim = []
  for t in target_dicts:
    target_dict = target_dicts[t]
    # We only want to shim targets that have msvs_large_pdb enabled.
    if not int(target_dict.get('msvs_large_pdb', 0)):
      continue
    # This is intended for executable, shared_library and loadable_module
    # targets where every configuration is set up to produce a PDB output.
    # If any of these conditions is not true then the shim logic will fail
    # below.
    targets_to_shim.append(t)
  large_pdb_shim_cc = _GetLargePdbShimCcPath()
  for t in targets_to_shim:
    target_dict = target_dicts[t]
    target_name = target_dict.get('target_name')
    base_dict = _DeepCopySomeKeys(target_dict,
          ['configurations', 'default_configuration', 'toolset'])
    # This is the dict for copying the source file (part of the GYP tree)
    # to the intermediate directory of the project. This is necessary because
    # we can't always build a relative path to the shim source file (on Windows
    # GYP and the project may be on different drives), and Ninja hates absolute
    # paths (it ends up generating the .obj and .obj.d alongside the source
    # file, polluting GYPs tree).
    copy_suffix = 'large_pdb_copy'
    copy_target_name = target_name + '_' + copy_suffix
    full_copy_target_name = _SuffixName(t, copy_suffix)
    shim_cc_basename = os.path.basename(large_pdb_shim_cc)
    shim_cc_dir = vars['SHARED_INTERMEDIATE_DIR'] + '/' + copy_target_name
    shim_cc_path = shim_cc_dir + '/' + shim_cc_basename
    copy_dict = copy.deepcopy(base_dict)
    copy_dict['target_name'] = copy_target_name
    copy_dict['type'] = 'none'
    copy_dict['sources'] = [ large_pdb_shim_cc ]
    copy_dict['copies'] = [{
      'destination': shim_cc_dir,
      'files': [ large_pdb_shim_cc ]
    }]
    # This is the dict for the PDB generating shim target. It depends on the
    # copy target.
    shim_suffix = 'large_pdb_shim'
    shim_target_name = target_name + '_' + shim_suffix
    full_shim_target_name = _SuffixName(t, shim_suffix)
    shim_dict = copy.deepcopy(base_dict)
    shim_dict['target_name'] = shim_target_name
    shim_dict['type'] = 'static_library'
    shim_dict['sources'] = [ shim_cc_path ]
    shim_dict['dependencies'] = [ full_copy_target_name ]
    # Set up the shim to output its PDB to the same location as the final linker
    # target.
    # NOTE: .iteritems() makes this module Python 2 only.
    for config_name, config in shim_dict.get('configurations').iteritems():
      pdb_path = _GetPdbPath(target_dict, config_name, vars)
      # A few keys that we don't want to propagate.
      for key in ['msvs_precompiled_header', 'msvs_precompiled_source', 'test']:
        config.pop(key, None)
      msvs = config.setdefault('msvs_settings', {})
      # Update the compiler directives in the shim target.
      # DebugInformationFormat '3' is /Zi (PDB-based debug info).
      compiler = msvs.setdefault('VCCLCompilerTool', {})
      compiler['DebugInformationFormat'] = '3'
      compiler['ProgramDataBaseFileName'] = pdb_path
      # Set the explicit PDB path in the appropriate configuration of the
      # original target.
      config = target_dict['configurations'][config_name]
      msvs = config.setdefault('msvs_settings', {})
      linker = msvs.setdefault('VCLinkerTool', {})
      linker['GenerateDebugInformation'] = 'true'
      linker['ProgramDatabaseFile'] = pdb_path
    # Add the new targets. They must go to the beginning of the list so that
    # the dependency generation works as expected in ninja.
    target_list.insert(0, full_copy_target_name)
    target_list.insert(0, full_shim_target_name)
    target_dicts[full_copy_target_name] = copy_dict
    target_dicts[full_shim_target_name] = shim_dict
    # Update the original target to depend on the shim target.
    target_dict.setdefault('dependencies', []).append(full_shim_target_name)
  return (target_list, target_dicts)
SouthStar/portia | slyd/slyd/splash/proxy.py | 5 | 4004 | from __future__ import absolute_import
import functools
import requests
from twisted.internet.threads import deferToThread
from twisted.internet.defer import CancelledError
from twisted.web.resource import Resource
from twisted.web.server import NOT_DONE_YET
from PyQt4.QtNetwork import QNetworkRequest
from .ferry import User
from .css_utils import process_css
class ProxyResource(Resource):
    """Proxies asset requests (images, stylesheets, ...) for the UI.

    If the requesting user owns a live browser tab the fetch goes through
    that tab's HTTP client (sharing its cookies/session); otherwise a plain
    ``requests`` GET is performed in a worker thread.
    """
    def render_GET(self, request):
        # Reject unauthenticated callers outright.
        if not request.auth_info or not request.auth_info.get('username', None):
            return self._error(request, 403, 'Auth required')
        # Each of these query arguments must be present exactly once.
        for arg in 'url', 'referer', 'tabid':
            if arg not in request.args or len(request.args[arg]) != 1:
                return self._error(request, 400, 'Argument required: {}'.format(arg))
        url = request.args['url'][0]
        referer = request.args['referer'][0]
        tabid = int(request.args['tabid'][0])
        user = User.findById(tabid)
        # It's not easy to cancel a request that's being made by splash, because it does't
        # return the QNetworkReply and when redirecting the current QNetworkReply changes,
        # so if the client closes the connection while fetching the content we simply note
        # it in this object and let the request finish without aborting.
        connection_status = { "finished": False }
        cb = functools.partial(self.end_response, request, url, connection_status, tabid)
        if not user or not user.tab:
            # No live tab for this id: fall back to a threaded requests.get().
            d = deferToThread(requests.get, url, headers={'referer': referer})
            d.addCallback(cb)
            d.addErrback(self._requestError, request)
            request.notifyFinish().addErrback(self._requestDisconnect, deferred=d)
            return NOT_DONE_YET
        if request.auth_info['username'] != user.auth['username']:
            return self._error(request, 403, "You don't own that browser session")
        request.notifyFinish().addErrback(self._requestDisconnect, None, connection_status)
        user.tab.http_client.get(url, cb, headers={'referer': referer})
        return NOT_DONE_YET
    def _requestError(self, err, request):
        # Errback for the threaded fetch: report a 500 unless the deferred
        # was cancelled because the client disconnected.
        if not err.check(CancelledError):
            request.setResponseCode(500)
            request.write('Error fetching the content')
            request.finish()
    def _requestDisconnect(self, err, deferred=None, connection_status=None):
        # The client closed the connection: cancel the threaded fetch if any,
        # or mark the splash fetch as abandoned so end_response drops it.
        if deferred:
            deferred.cancel()
        if connection_status:
            connection_status["finished"] = True
    def end_response(self, request, original_url, connection_status, tabid, reply):
        # 'reply' is either a QNetworkReply (splash path) or a
        # requests.Response (threaded path); both are handled by duck typing.
        if connection_status["finished"]:
            # Client went away while we were fetching; silently drop.
            return
        if hasattr(reply, 'readAll'):
            content = str(reply.readAll())
            status_code = reply.attribute(QNetworkRequest.HttpStatusCodeAttribute).toPyObject()
            request.setResponseCode(status_code or 500)
        else:
            content = ''.join(chunk for chunk in reply.iter_content(65535))
            redirect_url = None
            request.setResponseCode(reply.status_code)
        headers = {
            'cache-control': 'private',
            'pragma': 'no-cache',
            'content-type': 'application/octet-stream',
        }
        # Copy through a whitelist of headers from the upstream reply,
        # falling back to the defaults above.
        for header in ('content-type', 'cache-control', 'pragma', 'vary',
                       'max-age'):
            if hasattr(reply, 'hasRawHeader') and reply.hasRawHeader(header):
                headers[header] = str(reply.rawHeader(header))
            elif hasattr(reply, 'headers') and header in reply.headers:
                headers[header] = str(reply.headers.get(header))
            if header in headers:
                request.setHeader(header, headers[header])
        # Rewrite references inside stylesheets so they keep resolving
        # through this proxy for the same tab.
        if headers['content-type'].strip().startswith('text/css'):
            content = process_css(content, tabid, original_url)
        request.write(content)
        request.finish()
    def _error(self, request, code, message):
        # Helper: set an HTTP error code and return the body to render.
        request.setResponseCode(code)
        return message
| bsd-3-clause |
oasiswork/odoo | addons/base_report_designer/openerp_sxw2rml/openerp_sxw2rml.py | 301 | 14179 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c):
#
# 2005 pyopenoffice.py Martin Simon (http://www.bezirksreiter.de)
# 2005 Fabien Pinckaers, TINY SPRL. (http://tiny.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#!/usr/bin/python
"""
OpenERP SXW2RML - The OpenERP's report engine
OpenERP SXW2RML is part of the OpenERP Report Project.
OpenERP Report is a module that allows you to render high quality PDF document
from an OpenOffice template (.sxw) and any relationl database.
"""
__version__ = '0.9'
import re
import string
import os
import zipfile
import xml.dom.minidom
from reportlab.lib.units import toLength
import base64
import copy
class DomApiGeneral:
    """General DOM API utilities."""
    def __init__(self, content_string="", file=""):
        # NOTE(review): the 'file' argument is accepted but never used.
        self.content_string = content_string
        # Matches "<number><unit>" where unit is pt/cm/mm/inch/in.
        self.re_digits = re.compile(r"(.*?\d)(pt|cm|mm|inch|in)")
    def _unitTuple(self, string):
        """Split values and units to a tuple."""
        temp = self.re_digits.findall(string)
        if not temp:
            # No recognizable unit: return the raw value with an empty unit.
            return (string,"")
        else:
            return (temp[0])
    def stringPercentToFloat(self, string):
        # "115%" -> 1.15
        temp = string.replace("""%""","")
        return float(temp)/100
    def findChildrenByName(self, parent, name, attr_dict=None):
        """Helper functions. Does not work recursively.
        Optional: also test for certain attribute/value pairs."""
        if attr_dict is None:
            attr_dict = {}
        children = []
        for c in parent.childNodes:
            if c.nodeType == c.ELEMENT_NODE and c.nodeName == name:
                children.append(c)
        if attr_dict == {}:
            return children
        else:
            return self._selectForAttributes(nodelist=children,attr_dict=attr_dict)
    def _selectForAttributes(self, nodelist, attr_dict):
        """Helper function: keep only nodes whose attributes match every
        key/value pair in attr_dict."""
        selected_nodes = []
        for n in nodelist:
            check = 1
            for a in attr_dict.keys():
                if n.getAttribute(a) != attr_dict[a]:
                    # at least one incorrect attribute value?
                    check = 0
            if check:
                selected_nodes.append(n)
        return selected_nodes
    def _stringToTuple(self, s):
        """Helper function: parse "x,y" into an (int, int) tuple, or None."""
        try:
            temp = string.split(s,",")
            return int(temp[0]),int(temp[1])
        except:
            return None
    def _tupleToString(self, t):
        # Inverse of _stringToTuple; returns None on failure.
        try:
            return self.openOfficeStringUtf8("%s,%s" % (t[0],t[1]))
        except:
            return None
    def _lengthToFloat(self, value):
        """Convert an OOo length such as "2.5cm" to rounded Reportlab points;
        non-length values are returned unchanged."""
        v = value
        if not self.re_digits.search(v):
            return v
        try:
            if v[-4:] == "inch":
                # OO files use "inch" instead of "in" in Reportlab units
                v = v[:-2]
        except:
            pass
        try:
            c = round(toLength(v))
            return c
        except:
            return v
    def openOfficeStringUtf8(self, string):
        # Re-encode a byte string to UTF-8; unicode input is passed through.
        # NOTE(review): assumes byte strings are cp1252 -- verify for non
        # western-European documents.
        if type(string) == unicode:
            return string.encode("utf-8")
        tempstring = unicode(string,"cp1252").encode("utf-8")
        return tempstring
class DomApi(DomApiGeneral):
    """This class provides a DOM-API for XML-Files from an SXW-Archive."""
    def __init__(self, xml_content, xml_styles):
        # Parse content.xml and styles.xml into separate DOM trees.
        DomApiGeneral.__init__(self)
        self.content_dom = xml.dom.minidom.parseString(xml_content)
        self.styles_dom = xml.dom.minidom.parseString(xml_styles)
        body = self.content_dom.getElementsByTagName("office:body")
        self.body = body and body[0]
        # TODO:
        self.style_dict = {}
        self.style_properties_dict = {}
        # ******** always use the following order:
        self.buildStyleDict()
        self.buildStylePropertiesDict()
        # SXW archives use style:page-master, ODT archives style:page-layout;
        # whichever exists wins.
        if self.styles_dom.getElementsByTagName("style:page-master").__len__()<>0:
            self.page_master = self.styles_dom.getElementsByTagName("style:page-master")[0]
        if self.styles_dom.getElementsByTagName("style:page-layout").__len__()<>0 :
            self.page_master = self.styles_dom.getElementsByTagName("style:page-layout")[0]
        self.document = self.content_dom.getElementsByTagName("office:document-content")[0]
    def buildStylePropertiesDict(self):
        # Resolve the full (inherited) property set for every known style.
        for s in self.style_dict.keys():
            self.style_properties_dict[s] = self.getStylePropertiesDict(s)
    def updateWithPercents(self, dict, updatedict):
        """Sometimes you find values like "115%" in the style hierarchy."""
        if not updatedict:
            # no style hierarchies for this style? =>
            return
        new_updatedict = copy.copy(updatedict)
        for u in new_updatedict.keys():
            try:
                if new_updatedict[u].find("""%""") != -1 and dict.has_key(u):
                    # Resolve the percentage against the inherited value.
                    number = float(self.re_digits.search(dict[u]).group(1))
                    unit = self.re_digits.search(dict[u]).group(2)
                    new_number = self.stringPercentToFloat(new_updatedict[u]) * number
                    if unit == "pt":
                        new_number = int(new_number)
                        # no floats allowed for "pt"
                        # OOo just takes the int, does not round (try it out!)
                    new_updatedict[u] = "%s%s" % (new_number,unit)
                else:
                    dict[u] = new_updatedict[u]
            except:
                dict[u] = new_updatedict[u]
        dict.update(new_updatedict)
    def normalizeStyleProperties(self):
        """Transfer all style:style-properties attributes from the
        self.style_properties_hierarchical dict to the automatic-styles
        from content.xml. Use this function to preprocess content.xml for
        XSLT transformations etc.Do not try to implement this function
        with XSlT - believe me, it's a terrible task..."""
        styles_styles = self.styles_dom.getElementsByTagName("style:style")
        automatic_styles = self.content_dom.getElementsByTagName("office:automatic-styles")[0]
        for s in styles_styles:
            automatic_styles.appendChild(s.cloneNode(deep=1))
        content_styles = self.content_dom.getElementsByTagName("style:style")
        # these are the content_styles with styles_styles added!!!
        for s in content_styles:
            c = self.findChildrenByName(s,"style:properties")
            if c == []:
                # some derived automatic styles do not have "style:properties":
                temp = self.content_dom.createElement("style:properties")
                s.appendChild(temp)
                c = self.findChildrenByName(s,"style:properties")
            c = c[0]
            dict = self.style_properties_dict[(s.getAttribute("style:name")).encode("utf-8")] or {}
            for attribute in dict.keys():
                c.setAttribute(self.openOfficeStringUtf8(attribute),self.openOfficeStringUtf8(dict[attribute]))
    def transferStylesXml(self):
        """Transfer certain sub-trees from styles.xml to the normalized content.xml
        (see above). It is not necessary to do this - for example - with paragraph styles.
        the "normalized" style properties contain all information needed for
        further processing."""
        # TODO: What about table styles etc.?
        outline_styles = self.styles_dom.getElementsByTagName("text:outline-style")
        t = self.content_dom.createElement("transferredfromstylesxml")
        self.document.insertBefore(t,self.body)
        t_new = self.body.previousSibling
        try:
            page_master = self.page_master
            t_new.appendChild(page_master.cloneNode(deep=1))
            t_new.appendChild(outline_styles[0].cloneNode(deep=1))
        except:
            # Missing page master / outline styles are simply skipped.
            pass
    def normalizeLength(self):
        """Normalize all lengthes to floats (i.e: 1 inch = 72).
        Always use this after "normalizeContent" and "transferStyles"!"""
        # TODO: The complex attributes of table cell styles are not transferred yet.
        #all_styles = self.content_dom.getElementsByTagName("style:properties")
        #all_styles += self.content_dom.getElementsByTagName("draw:image")
        all_styles = self.content_dom.getElementsByTagName("*")
        for s in all_styles:
            for x in s._attrs.keys():
                v = s.getAttribute(x)
                s.setAttribute(x,"%s" % self._lengthToFloat(v))
                # convert float to string first!
    def normalizeTableColumns(self):
        """Handle this strange table:number-columns-repeated attribute."""
        columns = self.content_dom.getElementsByTagName("table:table-column")
        for c in columns:
            if c.hasAttribute("table:number-columns-repeated"):
                number = int(c.getAttribute("table:number-columns-repeated"))
                c.removeAttribute("table:number-columns-repeated")
                # Expand the shorthand into explicit column nodes.
                for i in range(number-1):
                    (c.parentNode).insertBefore(c.cloneNode(deep=1),c)
    def buildStyleDict(self):
        """Store all style:style-nodes from content.xml and styles.xml in self.style_dict.
        Caution: in this dict the nodes from two dom apis are merged!"""
        for st in (self.styles_dom,self.content_dom):
            for s in st.getElementsByTagName("style:style"):
                name = s.getAttribute("style:name").encode("utf-8")
                self.style_dict[name] = s
        return True
    def toxml(self):
        # Serialize the (normalized) content DOM back to UTF-8 XML.
        return self.content_dom.toxml(encoding="utf-8")
    def getStylePropertiesDict(self, style_name):
        """Recursively merge the *-properties attributes of a style and its
        parents (parents first, so the style's own values win)."""
        res = {}
        if self.style_dict[style_name].hasAttribute("style:parent-style-name"):
            parent = self.style_dict[style_name].getAttribute("style:parent-style-name").encode("utf-8")
            res = self.getStylePropertiesDict(parent)
        children = self.style_dict[style_name].childNodes
        for c in children:
            if c.nodeType == c.ELEMENT_NODE and c.nodeName.find("properties")>0 :
                for attr in c._attrs.keys():
                    res[attr] = c.getAttribute(attr).encode("utf-8")
        return res
class PyOpenOffice(object):
    """This is the main class which provides all functionality."""
    def __init__(self, path='.', save_pict=False):
        # path: directory where extracted pictures are written (if save_pict).
        self.path = path
        self.save_pict = save_pict
        # picture name -> raw bytes, filled by oo_read().
        self.images = {}
    def oo_read(self, fname):
        """Extract content.xml, styles.xml and all pictures from the archive.

        Returns the (content, styles) XML strings; pictures are collected in
        self.images and optionally written to self.path."""
        z = zipfile.ZipFile(fname,"r")
        content = z.read('content.xml')
        style = z.read('styles.xml')
        all = z.namelist()
        for a in all:
            if a[:9]=='Pictures/' and len(a)>10:
                pic_content = z.read(a)
                self.images[a[9:]] = pic_content
                if self.save_pict:
                    f=open(os.path.join(self.path, os.path.basename(a)),"wb")
                    f.write(pic_content)
                    f.close()
        z.close()
        return content,style
    def oo_replace(self, content):
        """Rewrite a few paragraph constructs before DOM processing."""
        # NOTE(review): "$1"/"$2" are not Python regex backrefs (Python uses
        # \1); if the second pattern ever matched, the literal "$1" would be
        # emitted. Looks like code ported from another regex dialect - verify.
        regex = [
            (r"<para[^>]*/>", ""),
            (r"<para(.*)>(.*?)<text:line-break[^>]*/>", "<para$1>$2</para><para$1>"),
        ]
        for key,val in regex:
            content = re.sub(key, val, content)
        return content
    def unpackNormalize(self, sourcefile):
        """Full pipeline: unzip, patch and normalize; returns content XML."""
        c,s = self.oo_read(sourcefile)
        c = self.oo_replace(c)
        dom = DomApi(c,s)
        dom.normalizeStyleProperties()
        dom.transferStylesXml()
        dom.normalizeLength()
        dom.normalizeTableColumns()
        new_c = dom.toxml()
        return new_c
def sxw2rml(sxw_file, xsl, output='.', save_pict=False):
    """Convert an .sxw/.odt template to RML using the given XSLT stylesheet.

    Embedded pictures are appended, base64-encoded, under an <images> node
    inside the /document/stylesheet element of the result.
    """
    from lxml import etree
    from StringIO import StringIO
    tool = PyOpenOffice(output, save_pict = save_pict)
    res = tool.unpackNormalize(sxw_file)
    f = StringIO(xsl)
    styledoc = etree.parse(f)
    style = etree.XSLT(styledoc)
    f = StringIO(res)
    doc = etree.parse(f)
    result = style(doc)
    root = etree.XPathEvaluator(result)("/document/stylesheet")
    if root:
        root=root[0]
        images = etree.Element("images")
        for img in tool.images:
            node = etree.Element('image', name=img)
            node.text = base64.encodestring(tool.images[img])
            images.append(node)
        root.append(images)
    try:
        xml = str(result)
        return xml
    except:
        # Fall back to the raw XSLT result object if serialization fails.
        return result
if __name__ == "__main__":
    # Command-line entry point: convert the given .sxw/.odt file to RML on
    # stdout, choosing the stylesheet from the archive's mimetype.
    import optparse
    parser = optparse.OptionParser(
        version="Odoo Report v%s" % __version__,
        usage = 'openerp_sxw2rml.py [options] file.sxw')
    parser.add_option("-v", "--verbose", default=False, dest="verbose", help="enable basic debugging")
    parser.add_option("-o", "--output", dest="output", default='.', help="directory of image output")
    (opt, args) = parser.parse_args()
    if len(args) != 1:
        parser.error("Incorrect number of arguments.")
    import sys
    fname = sys.argv[1]
    f = fname
    xsl_file = 'normalized_oo2rml.xsl'
    z = zipfile.ZipFile(fname,"r")
    mimetype = z.read('mimetype')
    # ODT archives need the odt stylesheet; anything else gets the sxw one.
    if mimetype.split('/')[-1] == 'vnd.oasis.opendocument.text' :
        xsl_file = 'normalized_odt2rml.xsl'
    # The stylesheet is looked up next to this script.
    xsl = file(os.path.join(os.getcwd(), os.path.dirname(sys.argv[0]), xsl_file)).read()
    result = sxw2rml(f, xsl, output=opt.output, save_pict=False)
    print result
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
pilou-/ansible | test/integration/targets/azure_rm_keyvault/lookup_plugins/azure_service_principal_attribute.py | 84 | 3519 | # (c) 2018 Yunge Zhu, <yungez@microsoft.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: azure_service_principal_attribute
requirements:
- azure-graphrbac
author:
- Yunge Zhu <yungez@microsoft.com>
version_added: "2.7"
short_description: Look up Azure service principal attributes.
description:
- Describes object id of your Azure service principal account.
options:
azure_client_id:
description: azure service principal client id.
azure_secret:
description: azure service principal secret
azure_tenant:
description: azure tenant
azure_cloud_environment:
description: azure cloud environment
"""
EXAMPLES = """
set_fact:
object_id: "{{ lookup('azure_service_principal_attribute',
azure_client_id=azure_client_id,
azure_secret=azure_secret,
azure_tenant=azure_secret) }}"
"""
RETURN = """
_raw:
description:
Returns object id of service principal.
"""
from ansible.errors import AnsibleError
from ansible.plugins import AnsiblePlugin
from ansible.plugins.lookup import LookupBase
from ansible.module_utils._text import to_native
try:
from azure.common.credentials import ServicePrincipalCredentials
from azure.graphrbac import GraphRbacManagementClient
from msrestazure import azure_cloud
from msrestazure.azure_exceptions import CloudError
except ImportError:
raise AnsibleError(
"The lookup azure_service_principal_attribute requires azure.graphrbac, msrest")
class LookupModule(LookupBase):
    """Look up the AAD object id of the service principal whose appId equals
    the supplied azure_client_id, using the Azure AD graph API.

    Raises AnsibleError when credentials are missing or the lookup fails.
    """

    def run(self, terms, variables, **kwargs):
        self.set_options(direct=kwargs)

        credentials = {}
        credentials['azure_client_id'] = self.get_option('azure_client_id', None)
        credentials['azure_secret'] = self.get_option('azure_secret', None)
        credentials['azure_tenant'] = self.get_option('azure_tenant', 'common')

        if credentials['azure_client_id'] is None or credentials['azure_secret'] is None:
            raise AnsibleError("Must specify azure_client_id and azure_secret")

        _cloud_environment = azure_cloud.AZURE_PUBLIC_CLOUD
        # BUG FIX: the original stored the resolved cloud in an unused local
        # ('cloud_environment') instead of '_cloud_environment', and indexed
        # credentials['azure_cloud_environment'] -- a key that was never set,
        # so requesting a custom cloud raised KeyError and was ignored.
        cloud_environment_name = self.get_option('azure_cloud_environment', None)
        if cloud_environment_name is not None:
            credentials['azure_cloud_environment'] = cloud_environment_name
            _cloud_environment = azure_cloud.get_cloud_from_metadata_endpoint(cloud_environment_name)

        try:
            azure_credentials = ServicePrincipalCredentials(client_id=credentials['azure_client_id'],
                                                            secret=credentials['azure_secret'],
                                                            tenant=credentials['azure_tenant'],
                                                            resource=_cloud_environment.endpoints.active_directory_graph_resource_id)

            client = GraphRbacManagementClient(azure_credentials, credentials['azure_tenant'],
                                               base_url=_cloud_environment.endpoints.active_directory_graph_resource_id)

            response = list(client.service_principals.list(filter="appId eq '{0}'".format(credentials['azure_client_id'])))
            sp = response[0]

            return sp.object_id.split(',')
        except CloudError as ex:
            raise AnsibleError("Failed to get service principal object id: %s" % to_native(ex))
        # The original ended with an unreachable 'return False'; every path
        # above either returns or raises, so it has been removed.
| gpl-3.0 |
supriyantomaftuh/django | tests/view_tests/tests/py3_test_debug.py | 335 | 1849 | """
Since this file contains Python 3 specific syntax, it's named without a test_
prefix so the test runner won't try to import it. Instead, the test class is
imported in test_debug.py, but only on Python 3.
This filename is also in setup.cfg flake8 exclude since the Python 2 syntax
error (raise ... from ...) can't be silenced using NOQA.
"""
import sys
from django.test import RequestFactory, TestCase
from django.views.debug import ExceptionReporter
class Py3ExceptionReporterTests(TestCase):
    """Exercise ExceptionReporter against a chain of nested exceptions that
    uses Python 3 only syntax (``raise ... from ...``)."""
    rf = RequestFactory()
    def test_reporting_of_nested_exceptions(self):
        request = self.rf.get('/test_view/')
        try:
            try:
                raise AttributeError('Top level')
            except AttributeError as explicit:
                try:
                    # Explicit chaining: sets __cause__.
                    raise ValueError('Second exception') from explicit
                except ValueError:
                    # Implicit chaining: sets __context__.
                    raise IndexError('Final exception')
        except Exception:
            # Custom exception handler, just pass it into ExceptionReporter
            exc_type, exc_value, tb = sys.exc_info()
        explicit_exc = 'The above exception ({0}) was the direct cause of the following exception:'
        implicit_exc = 'During handling of the above exception ({0}), another exception occurred:'
        reporter = ExceptionReporter(request, exc_type, exc_value, tb)
        html = reporter.get_traceback_html()
        # Both messages are twice on page -- one rendered as html,
        # one as plain text (for pastebin)
        self.assertEqual(2, html.count(explicit_exc.format("Top level")))
        self.assertEqual(2, html.count(implicit_exc.format("Second exception")))
        text = reporter.get_traceback_text()
        self.assertIn(explicit_exc.format("Top level"), text)
        self.assertIn(implicit_exc.format("Second exception"), text)
| bsd-3-clause |
Verteiron/JContainers | JContainers/lib/boost/tools/build/v2/build/configure.py | 44 | 5289 | # Status: ported.
# Base revison: 64488
#
# Copyright (c) 2010 Vladimir Prus.
#
# Use, modification and distribution is subject to the Boost Software
# License Version 1.0. (See accompanying file LICENSE_1_0.txt or
# http://www.boost.org/LICENSE_1_0.txt)
# This module defines function to help with two main tasks:
#
# - Discovering build-time configuration for the purposes of adjusting
# build process.
# - Reporting what is built, and how it is configured.
import b2.build.property as property
import b2.build.property_set as property_set
import b2.build.targets
from b2.manager import get_manager
from b2.util.sequence import unique
from b2.util import bjam_signature, value_to_jam
import bjam
import os
# Column width used to right-justify configure-check output lines.
__width = 30

def set_width(width):
    """Set the column width used to right-justify configure-check output.

    Bug fix: the original assigned the constant 30 instead of the given
    ``width`` argument, making this function a no-op.
    """
    global __width
    __width = width
# Module-level state for the configure-check reporting machinery.
__components = []           # components declared via register_components()
__built_components = []     # subset of components that will actually be built
__component_logs = {}       # component name -> list of user-facing messages
__announced_checks = False  # True once the checks banner has been printed
__log_file = None           # open log file object (kept alive to avoid GC closing it)
__log_fd = -1               # its descriptor, handed to bjam's UPDATE_NOW
def register_components(components):
    """Declare that the given components exist."""
    for component in components:
        __components.append(component)
def components_building(components):
    """Record that the given components are going to be built."""
    for component in components:
        __built_components.append(component)
def log_component_configuration(component, message):
    """Attach a user-visible configuration note to *component*."""
    notes = __component_logs.setdefault(component, [])
    notes.append(message)
def log_check_result(result):
    """Print a single configuration-check result line.

    The first call also prints a one-off "Performing configuration
    checks" banner.
    """
    global __announced_checks
    if not __announced_checks:
        print "Performing configuration checks"
        __announced_checks = True
    print result
def log_library_search_result(library, result):
    """Report the outcome of a library search as a check-result line.

    Bug fix: the justification width previously referenced an undefined
    name ('width'), causing a NameError at runtime; it must use the
    module-level '__width' setting.
    """
    log_check_result((" - %(library)s : %(result)s" % locals()).rjust(__width))
def print_component_configuration():
print "\nComponent configuration:"
for c in __components:
if c in __built_components:
s = "building"
else:
s = "not building"
message = " - %s)" % c
message = message.rjust(__width)
message += " : " + s
for m in __component_logs.get(c, []):
print " -" + m
print ""
# Cache of check outcomes, keyed by (what, property-set) pairs, so the
# same check is never built twice.
__builds_cache = {}

def builds(metatarget_reference, project, ps, what):
    # Attempt to build a metatarget named by 'metatarget-reference'
    # in context of 'project' with properties 'ps'.
    # Returns non-empty value if build is OK.
    result = []

    existing = __builds_cache.get((what, ps), None)
    if existing is None:
        # Record a failure up front; overwritten below on success.
        result = False
        __builds_cache[(what, ps)] = False

        targets = b2.build.targets.generate_from_reference(
            metatarget_reference, project, ps).targets()
        jam_targets = []
        for t in targets:
            jam_targets.append(t.actualize())

        # Right-justified label for the check-result report line.
        x = (" - %s" % what).rjust(__width)
        # Build output goes to the configure log via __log_fd.
        if bjam.call("UPDATE_NOW", jam_targets, str(__log_fd), "ignore-minus-n"):
            __builds_cache[(what, ps)] = True
            result = True
            log_check_result("%s: yes" % x)
        else:
            log_check_result("%s: no" % x)
        return result
    else:
        return existing
def set_log_file(log_file_name):
    """Open 'log_file_name' for writing and remember its file descriptor.

    Called by Boost.Build startup code to specify the name of a file
    that will receive the results of configure checks.  This should
    never be called by users.
    """
    global __log_file, __log_fd
    dirname = os.path.dirname(log_file_name)
    # 'dirname' is empty when the log file lives in the current
    # directory; os.makedirs('') would raise OSError, so guard it.
    if dirname and not os.path.exists(dirname):
        os.makedirs(dirname)
    # Make sure to keep the file around, so that it's not
    # garbage-collected and closed
    __log_file = open(log_file_name, "w")
    __log_fd = __log_file.fileno()
# Frontend rules
class CheckTargetBuildsWorker:
    """Callable helper behind the 'check-target-builds' rule.

    For each property set, tries to build 'target' and yields either
    'true_properties' or 'false_properties' accordingly.
    """

    def __init__(self, target, true_properties, false_properties):
        self.target = target
        self.true_properties = property.create_from_strings(true_properties, True)
        self.false_properties = property.create_from_strings(false_properties, True)

    def check(self, ps):
        """Return the conditional properties appropriate for 'ps'."""
        # FIXME: this should not be hardcoded. Other checks might
        # want to consider different set of features as relevant.
        toolset = ps.get('toolset')[0]
        toolset_version_property = "<toolset-" + toolset + ":version>" ;
        # Only this subset of features influences whether the check
        # target builds, so cache/build against the reduced set.
        relevant = ps.get_properties('target-os') + \
                   ps.get_properties("toolset") + \
                   ps.get_properties(toolset_version_property) + \
                   ps.get_properties("address-model") + \
                   ps.get_properties("architecture")
        rps = property_set.create(relevant)
        t = get_manager().targets().current()
        p = t.project()
        if builds(self.target, p, rps, "%s builds" % self.target):
            choosen = self.true_properties
        else:
            choosen = self.false_properties
        return property.evaluate_conditionals_in_context(choosen, ps)
@bjam_signature((["target"], ["true_properties", "*"], ["false_properties", "*"]))
def check_target_builds(target, true_properties, false_properties):
    """Frontend for the 'check-target-builds' rule: wrap a
    CheckTargetBuildsWorker check in a <conditional> property."""
    worker = CheckTargetBuildsWorker(target, true_properties, false_properties)
    value = value_to_jam(worker.check)
    return "<conditional>" + value

# Register the rule so Jamfiles can invoke 'check-target-builds'.
get_manager().projects().add_rule("check-target-builds", check_target_builds)
| mit |
slisson/intellij-community | python/lib/Lib/site-packages/django/contrib/messages/storage/base.py | 399 | 6134 | from django.conf import settings
from django.utils.encoding import force_unicode, StrAndUnicode
from django.contrib.messages import constants, utils
# Level -> tag mapping, computed once at import time (used by Message.tags).
LEVEL_TAGS = utils.get_level_tags()
class Message(StrAndUnicode):
    """
    A single user notification that can be stored in any of the supported
    storage backends (typically session- or cookie-based) and rendered in
    a view or template.
    """

    def __init__(self, level, message, extra_tags=None):
        self.level = int(level)
        self.message = message
        self.extra_tags = extra_tags

    def _prepare(self):
        """
        Coerce ``message`` and ``extra_tags`` to unicode ahead of
        serialization, in case either is a lazy translation.

        Known "safe" types (None, int, etc.) are left untouched (see
        Django's ``force_unicode`` implementation for details).
        """
        self.message = force_unicode(self.message, strings_only=True)
        self.extra_tags = force_unicode(self.extra_tags, strings_only=True)

    def __eq__(self, other):
        if not isinstance(other, Message):
            return False
        return self.level == other.level and self.message == other.message

    def __unicode__(self):
        return force_unicode(self.message)

    def _get_tags(self):
        # The label tag comes from the level; users may supply extra tags.
        label_tag = force_unicode(LEVEL_TAGS.get(self.level, ''),
                                  strings_only=True)
        extra_tags = force_unicode(self.extra_tags, strings_only=True)
        if extra_tags and label_tag:
            return u' '.join([extra_tags, label_tag])
        if extra_tags:
            return extra_tags
        if label_tag:
            return label_tag
        return ''
    tags = property(_get_tags)
class BaseStorage(object):
    """
    This is the base backend for temporary message storage.

    This is not a complete class; to be a usable storage backend, it must be
    subclassed and the two methods ``_get`` and ``_store`` overridden.
    """

    def __init__(self, request, *args, **kwargs):
        self.request = request
        # Messages added during this request, not yet persisted.
        self._queued_messages = []
        # True once the storage has been iterated (i.e. displayed).
        self.used = False
        # True once add() has queued at least one new message.
        self.added_new = False
        super(BaseStorage, self).__init__(*args, **kwargs)

    def __len__(self):
        return len(self._loaded_messages) + len(self._queued_messages)

    def __iter__(self):
        # Iterating marks the storage as used, so update() knows the
        # messages were displayed and need not all be re-stored.
        self.used = True
        if self._queued_messages:
            self._loaded_messages.extend(self._queued_messages)
            self._queued_messages = []
        return iter(self._loaded_messages)

    def __contains__(self, item):
        return item in self._loaded_messages or item in self._queued_messages

    @property
    def _loaded_messages(self):
        """
        Returns a list of loaded messages, retrieving them first if they have
        not been loaded yet.
        """
        if not hasattr(self, '_loaded_data'):
            # NOTE: the all_retrieved flag returned by _get() is unused here.
            messages, all_retrieved = self._get()
            self._loaded_data = messages or []
        return self._loaded_data

    def _get(self, *args, **kwargs):
        """
        Retrieves a list of stored messages. Returns a tuple of the messages
        and a flag indicating whether or not all the messages originally
        intended to be stored in this storage were, in fact, stored and
        retrieved; e.g., ``(messages, all_retrieved)``.

        **This method must be implemented by a subclass.**

        If it is possible to tell if the backend was not used (as opposed to
        just containing no messages) then ``None`` should be returned in
        place of ``messages``.
        """
        raise NotImplementedError()

    def _store(self, messages, response, *args, **kwargs):
        """
        Stores a list of messages, returning a list of any messages which could
        not be stored.

        One type of object must be able to be stored, ``Message``.

        **This method must be implemented by a subclass.**
        """
        raise NotImplementedError()

    def _prepare_messages(self, messages):
        """
        Prepares a list of messages for storage.
        """
        for message in messages:
            message._prepare()

    def update(self, response):
        """
        Stores all unread messages.

        If the backend has yet to be iterated, previously stored messages will
        be stored again. Otherwise, only messages added after the last
        iteration will be stored.
        """
        self._prepare_messages(self._queued_messages)
        # If neither branch applies (nothing displayed, nothing added),
        # there is nothing to persist and None is returned.
        if self.used:
            return self._store(self._queued_messages, response)
        elif self.added_new:
            messages = self._loaded_messages + self._queued_messages
            return self._store(messages, response)

    def add(self, level, message, extra_tags=''):
        """
        Queues a message to be stored.

        The message is only queued if it contained something and its level is
        not less than the recording level (``self.level``).
        """
        if not message:
            return
        # Check that the message level is not less than the recording level.
        level = int(level)
        if level < self.level:
            return
        # Add the message.
        self.added_new = True
        message = Message(level, message, extra_tags=extra_tags)
        self._queued_messages.append(message)

    def _get_level(self):
        """
        Returns the minimum recorded level.

        The default level is the ``MESSAGE_LEVEL`` setting. If this is
        not found, the ``INFO`` level is used.
        """
        if not hasattr(self, '_level'):
            self._level = getattr(settings, 'MESSAGE_LEVEL', constants.INFO)
        return self._level

    def _set_level(self, value=None):
        """
        Sets a custom minimum recorded level.

        If set to ``None``, the default level will be used (see the
        ``_get_level`` method).
        """
        if value is None and hasattr(self, '_level'):
            del self._level
        else:
            self._level = int(value)

    # Deleting ``level`` resets it to the default: the property's deleter
    # reuses _set_level, whose ``value`` argument defaults to None.
    level = property(_get_level, _set_level, _set_level)
| apache-2.0 |
venmo/ansible | v1/ansible/runner/lookup_plugins/file.py | 153 | 2413 | # (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from ansible import utils, errors
import os
import codecs
class LookupModule(object):
    """'file' lookup plugin: returns the contents of one or more files."""

    def __init__(self, basedir=None, **kwargs):
        # Base directory of the play/role from which the lookup runs.
        self.basedir = basedir

    def run(self, terms, inject=None, **kwargs):
        """Resolve each term to a file path and return a list of the file
        contents, read as UTF-8 with trailing whitespace stripped.

        Raises errors.AnsibleError when a term cannot be resolved to an
        existing file.

        NOTE(review): assumes ``inject`` is a dict here -- confirm callers
        never pass None.
        """
        terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject)
        ret = []

        # this can happen if the variable contains a string, strictly not desired for lookup
        # plugins, but users may try it, so make it work.
        if not isinstance(terms, list):
            terms = [ terms ]

        for term in terms:
            basedir_path  = utils.path_dwim(self.basedir, term)
            relative_path = None
            playbook_path = None

            # Special handling of the file lookup, used primarily when the
            # lookup is done from a role. If the file isn't found in the
            # basedir of the current file, use dwim_relative to look in the
            # role/files/ directory, and finally the playbook directory
            # itself (which will be relative to the current working dir)
            if '_original_file' in inject:
                relative_path = utils.path_dwim_relative(inject['_original_file'], 'files', term, self.basedir, check=False)
            if 'playbook_dir' in inject:
                playbook_path = os.path.join(inject['playbook_dir'], term)

            # Take the first candidate path that exists; the for/else
            # raises only when none of the candidates exist.
            for path in (basedir_path, relative_path, playbook_path):
                if path and os.path.exists(path):
                    ret.append(codecs.open(path, encoding="utf8").read().rstrip())
                    break
            else:
                raise errors.AnsibleError("could not locate file in lookup: %s" % term)

        return ret
| gpl-3.0 |
fredericlepied/ansible | lib/ansible/plugins/callback/selective.py | 31 | 9522 | # (c) Fastly, inc 2016
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
selective.py callback plugin.
This callback only prints tasks that have been tagged with `print_action` or that have failed.
Tasks that are not printed are placed with a '.'.
For example:
- debug: msg="This will not be printed"
- debug: msg="But this will"
tags: [print_action]"
This allows operators to focus on the tasks that provide value only.
If you increase verbosity all tasks are printed.
"""
from __future__ import (absolute_import, division, print_function)
import difflib
import os
from ansible.plugins.callback import CallbackBase
from ansible.module_utils._text import to_text
# Make all classes new-style on Python 2 (no effect on Python 3).
__metaclass__ = type

# ANSI escape sequences used by colorize() to format terminal output.
COLORS = {
    'normal': '\033[0m',
    'ok': '\033[92m',
    'bold': '\033[1m',
    'not_so_bold': '\033[1m\033[34m',
    'changed': '\033[93m',
    'failed': '\033[91m',
    'endc': '\033[0m',
    'skipped': '\033[96m',
}

# Colorization is disabled when this environment variable is set to any
# non-empty string.
DONT_COLORIZE = os.getenv('ANSIBLE_SELECTIVE_DONT_COLORIZE', default=False)
def dict_diff(prv, nxt):
    """Return a dict of keys that differ with another config object.

    Each differing key maps to a ``(previous, next)`` tuple; a key present
    in only one dict is reported with ``None`` for the missing side.

    Bug fix: ``prv.keys() + nxt.keys()`` raises TypeError on Python 3,
    where ``dict.keys()`` returns a view that does not support ``+``;
    use set union instead (works on both Python 2 and 3).
    """
    keys = set(prv) | set(nxt)
    result = {}
    for k in keys:
        if prv.get(k) != nxt.get(k):
            result[k] = (prv.get(k), nxt.get(k))
    return result
def colorize(msg, color):
    """Wrap *msg* in the ANSI escape codes for *color*, unless colorizing
    is disabled via ANSIBLE_SELECTIVE_DONT_COLORIZE."""
    if DONT_COLORIZE:
        return msg
    return '{}{}{}'.format(COLORS[color], msg, COLORS['endc'])
class CallbackModule(CallbackBase):
    """selective.py callback plugin."""

    CALLBACK_VERSION = 2.0
    CALLBACK_TYPE = 'stdout'
    CALLBACK_NAME = 'selective'

    def __init__(self, display=None):
        """selective.py callback plugin."""
        super(CallbackModule, self).__init__(display)
        # True when the previous task was silently skipped (printed as
        # '.'), so the next header starts on a fresh line.
        self.last_skipped = False
        # Name of the most recently started task, printed lazily.
        self.last_task_name = None
        # Guards against printing the same task header twice.
        self.printed_last_task = False

    def _print_task(self, task_name=None):
        # Print the task header (at most once per task); defaults to the
        # most recently started task's name.
        if task_name is None:
            task_name = self.last_task_name
        if not self.printed_last_task:
            self.printed_last_task = True
            line_length = 120
            if self.last_skipped:
                print()
            msg = colorize("# {} {}".format(task_name,
                                            '*' * (line_length - len(task_name))), 'bold')
            print(msg)

    def _indent_text(self, text, indent_level):
        # Return `text` with every line prefixed by `indent_level` spaces.
        lines = text.splitlines()
        result_lines = []
        for l in lines:
            result_lines.append("{}{}".format(' ' * indent_level, l))
        return '\n'.join(result_lines)

    def _print_diff(self, diff, indent_level):
        # Render a task's 'diff' result as a unified diff of the
        # before/after text, falling back to a dict-level diff when the
        # values are not strings.
        if isinstance(diff, dict):
            try:
                diff = '\n'.join(difflib.unified_diff(diff['before'].splitlines(),
                                                      diff['after'].splitlines(),
                                                      fromfile=diff.get('before_header',
                                                                        'new_file'),
                                                      tofile=diff['after_header']))
            except AttributeError:
                # 'before'/'after' had no splitlines(): not plain text.
                diff = dict_diff(diff['before'], diff['after'])
        if diff:
            diff = colorize(str(diff), 'changed')
            print(self._indent_text(diff, indent_level + 4))

    def _print_host_or_item(self, host_or_item, changed, msg, diff, is_host, error, stdout, stderr):
        # Print one result line, either for a host or for a loop item
        # (items are indented underneath their host).
        if is_host:
            indent_level = 0
            name = colorize(host_or_item.name, 'not_so_bold')
        else:
            indent_level = 4
            if isinstance(host_or_item, dict):
                if 'key' in host_or_item.keys():
                    host_or_item = host_or_item['key']
            name = colorize(to_text(host_or_item), 'bold')

        if error:
            color = 'failed'
            change_string = colorize('FAILED!!!', color)
        else:
            color = 'changed' if changed else 'ok'
            change_string = colorize("changed={}".format(changed), color)

        msg = colorize(msg, color)

        line_length = 120
        spaces = ' ' * (40 - len(name) - indent_level)
        line = "{} * {}{}- {}".format(' ' * indent_level, name, spaces, change_string)

        # Short messages fit inline; long ones go on their own block.
        if len(msg) < 50:
            line += ' -- {}'.format(msg)
            print("{} {}---------".format(line, '-' * (line_length - len(line))))
        else:
            print("{} {}".format(line, '-' * (line_length - len(line))))
            print(self._indent_text(msg, indent_level + 4))

        if diff is not None:
            self._print_diff(diff, indent_level)
        if stdout is not None:
            stdout = colorize(stdout, 'failed')
            print(self._indent_text(stdout, indent_level + 4))
        if stderr is not None:
            stderr = colorize(stderr, 'failed')
            print(self._indent_text(stderr, indent_level + 4))

    def v2_playbook_on_play_start(self, play):
        """Run on start of the play."""
        pass

    def v2_playbook_on_task_start(self, task, **kwargs):
        """Run when a task starts."""
        self.last_task_name = task.get_name()
        self.printed_last_task = False

    def v2_runner_on_ok(self, result, **kwargs):
        """Run when a task finishes correctly."""
        failed = 'failed' in result._result
        unreachable = 'unreachable' in result._result

        # Only print tasks tagged 'print_action', failures/unreachables,
        # or everything when verbosity is raised; otherwise emit a '.'.
        if 'print_action' in result._task.tags or failed or unreachable or \
           self._display.verbosity > 1:
            self._print_task()
            self.last_skipped = False
            msg = to_text(result._result.get('msg', '')) or\
                to_text(result._result.get('reason', ''))
            self._print_host_or_item(result._host,
                                     result._result.get('changed', False),
                                     msg,
                                     result._result.get('diff', None),
                                     is_host=True,
                                     error=failed or unreachable,
                                     stdout=result._result.get('module_stdout', None),
                                     stderr=result._result.get('exception', None),
                                     )
            if 'results' in result._result:
                # Loop task: print each item result underneath the host.
                for r in result._result['results']:
                    failed = 'failed' in r
                    self._print_host_or_item(r['item'],
                                             r.get('changed', False),
                                             to_text(r.get('msg', '')),
                                             r.get('diff', None),
                                             is_host=False,
                                             error=failed,
                                             stdout=r.get('module_stdout', None),
                                             stderr=r.get('exception', None),
                                             )
        else:
            self.last_skipped = True
            print('.', end="")

    def v2_playbook_on_stats(self, stats):
        """Display info about playbook statistics."""
        print()
        self.printed_last_task = False
        self._print_task('STATS')

        hosts = sorted(stats.processed.keys())
        for host in hosts:
            s = stats.summarize(host)

            if s['failures'] or s['unreachable']:
                color = 'failed'
            elif s['changed']:
                color = 'changed'
            else:
                color = 'ok'

            msg = '{} : ok={}\tchanged={}\tfailed={}\tunreachable={}'.format(
                host, s['ok'], s['changed'], s['failures'], s['unreachable'])
            print(colorize(msg, color))

    def v2_runner_on_skipped(self, result, **kwargs):
        """Run when a task is skipped."""
        if self._display.verbosity > 1:
            self._print_task()
            self.last_skipped = False

            line_length = 120
            spaces = ' ' * (31 - len(result._host.name) - 4)

            line = "  * {}{}- {}".format(colorize(result._host.name, 'not_so_bold'),
                                         spaces,
                                         colorize("skipped", 'skipped'),)

            reason = result._result.get('skipped_reason', '') or \
                result._result.get('skip_reason', '')
            if len(reason) < 50:
                line += ' -- {}'.format(reason)
                print("{} {}---------".format(line, '-' * (line_length - len(line))))
            else:
                print("{} {}".format(line, '-' * (line_length - len(line))))
                print(self._indent_text(reason, 8))
                # NOTE(review): 'reason' appears to be printed twice here
                # (indented and un-indented) -- confirm this is intended.
                print(reason)

    # Reuse the handlers above: handler-task starts render like task
    # starts; failed and unreachable results render like ok results.
    v2_playbook_on_handler_task_start = v2_playbook_on_task_start
    v2_runner_on_failed = v2_runner_on_ok
    v2_runner_on_unreachable = v2_runner_on_ok
| gpl-3.0 |
naterh/ironic | ironic/drivers/modules/irmc/management.py | 5 | 9161 | # Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
iRMC Management Driver
"""
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.common.i18n import _LE
from ironic.conductor import task_manager
from ironic.drivers.modules import ipmitool
from ironic.drivers.modules.irmc import common as irmc_common
from ironic.drivers import utils as driver_utils
# python-scciclient is an optional dependency; 'scci' is None when it is
# not installed.
scci = importutils.try_import('scciclient.irmc.scci')

LOG = logging.getLogger(__name__)

# Boot Option Parameters #5 Data2 defined in
# Set/Get System Boot Options Command, IPMI spec v2.0.
_BOOTPARAM5_DATA2 = {boot_devices.PXE: '0x04',
                     boot_devices.DISK: '0x08',
                     boot_devices.CDROM: '0x14',
                     boot_devices.BIOS: '0x18',
                     boot_devices.SAFE: '0x0c',
                     }
def _get_sensors_data(task):
    """Get sensors data method.

    It gets sensor data from the task's node via SCCI, and convert the data
    from XML to the dict format.

    :param task: A TaskManager instance.
    :raises: FailedToGetSensorData when getting the sensor data fails.
    :returns: Returns a consistent formatted dict of sensor data grouped
        by sensor type, which can be processed by Ceilometer.
    """
    try:
        report = irmc_common.get_irmc_report(task.node)
        sensor = scci.get_sensor_data(report)
    except (exception.InvalidParameterValue,
            exception.MissingParameterValue,
            scci.SCCIInvalidInputError,
            scci.SCCIClientError) as e:
        LOG.error(_LE("SCCI get sensor data failed for node %(node_id)s "
                      "with the following error: %(error)s"),
                  {'node_id': task.node.uuid, 'error': e})
        raise exception.FailedToGetSensorData(
            node=task.node.uuid, error=e)

    sensors_data = {}
    for sdr in sensor:
        sensor_type_name = sdr.find('./Data/Decoded/Sensor/TypeName')
        sensor_type_number = sdr.find('./Data/Decoded/Sensor/Type')
        entity_name = sdr.find('./Data/Decoded/Entity/Name')
        entity_id = sdr.find('./Data/Decoded/Entity/ID')

        # Skip SDR entries missing any of the required fields.
        if None in (sensor_type_name, sensor_type_number,
                    entity_name, entity_id):
            continue

        sensor_type = ('%s (%s)' %
                       (sensor_type_name.text, sensor_type_number.text))
        sensor_id = ('%s (%s)' %
                     (entity_name.text, entity_id.text))
        # Reading value/units are optional; render missing ones as "None".
        reading_value = sdr.find(
            './Data/Decoded/Sensor/Thresholds/*/Normalized')
        reading_value_text = "None" if (
            reading_value is None) else str(reading_value.text)
        reading_units = sdr.find('./Data/Decoded/Sensor/BaseUnitName')
        reading_units_text = "None" if (
            reading_units is None) else str(reading_units.text)
        sensor_reading = '%s %s' % (reading_value_text, reading_units_text)

        sensors_data.setdefault(sensor_type, {})[sensor_id] = {
            'Sensor Reading': sensor_reading,
            'Sensor ID': sensor_id,
            'Units': reading_units_text,
        }
    return sensors_data
class IRMCManagement(ipmitool.IPMIManagement):
    """iRMC management interface.

    Extends the ipmitool-based management interface with iRMC specifics:
    UEFI boot-device selection via raw IPMI commands, and sensor data
    retrieval through FUJITSU's SCCI when configured to do so.
    """

    def get_properties(self):
        """Return the properties of the interface.

        :returns: Dictionary of <property name>:<property description> entries.
        """
        return irmc_common.COMMON_PROPERTIES

    def validate(self, task):
        """Validate the driver-specific management information.

        This method validates whether the 'driver_info' property of the
        supplied node contains the required information for this driver.

        :param task: A TaskManager instance containing the node to act on.
        :raises: InvalidParameterValue if required parameters are invalid.
        :raises: MissingParameterValue if a required parameter is missing.
        """
        irmc_common.parse_driver_info(task.node)
        irmc_common.update_ipmi_properties(task)
        super(IRMCManagement, self).validate(task)

    @task_manager.require_exclusive_lock
    def set_boot_device(self, task, device, persistent=False):
        """Set the boot device for a node.

        Set the boot device to use on next reboot of the node.

        :param task: A task from TaskManager.
        :param device: The boot device, one of the supported devices
            listed in :mod:`ironic.common.boot_devices`.
        :param persistent: Boolean value. True if the boot device will
            persist to all future boots, False if not.
            Default: False.
        :raises: InvalidParameterValue if an invalid boot device is
            specified.
        :raises: MissingParameterValue if a required parameter is missing.
        :raises: IPMIFailure on an error from ipmitool.
        """
        # UEFI nodes need raw IPMI commands (see note below); BIOS nodes
        # fall through to the standard ipmitool implementation.
        if driver_utils.get_node_capability(task.node, 'boot_mode') == 'uefi':
            if device not in self.get_supported_boot_devices(task):
                raise exception.InvalidParameterValue(_(
                    "Invalid boot device %s specified.") % device)
            timeout_disable = "0x00 0x08 0x03 0x08"
            ipmitool.send_raw(task, timeout_disable)

            # note(naohirot): As of ipmitool version 1.8.13,
            # in case of chassis command, the efiboot option doesn't
            # get set with persistent at the same time.
            # $ ipmitool chassis bootdev pxe options=efiboot,persistent
            # In case of raw command, however, both can be set at the
            # same time.
            # $ ipmitool raw 0x00 0x08 0x05 0xe0 0x04 0x00 0x00 0x00
            #                           data1^^ ^^data2
            # ipmi cmd '0x08' : Set System Boot Options
            # data1    '0xe0' : persistent and uefi
            # data1    '0xa0' : next boot only and uefi
            #
            data1 = '0xe0' if persistent else '0xa0'
            bootparam5 = '0x00 0x08 0x05 %s %s 0x00 0x00 0x00'
            cmd08 = bootparam5 % (data1, _BOOTPARAM5_DATA2[device])
            ipmitool.send_raw(task, cmd08)
        else:
            super(IRMCManagement, self).set_boot_device(
                task, device, persistent)

    def get_sensors_data(self, task):
        """Get sensors data method.

        It gets sensor data from the task's node via SCCI, and convert the data
        from XML to the dict format.

        :param task: A TaskManager instance.
        :raises: FailedToGetSensorData when getting the sensor data fails.
        :raises: FailedToParseSensorData when parsing sensor data fails.
        :raises: InvalidParameterValue if required parameters are invalid.
        :raises: MissingParameterValue if a required parameter is missing.
        :returns: Returns a consistent formatted dict of sensor data grouped
            by sensor type, which can be processed by Ceilometer.
            Example::

                {
                  'Sensor Type 1': {
                    'Sensor ID 1': {
                      'Sensor Reading': 'Value1 Units1',
                      'Sensor ID': 'Sensor ID 1',
                      'Units': 'Units1'
                    },
                    'Sensor ID 2': {
                      'Sensor Reading': 'Value2 Units2',
                      'Sensor ID': 'Sensor ID 2',
                      'Units': 'Units2'
                    }
                  },
                  'Sensor Type 2': {
                    'Sensor ID 3': {
                      'Sensor Reading': 'Value3 Units3',
                      'Sensor ID': 'Sensor ID 3',
                      'Units': 'Units3'
                    },
                    'Sensor ID 4': {
                      'Sensor Reading': 'Value4 Units4',
                      'Sensor ID': 'Sensor ID 4',
                      'Units': 'Units4'
                    }
                  }
                }
        """
        # irmc_common.parse_driver_info() makes sure that
        # d_info['irmc_sensor_method'] is either 'scci' or 'ipmitool'.
        d_info = irmc_common.parse_driver_info(task.node)
        sensor_method = d_info['irmc_sensor_method']
        if sensor_method == 'scci':
            return _get_sensors_data(task)
        elif sensor_method == 'ipmitool':
            return super(IRMCManagement, self).get_sensors_data(task)
| apache-2.0 |
thresholdsoftware/asylum | openerp/addons/account/wizard/account_unreconcile.py | 56 | 2140 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import osv
class account_unreconcile(osv.osv_memory):
    _name = "account.unreconcile"
    _description = "Account Unreconcile"

    def trans_unrec(self, cr, uid, ids, context=None):
        """Remove the reconciliation of the journal items whose ids are
        passed in ``context['active_ids']``, then close the wizard."""
        obj_move_line = self.pool.get('account.move.line')
        if context is None:
            context = {}
        if context.get('active_ids', False):
            obj_move_line._remove_move_reconcile(cr, uid, context['active_ids'], context=context)
        return {'type': 'ir.actions.act_window_close'}

# Old-style OpenERP convention: instantiating the class registers the model.
account_unreconcile()
class account_unreconcile_reconcile(osv.osv_memory):
    _name = "account.unreconcile.reconcile"
    _description = "Account Unreconcile Reconcile"

    def trans_unrec_reconcile(self, cr, uid, ids, context=None):
        """Unlink the reconciliation records whose ids are passed in
        ``context['active_ids']``, then close the wizard."""
        obj_move_reconcile = self.pool.get('account.move.reconcile')
        if context is None:
            context = {}
        # Use .get() so a missing 'active_ids' key does not raise KeyError
        # (mirrors account_unreconcile.trans_unrec, which guards the same
        # way); previously this crashed when the wizard was opened without
        # active records.
        rec_ids = context.get('active_ids', False)
        if rec_ids:
            obj_move_reconcile.unlink(cr, uid, rec_ids, context=context)
        return {'type': 'ir.actions.act_window_close'}

account_unreconcile_reconcile()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
dit/dit | dit/algorithms/minimal_sufficient_statistic.py | 1 | 8951 | """
Functions for computing minimal sufficient statistics.
"""
from collections import defaultdict
from .lattice import dist_from_induced_sigalg, insert_join, insert_rv
from .prune_expand import pruned_samplespace
from ..helpers import flatten, parse_rvs, normalize_rvs
from ..math import sigma_algebra
from ..samplespace import CartesianProduct
__all__ = (
'info_trim',
'insert_mss',
'mss',
'mss_sigalg',
)
def partial_match(first, second, places):
    """
    Returns whether `second` is a marginal outcome at `places` of `first`.

    Parameters
    ----------
    first : iterable
        The un-marginalized outcome.
    second : iterable
        The smaller, marginalized outcome.
    places : list
        The locations of `second` in `first`.

    Returns
    -------
    match : bool
        Whether `first` and `second` match or not.
    """
    projected = tuple(first[index] for index in places)
    return projected == tuple(second)
def mss_sigalg(dist, rvs, about=None, rv_mode=None):
    """
    Construct the sigma algebra for the minimal sufficient statistic of `rvs`
    about `about`.

    Parameters
    ----------
    dist : Distribution
        The distribution which defines the base sigma-algebra.
    rvs : list
        A list of random variables to be compressed into a minimal sufficient
        statistic.
    about : list
        A list of random variables for which the minimal sufficient static will
        retain all information about.
    rv_mode : str, None
        Specifies how to interpret the elements of `rvs`. Valid options are:
        {'indices', 'names'}. If equal to 'indices', then the elements of
        `rvs` are interpreted as random variable indices. If equal to 'names',
        the the elements are interpreted as random variable names. If `None`,
        then the value of `dist._rv_mode` is consulted.

    Returns
    -------
    mss_sa : frozenset of frozensets
        The induced sigma-algebra of the minimal sufficient statistic.

    Examples
    --------
    >>> d = Xor()
    >>> mss_sigalg(d, [0], [1, 2])
    frozenset({frozenset(),
               frozenset({'000', '011'}),
               frozenset({'101', '110'}),
               frozenset({'000', '011', '101', '110'})})
    """
    # Positions of `rvs` within each outcome of `dist`.
    mapping = parse_rvs(dist, rvs, rv_mode=rv_mode)[1]

    partition = defaultdict(list)

    # Group outcomes by their conditional distribution of `about` given
    # `rvs`: outcomes inducing (approximately) equal conditionals belong
    # to the same atom of the minimal sufficient statistic.
    md, cds = dist.condition_on(rvs=about, crvs=rvs, rv_mode=rv_mode)
    for marg, cd in zip(md.outcomes, cds):
        matches = [o for o in dist.outcomes if partial_match(o, marg, mapping)]
        for c in partition.keys():
            if c.is_approx_equal(cd):
                partition[c].extend(matches)
                break
        else:
            # No existing group matched; start a new one keyed by `cd`.
            partition[cd].extend(matches)

    mss_sa = sigma_algebra(map(frozenset, partition.values()))
    return mss_sa
def insert_mss(dist, idx, rvs, about=None, rv_mode=None):
    """
    Inserts the minimal sufficient statistic of `rvs` about `about` into `dist`
    at index `idx`.

    Parameters
    ----------
    dist : Distribution
        The distribution which defines the base sigma-algebra.
    idx : int
        The location in the distribution to insert the minimal sufficient
        statistic.
    rvs : list
        A list of random variables to be compressed into a minimal sufficient
        statistic.
    about : list
        A list of random variables for which the minimal sufficient static will
        retain all information about.
    rv_mode : str, None
        Specifies how to interpret the elements of `rvs`. Valid options are:
        {'indices', 'names'}. If equal to 'indices', then the elements of
        `rvs` are interpreted as random variable indices. If equal to 'names',
        the the elements are interpreted as random variable names. If `None`,
        then the value of `dist._rv_mode` is consulted.

    Returns
    -------
    d : Distribution
        The distribution `dist` modified to contain the minimal sufficient
        statistic.

    Examples
    --------
    >>> d = Xor()
    >>> print(insert_mss(d, -1, [0], [1, 2]))
    Class:          Distribution
    Alphabet:       ('0', '1') for all rvs
    Base:           linear
    Outcome Class:  str
    Outcome Length: 4
    RV Names:       None

    x      p(x)
    0000   0.25
    0110   0.25
    1011   0.25
    1101   0.25
    """
    # Build the statistic's sigma-algebra, add it as a new random
    # variable, then trim the sample space of the augmented distribution.
    mss_sa = mss_sigalg(dist, rvs, about, rv_mode)
    new_dist = insert_rv(dist, idx, mss_sa)
    return pruned_samplespace(new_dist)
def mss(dist, rvs, about=None, rv_mode=None, int_outcomes=True):
    """
    Construct the distribution of the minimal sufficient statistic of `rvs`
    about `about`.

    Parameters
    ----------
    dist : Distribution
        The distribution which defines the base sigma-algebra.
    rvs : list
        A list of random variables to be compressed into a minimal sufficient
        statistic.
    about : list
        A list of random variables for which the minimal sufficient static will
        retain all information about.
    rv_mode : str, None
        Specifies how to interpret the elements of `rvs`. Valid options are:
        {'indices', 'names'}. If equal to 'indices', then the elements of `rvs`
        are interpreted as random variable indices. If equal to 'names', the the
        elements are interpreted as random variable names. If `None`, then the
        value of `dist._rv_mode` is consulted.
    int_outcomes : bool
        If `True`, then the outcomes of the minimal sufficient statistic are
        relabeled as integers instead of as the atoms of the induced
        sigma-algebra.

    Returns
    -------
    d : ScalarDistribution
        The distribution of the minimal sufficient statistic.

    Examples
    --------
    >>> d = Xor()
    >>> print(mss(d, [0], [1, 2]))
    Class:    ScalarDistribution
    Alphabet: (0, 1)
    Base:     linear

    x   p(x)
    0   0.5
    1   0.5
    """
    mss_sa = mss_sigalg(dist, rvs, about, rv_mode)
    d = dist_from_induced_sigalg(dist, mss_sa, int_outcomes)
    return d
def insert_joint_mss(dist, idx, rvs=None, rv_mode=None):
    """
    Returns a new distribution with the join of the minimal sufficient statistic
    of each random variable in `rvs` about all the other variables.

    Parameters
    ----------
    dist : Distribution
        The distribution containing the random variables from which the joint
        minimal sufficient statistic will be computed.
    idx : int
        The location in the distribution to insert the joint minimal sufficient
        statistic.
    rvs : list
        A list of random variables to be compressed into a joint minimal
        sufficient statistic.
    rv_mode : str, None
        Specifies how to interpret the elements of `rvs`. Valid options are:
        {'indices', 'names'}. If equal to 'indices', then the elements of
        `rvs` are interpreted as random variable indices. If equal to 'names',
        the elements are interpreted as random variable names. If `None`,
        then the value of `dist._rv_mode` is consulted.
    """
    rvs, _, rv_mode = normalize_rvs(dist, rvs, None, rv_mode)
    d = dist.copy()
    l1 = d.outcome_length()
    rvs = {tuple(rv) for rv in rvs}
    # Append one minimal sufficient statistic per variable group; each is
    # computed about all of the *other* groups.
    for rv in rvs:
        about = list(flatten(rvs - {rv}))
        d = insert_mss(d, -1, rvs=list(rv), about=about, rv_mode=rv_mode)
    l2 = d.outcome_length()
    # Clamp out-of-range insertion points to "append at the end".
    idx = -1 if idx > l1 else idx
    # Join the appended statistics (positions l1..l2-1) into one variable.
    d = insert_join(d, idx, [[i] for i in range(l1, l2)])
    # When the join was inserted (not appended), the per-variable statistics
    # shifted right by one position before being marginalized out.
    delta = 0 if idx == -1 else 1
    d = d.marginalize([i + delta for i in range(l1, l2)])
    d = pruned_samplespace(d)
    # Preserve a Cartesian-product sample space when the input had one.
    if isinstance(dist._sample_space, CartesianProduct):
        d._sample_space = CartesianProduct(d.alphabet)
    return d
def info_trim(dist, rvs=None, rv_mode=None):
    """
    Returns a new distribution with the minimal sufficient statistics
    of each random variable in `rvs` about all the other variables.

    Parameters
    ----------
    dist : Distribution
        The distribution containing the random variables from which the
        minimal sufficient statistics will be computed.
    rvs : list
        A list of random variables to be compressed into minimal sufficient
        statistics.
    rv_mode : str, None
        Specifies how to interpret the elements of `rvs`. Valid options are:
        {'indices', 'names'}. If equal to 'indices', then the elements of
        `rvs` are interpreted as random variable indices. If equal to 'names',
        the elements are interpreted as random variable names. If `None`,
        then the value of `dist._rv_mode` is consulted.
    """
    rvs, _, rv_mode = normalize_rvs(dist, rvs, None, rv_mode)
    d = dist.copy()
    rvs2 = {tuple(rv) for rv in rvs}
    # Append the minimal sufficient statistic of each group about the others.
    for rv in rvs:
        about = list(flatten(rvs2 - {tuple(rv)}))
        d = insert_mss(d, -1, rvs=tuple(rv), about=about, rv_mode=rv_mode)
    # Drop the original variables, keeping only the statistics.
    d = pruned_samplespace(d.marginalize(list(flatten(rvs))))
    # Preserve a Cartesian-product sample space when the input had one.
    if isinstance(dist._sample_space, CartesianProduct):
        d._sample_space = CartesianProduct(d.alphabet)
    return d
| bsd-3-clause |
StackPointCloud/ansible-modules-extras | messaging/rabbitmq_plugin.py | 109 | 4594 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Chatham Financial <oss@chathamfinancial.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: rabbitmq_plugin
short_description: Adds or removes plugins to RabbitMQ
description:
- Enables or disables RabbitMQ plugins
version_added: "1.1"
author: '"Chris Hoffman (@chrishoffman)"'
options:
names:
description:
- Comma-separated list of plugin names
required: true
default: null
aliases: [name]
new_only:
description:
- Only enable missing plugins
- Does not disable plugins that are not in the names list
required: false
default: "no"
choices: [ "yes", "no" ]
state:
description:
- Specify if plugins are to be enabled or disabled
required: false
default: enabled
choices: [enabled, disabled]
prefix:
description:
- Specify a custom install prefix to a Rabbit
required: false
version_added: "1.3"
default: null
'''
EXAMPLES = '''
# Enables the rabbitmq_management plugin
- rabbitmq_plugin: names=rabbitmq_management state=enabled
'''
import os
class RabbitMqPlugins(object):
    """Thin wrapper around the ``rabbitmq-plugins`` command-line tool."""

    def __init__(self, module):
        # module: the AnsibleModule instance (provides params and run_command).
        self.module = module

        if module.params['prefix']:
            # A custom install prefix was given: locate the bin/sbin folder.
            if os.path.isdir(os.path.join(module.params['prefix'], 'bin')):
                bin_path = os.path.join(module.params['prefix'], 'bin')
            elif os.path.isdir(os.path.join(module.params['prefix'], 'sbin')):
                bin_path = os.path.join(module.params['prefix'], 'sbin')
            else:
                # No such path exists.
                raise Exception("No binary folder in prefix %s" %
                                module.params['prefix'])

            self._rabbitmq_plugins = bin_path + "/rabbitmq-plugins"
        else:
            self._rabbitmq_plugins = module.get_bin_path('rabbitmq-plugins', True)

    def _exec(self, args, run_in_check_mode=False):
        """Run ``rabbitmq-plugins`` with *args*; return its output lines.

        In check mode only commands flagged ``run_in_check_mode`` (read-only
        ones) are executed; otherwise an empty list is returned.
        """
        if not self.module.check_mode or (self.module.check_mode and run_in_check_mode):
            cmd = [self._rabbitmq_plugins]
            rc, out, err = self.module.run_command(cmd + args, check_rc=True)
            return out.splitlines()
        return list()

    def get_all(self):
        """Return the names of all explicitly enabled plugins."""
        list_output = self._exec(['list', '-E', '-m'], True)
        plugins = []
        for plugin in list_output:
            # Stop at the first empty line of output.
            if not plugin:
                break
            plugins.append(plugin)

        return plugins

    def enable(self, name):
        """Enable the plugin *name*."""
        self._exec(['enable', name])

    def disable(self, name):
        """Disable the plugin *name*."""
        self._exec(['disable', name])
def main():
    """Ansible entry point: converge RabbitMQ plugins to the requested state."""
    arg_spec = dict(
        names=dict(required=True, aliases=['name']),
        new_only=dict(default='no', type='bool'),
        state=dict(default='enabled', choices=['enabled', 'disabled']),
        prefix=dict(required=False, default=None)
    )
    module = AnsibleModule(
        argument_spec=arg_spec,
        supports_check_mode=True
    )

    names = module.params['names'].split(',')
    new_only = module.params['new_only']
    state = module.params['state']

    rabbitmq_plugins = RabbitMqPlugins(module)
    enabled_plugins = rabbitmq_plugins.get_all()

    enabled = []
    disabled = []
    if state == 'enabled':
        if not new_only:
            # Disable every currently-enabled plugin not in the requested list.
            for plugin in enabled_plugins:
                if plugin not in names:
                    rabbitmq_plugins.disable(plugin)
                    disabled.append(plugin)

        # Enable any requested plugin that is not yet enabled.
        for name in names:
            if name not in enabled_plugins:
                rabbitmq_plugins.enable(name)
                enabled.append(name)
    else:
        # state == 'disabled': disable the listed plugins if enabled.
        for plugin in enabled_plugins:
            if plugin in names:
                rabbitmq_plugins.disable(plugin)
                disabled.append(plugin)

    changed = len(enabled) > 0 or len(disabled) > 0
    module.exit_json(changed=changed, enabled=enabled, disabled=disabled)
# import module snippets
# (the wildcard import supplies AnsibleModule used by main() above)
from ansible.module_utils.basic import *

main()
| gpl-3.0 |
jhoos/django | tests/admin_custom_urls/models.py | 288 | 2513 | from functools import update_wrapper
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.db import models
from django.http import HttpResponseRedirect
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Action(models.Model):
    """Model with a CharField primary key, used by ActionAdmin's custom URLs."""
    name = models.CharField(max_length=50, primary_key=True)
    description = models.CharField(max_length=70)

    def __str__(self):
        return self.name
class ActionAdmin(admin.ModelAdmin):
    """
    A ModelAdmin for the Action model that changes the URL of the add_view
    to '<app name>/<model name>/!add/'

    The Action model has a CharField PK.
    """

    list_display = ('name', 'description')

    def remove_url(self, name):
        """
        Remove all entries named 'name' from the ModelAdmin instance URL
        patterns list
        """
        return [url for url in super(ActionAdmin, self).get_urls() if url.name != name]

    def get_urls(self):
        # Add the URL of our custom 'add_view' view to the front of the URLs
        # list.  Remove the existing one(s) first
        from django.conf.urls import url

        def wrap(view):
            # Wrap the view with admin_view so admin permission checks apply.
            def wrapper(*args, **kwargs):
                return self.admin_site.admin_view(view)(*args, **kwargs)
            return update_wrapper(wrapper, view)

        info = self.model._meta.app_label, self.model._meta.model_name
        view_name = '%s_%s_add' % info

        return [
            url(r'^!add/$', wrap(self.add_view), name=view_name),
        ] + self.remove_url(view_name)
class Person(models.Model):
    """Simple auto-PK model used to exercise custom post-save redirects."""
    name = models.CharField(max_length=20)
class PersonAdmin(admin.ModelAdmin):
    """Redirects to the history/delete admin views after add/change saves."""

    def response_post_save_add(self, request, obj):
        return HttpResponseRedirect(
            reverse('admin:admin_custom_urls_person_history', args=[obj.pk]))

    def response_post_save_change(self, request, obj):
        return HttpResponseRedirect(
            reverse('admin:admin_custom_urls_person_delete', args=[obj.pk]))
class Car(models.Model):
    """Simple auto-PK model used to exercise a custom response_add redirect."""
    name = models.CharField(max_length=20)
class CarAdmin(admin.ModelAdmin):
    """Redirects "save and continue" after add to the object's history view."""

    def response_add(self, request, obj, post_url_continue=None):
        return super(CarAdmin, self).response_add(
            request, obj, post_url_continue=reverse('admin:admin_custom_urls_car_history', args=[obj.pk]))
# Register the models on a dedicated AdminSite instance (not the default
# admin.site).
site = admin.AdminSite(name='admin_custom_urls')
site.register(Action, ActionAdmin)
site.register(Person, PersonAdmin)
site.register(Car, CarAdmin)
| bsd-3-clause |
mbernasocchi/QGIS | python/pyplugin_installer/__init__.py | 45 | 1403 | # -*- coding: utf-8 -*-
"""
***************************************************************************
__init__.py
---------------------
Date : May 2013
Copyright : (C) 2013 by Borys Jurgiel
Email : info at borysjurgiel dot pl
This module is based on former plugin_installer plugin:
Copyright (C) 2007-2008 Matthew Perry
Copyright (C) 2008-2013 Borys Jurgiel
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
__author__ = 'Borys Jurgiel'
__date__ = 'May 2013'
__copyright__ = '(C) 2013, Borys Jurgiel'
# import functions for easier access
from . import installer
from .installer import initPluginInstaller # NOQA
def instance():
    """Return the global plugin installer, creating it on first use."""
    if not installer.pluginInstaller:
        installer.initPluginInstaller()
    return installer.pluginInstaller
| gpl-2.0 |
ephoning/heroku-buildpack-python | vendor/distribute-0.6.36/release.py | 36 | 4463 | #!/usr/bin/env python
"""
Script to fully automate the release process. Requires Python 2.6+
with sphinx installed and the 'hg' command on the path.
"""
from __future__ import print_function
import subprocess
import shutil
import os
import sys
import urllib2
import getpass
import collections
try:
import keyring
except Exception:
pass
VERSION = '0.6.36'
def get_next_version(version=None):
    """
    Return *version* with its last numeric component incremented,
    e.g. '0.6.36' -> '0.6.37'.

    Defaults to the module-level VERSION when no argument is given,
    preserving the original zero-argument call signature.
    """
    if version is None:
        version = VERSION
    # list() is required so the result is indexable on Python 3, where
    # map() returns a lazy iterator (the old code broke there).
    digits = list(map(int, version.split('.')))
    digits[-1] += 1
    return '.'.join(map(str, digits))
NEXT_VERSION = get_next_version()
files_with_versions = ('docs/conf.py', 'setup.py', 'release.py',
'README.txt', 'distribute_setup.py')
def get_repo_name():
    """
    Get the repo name (e.g. 'owner/project') from the hgrc default path.
    """
    # Pass the command as an argv list: a single string is only treated as a
    # full command line on Windows; on POSIX it would be taken as the name of
    # one (nonexistent) executable.
    default = subprocess.check_output(['hg', 'paths', 'default']).strip()
    parts = default.split('/')
    # Drop a trailing empty segment produced by a trailing slash.
    if parts[-1] == '':
        parts.pop()
    return '/'.join(parts[-2:])
def get_mercurial_creds(system='https://bitbucket.org', username=None):
    """
    Return a named tuple of (username, password), resolved much the same way
    Mercurial would: from the keyring when available, otherwise prompting.
    """
    # todo: consider getting this from .hgrc
    if not username:
        username = getpass.getuser()
    keyring_username = '@@'.join((username, system))
    system = 'Mercurial'
    if 'keyring' in globals():
        password = keyring.get_password(system, keyring_username)
    else:
        password = None
    if not password:
        password = getpass.getpass()
    Credential = collections.namedtuple('Credential', 'username password')
    return Credential(username, password)
def add_milestone_and_version(version=NEXT_VERSION):
    """Create a milestone and a version entry on the Bitbucket issue tracker."""
    # HTTP Basic auth header built from keyring/prompted credentials.
    auth = 'Basic ' + ':'.join(get_mercurial_creds()).encode('base64').strip()
    headers = {
        'Authorization': auth,
    }
    base = 'https://api.bitbucket.org'
    for type in 'milestones', 'versions':
        url = (base + '/1.0/repositories/{repo}/issues/{type}'
               .format(repo=get_repo_name(), type=type))
        req = urllib2.Request(url=url, headers=headers,
                              data='name=' + version)
        try:
            urllib2.urlopen(req)
        except urllib2.HTTPError as e:
            # Non-fatal: report the API error body and continue.
            print(e.fp.read())
def bump_versions():
    """Rewrite the version string in every file that embeds it."""
    for filename in files_with_versions:
        bump_version(filename)
def bump_version(filename):
    """Replace every occurrence of VERSION with NEXT_VERSION in *filename*."""
    with open(filename, 'rb') as source:
        contents = source.read()
    with open(filename, 'wb') as target:
        target.write(contents.replace(VERSION, NEXT_VERSION))
def do_release():
    """Run the release procedure: sanity checks, tag, build, upload, bump."""
    assert all(map(os.path.exists, files_with_versions)), (
        "Expected file(s) missing")

    assert has_sphinx(), "You must have Sphinx installed to release"

    # Interactive confirmations before anything irreversible happens.
    res = raw_input('Have you read through the SCM changelog and '
                    'confirmed the changelog is current for releasing {VERSION}? '
                    .format(**globals()))
    if not res.lower().startswith('y'):
        print("Please do that")
        raise SystemExit(1)

    print("Travis-CI tests: http://travis-ci.org/#!/jaraco/distribute")
    res = raw_input('Have you or has someone verified that the tests '
                    'pass on this revision? ')
    if not res.lower().startswith('y'):
        print("Please do that")
        raise SystemExit(2)

    # Tag the release and check out that tag for building.
    subprocess.check_call(['hg', 'tag', VERSION])

    subprocess.check_call(['hg', 'update', VERSION])

    has_docs = build_docs()
    if os.path.isdir('./dist'):
        shutil.rmtree('./dist')
    # Build sdist and upload to PyPI; '-RD -b ""' strips the dev tag.
    cmd = [sys.executable, 'setup.py', '-q', 'egg_info', '-RD', '-b', '',
           'sdist', 'register', 'upload']
    if has_docs:
        cmd.append('upload_docs')
    subprocess.check_call(cmd)

    upload_bootstrap_script()

    # update to the tip for the next operation
    subprocess.check_call(['hg', 'update'])

    # we just tagged the current version, bump for the next release.
    bump_versions()
    subprocess.check_call(['hg', 'ci', '-m',
                           'Bumped to {NEXT_VERSION} in preparation for next '
                           'release.'.format(**globals())])

    # push the changes
    subprocess.check_call(['hg', 'push'])
    add_milestone_and_version()
def has_sphinx():
    """Return True if the ``sphinx-build`` command can be invoked."""
    try:
        # Use a context manager so the devnull handle is always closed
        # (the original leaked the open file object).
        with open(os.path.devnull, 'wb') as devnull:
            subprocess.Popen(['sphinx-build', '--version'], stdout=devnull,
                             stderr=subprocess.STDOUT).wait()
    except Exception:
        # Typically OSError when sphinx-build is not on PATH.
        return False
    return True
def build_docs():
    """Build the Sphinx HTML docs; return True on success, None if no docs dir."""
    if not os.path.isdir('docs'):
        return
    # Start from a clean build directory.
    if os.path.isdir('docs/build'):
        shutil.rmtree('docs/build')
    subprocess.check_call([
        'sphinx-build',
        '-b', 'html',
        '-d', 'build/doctrees',
        '.',
        'build/html',
        ],
        cwd='docs')
    return True
def upload_bootstrap_script():
    """Copy distribute_setup.py to the python-distribute.org host via scp."""
    scp_command = 'pscp' if sys.platform.startswith('win') else 'scp'
    try:
        subprocess.check_call([scp_command, 'distribute_setup.py',
                               'pypi@ziade.org:python-distribute.org/'])
    except (subprocess.CalledProcessError, OSError):
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # still propagate; a failed upload remains non-fatal.
        print("Unable to upload bootstrap script. Ask Tarek to do it.")
if __name__ == '__main__':
do_release()
| mit |
pombredanne/capstone | bindings/python/test_lite.py | 33 | 3276 | #!/usr/bin/env python
# Capstone Python bindings, by Nguyen Anh Quynnh <aquynh@gmail.com>
from __future__ import print_function
from capstone import *
import binascii
from xprint import to_hex
X86_CODE16 = b"\x8d\x4c\x32\x08\x01\xd8\x81\xc6\x34\x12\x00\x00"
X86_CODE32 = b"\x8d\x4c\x32\x08\x01\xd8\x81\xc6\x34\x12\x00\x00"
X86_CODE64 = b"\x55\x48\x8b\x05\xb8\x13\x00\x00"
ARM_CODE = b"\xED\xFF\xFF\xEB\x04\xe0\x2d\xe5\x00\x00\x00\x00\xe0\x83\x22\xe5\xf1\x02\x03\x0e\x00\x00\xa0\xe3\x02\x30\xc1\xe7\x00\x00\x53\xe3"
ARM_CODE2 = b"\x10\xf1\x10\xe7\x11\xf2\x31\xe7\xdc\xa1\x2e\xf3\xe8\x4e\x62\xf3"
THUMB_CODE = b"\x70\x47\xeb\x46\x83\xb0\xc9\x68"
THUMB_CODE2 = b"\x4f\xf0\x00\x01\xbd\xe8\x00\x88\xd1\xe8\x00\xf0"
MIPS_CODE = b"\x0C\x10\x00\x97\x00\x00\x00\x00\x24\x02\x00\x0c\x8f\xa2\x00\x00\x34\x21\x34\x56"
MIPS_CODE2 = b"\x56\x34\x21\x34\xc2\x17\x01\x00"
ARM64_CODE = b"\x21\x7c\x02\x9b\x21\x7c\x00\x53\x00\x40\x21\x4b\xe1\x0b\x40\xb9"
PPC_CODE = b"\x80\x20\x00\x00\x80\x3f\x00\x00\x10\x43\x23\x0e\xd0\x44\x00\x80\x4c\x43\x22\x02\x2d\x03\x00\x80\x7c\x43\x20\x14\x7c\x43\x20\x93\x4f\x20\x00\x21\x4c\xc8\x00\x21"
all_tests = (
(CS_ARCH_X86, CS_MODE_16, X86_CODE16, "X86 16bit (Intel syntax)", 0),
(CS_ARCH_X86, CS_MODE_32, X86_CODE32, "X86 32bit (ATT syntax)", CS_OPT_SYNTAX_ATT),
(CS_ARCH_X86, CS_MODE_32, X86_CODE32, "X86 32 (Intel syntax)", 0),
(CS_ARCH_X86, CS_MODE_64, X86_CODE64, "X86 64 (Intel syntax)", 0),
(CS_ARCH_ARM, CS_MODE_ARM, ARM_CODE, "ARM", 0),
(CS_ARCH_ARM, CS_MODE_THUMB, THUMB_CODE2, "THUMB-2", 0),
(CS_ARCH_ARM, CS_MODE_ARM, ARM_CODE2, "ARM: Cortex-A15 + NEON", 0),
(CS_ARCH_ARM, CS_MODE_THUMB, THUMB_CODE, "THUMB", 0),
(CS_ARCH_MIPS, CS_MODE_MIPS32 + CS_MODE_BIG_ENDIAN, MIPS_CODE, "MIPS-32 (Big-endian)", 0),
(CS_ARCH_MIPS, CS_MODE_MIPS64 + CS_MODE_LITTLE_ENDIAN, MIPS_CODE2, "MIPS-64-EL (Little-endian)", 0),
(CS_ARCH_ARM64, CS_MODE_ARM, ARM64_CODE, "ARM-64", 0),
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, PPC_CODE, "PPC-64", 0),
(CS_ARCH_PPC, CS_MODE_BIG_ENDIAN, PPC_CODE, "PPC-64, print register with number only", CS_OPT_SYNTAX_NOREGNAME),
)
# ## Test cs_disasm_quick()
def test_cs_disasm_quick():
    """Disassemble each platform's code buffer with the lightweight API."""
    for (arch, mode, code, comment, syntax) in all_tests:
        print('*' * 40)
        print("Platform: %s" % comment)
        print("Disasm:"),
        print(to_hex(code))
        # cs_disasm_lite yields (address, size, mnemonic, op_str) tuples.
        for (addr, size, mnemonic, op_str) in cs_disasm_lite(arch, mode, code, 0x1000):
            print("0x%x:\t%s\t%s" % (addr, mnemonic, op_str))
        print()
# ## Test class Cs
def test_class():
    """Disassemble each platform's code buffer via the Cs class interface."""
    for (arch, mode, code, comment, syntax) in all_tests:
        print('*' * 16)
        print("Platform: %s" % comment)
        print("Code: %s" % to_hex(code))
        print("Disasm:")

        try:
            md = Cs(arch, mode)

            if syntax != 0:
                md.syntax = syntax

            for (addr, size, mnemonic, op_str) in md.disasm_lite(code, 0x1000):
                print("0x%x:\t%s\t%s" % (addr, mnemonic, op_str))

            # Print the address just past the last decoded instruction.
            print("0x%x:" % (addr + size))
            print()
        except CsError as e:
            print("ERROR: %s" % e)
# test_cs_disasm_quick()
# print "*" * 40
if __name__ == '__main__':
test_class()
| bsd-3-clause |
cyril51/Sick-Beard | sickbeard/notifiers/nmjv2.py | 35 | 7880 | # Author: Jasper Lanting
# Based on nmj.py by Nico Berlee: http://nico.berlee.nl/
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import urllib, urllib2,xml.dom.minidom
from xml.dom.minidom import parseString
import sickbeard
import telnetlib
import re
import time
from sickbeard import logger
try:
import xml.etree.cElementTree as etree
except ImportError:
import xml.etree.ElementTree as etree
class NMJv2Notifier:
    """Notifier that triggers a media-library rescan on a Popcorn Hour (NMJv2).

    NOTE: this module uses Python 2-only syntax (``except IOError, e``) and
    ``urllib2``; it targets the Sick Beard Python 2 runtime.
    """

    def notify_snatch(self, ep_name):
        return False
        #Not implemented: Start the scanner when snatched does not make any sense

    def notify_download(self, ep_name):
        # A completed download triggers a library rescan.
        self._notifyNMJ()

    def test_notify(self, host):
        return self._sendNMJ(host)

    def notify_settings(self, host, dbloc, instance):
        """
        Retrieves the NMJv2 database location from Popcorn hour

        host: The hostname/IP of the Popcorn Hour server
        dbloc: 'local' for PCH internal harddrive. 'network' for PCH network shares
        instance: Allows for selection of different DB in case of multiple databases

        Returns: True if the settings were retrieved successfully, False otherwise
        """
        try:
            # List the device's storage entries, then probe each path for a DB.
            url_loc = "http://" + host + ":8008/file_operation?arg0=list_user_storage_file&arg1=&arg2=" + instance + "&arg3=20&arg4=true&arg5=true&arg6=true&arg7=all&arg8=name_asc&arg9=false&arg10=false"
            req = urllib2.Request(url_loc)
            handle1 = urllib2.urlopen(req)
            response1 = handle1.read()
            xml = parseString(response1)
            # Brief pause between device requests (300 ms).
            time.sleep(300.0 / 1000.0)
            for node in xml.getElementsByTagName('path'):
                xmlTag = node.toxml();
                xmlData = xmlTag.replace('<path>', '').replace('</path>', '').replace('[=]', '')
                url_db = "http://" + host + ":8008/metadata_database?arg0=check_database&arg1=" + xmlData
                reqdb = urllib2.Request(url_db)
                handledb = urllib2.urlopen(reqdb)
                responsedb = handledb.read()
                xmldb = parseString(responsedb)
                returnvalue = xmldb.getElementsByTagName('returnValue')[0].toxml().replace('<returnValue>', '').replace('</returnValue>', '')
                # returnValue "0" means a valid NMJv2 database was found here.
                if returnvalue == "0":
                    DB_path = xmldb.getElementsByTagName('database_path')[0].toxml().replace('<database_path>', '').replace('</database_path>', '').replace('[=]', '')
                    # 'local' databases live on the internal disk (localhost
                    # path); 'network' databases have a protocol:// path.
                    if dbloc == "local" and DB_path.find("localhost") > -1:
                        sickbeard.NMJv2_HOST = host
                        sickbeard.NMJv2_DATABASE = DB_path
                        return True
                    if dbloc == "network" and DB_path.find("://") > -1:
                        sickbeard.NMJv2_HOST = host
                        sickbeard.NMJv2_DATABASE = DB_path
                        return True

        except IOError, e:
            logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e))
            return False
        return False

    def notify_subtitle_download(self, ep_name, lang):
        if sickbeard.USE_NMJv2:
            # NOTE(review): _sendNMJ() requires a host argument; this call
            # would raise TypeError if reached -- confirm intended behavior.
            self._sendNMJ()

    def _sendNMJ(self, host):
        """
        Sends a NMJ update command to the specified machine

        host: The hostname/IP to send the request to (no port)
        database: The database to send the requst to
        mount: The mount URL to use (optional)

        Returns: True if the request succeeded, False otherwise
        """
        #if a host is provided then attempt to open a handle to that URL
        try:
            # First update the scan directory, then kick off a background scan.
            url_scandir = "http://" + host + ":8008/metadata_database?arg0=update_scandir&arg1=" + sickbeard.NMJv2_DATABASE + "&arg2=&arg3=update_all"
            logger.log(u"NMJ scan update command send to host: %s" % (host))
            url_updatedb = "http://" + host + ":8008/metadata_database?arg0=scanner_start&arg1=" + sickbeard.NMJv2_DATABASE + "&arg2=background&arg3="
            logger.log(u"Try to mount network drive via url: %s" % (host), logger.DEBUG)
            prereq = urllib2.Request(url_scandir)
            req = urllib2.Request(url_updatedb)
            handle1 = urllib2.urlopen(prereq)
            response1 = handle1.read()
            # Brief pause between the two device requests (300 ms).
            time.sleep(300.0 / 1000.0)
            handle2 = urllib2.urlopen(req)
            response2 = handle2.read()
        except IOError, e:
            logger.log(u"Warning: Couldn't contact popcorn hour on host %s: %s" % (host, e))
            return False

        try:
            et = etree.fromstring(response1)
            result1 = et.findtext("returnValue")
        except SyntaxError, e:
            logger.log(u"Unable to parse XML returned from the Popcorn Hour: update_scandir, %s" % (e), logger.ERROR)
            return False

        try:
            et = etree.fromstring(response2)
            result2 = et.findtext("returnValue")
        except SyntaxError, e:
            logger.log(u"Unable to parse XML returned from the Popcorn Hour: scanner_start, %s" % (e), logger.ERROR)
            return False

        # if the result was a number then consider that an error
        error_codes = ["8", "11", "22", "49", "50", "51", "60"]
        error_messages = ["Invalid parameter(s)/argument(s)",
                          "Invalid database path",
                          "Insufficient size",
                          "Database write error",
                          "Database read error",
                          "Open fifo pipe failed",
                          "Read only file system"]
        if int(result1) > 0:
            index = error_codes.index(result1)
            logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]))
            return False
        else:
            if int(result2) > 0:
                index = error_codes.index(result2)
                logger.log(u"Popcorn Hour returned an error: %s" % (error_messages[index]))
                return False
            else:
                logger.log(u"NMJv2 started background scan")
                return True

    def _notifyNMJ(self, host=None, force=False):
        """
        Sends a NMJ update command based on the SB config settings

        host: The host to send the command to (optional, defaults to the host in the config)
        database: The database to use (optional, defaults to the database in the config)
        mount: The mount URL (optional, defaults to the mount URL in the config)
        force: If True then the notification will be sent even if NMJ is disabled in the config
        """
        if not sickbeard.USE_NMJv2 and not force:
            logger.log("Notification for NMJ scan update not enabled, skipping this notification", logger.DEBUG)
            return False

        # fill in omitted parameters
        if not host:
            host = sickbeard.NMJv2_HOST

        logger.log(u"Sending scan command for NMJ ", logger.DEBUG)

        return self._sendNMJ(host)

notifier = NMJv2Notifier
| gpl-3.0 |
ttglennhall/DjangoGirlsTutorial | myvenv/lib/python3.4/site-packages/django/contrib/gis/gdal/envelope.py | 477 | 7009 | """
The GDAL/OGR library uses an Envelope structure to hold the bounding
box information for a geometry. The envelope (bounding box) contains
two pairs of coordinates, one for the lower left coordinate and one
for the upper right coordinate:
+----------o Upper right; (max_x, max_y)
| |
| |
| |
Lower left (min_x, min_y) o----------+
"""
from ctypes import Structure, c_double
from django.contrib.gis.gdal.error import GDALException
# The OGR definition of an Envelope is a C structure containing four doubles.
# See the 'ogr_core.h' source file for more information:
# http://www.gdal.org/ogr/ogr__core_8h-source.html
class OGREnvelope(Structure):
    "Represents the OGREnvelope C Structure."
    # Field names and order mirror the C struct in GDAL's ogr_core.h;
    # each member is a C double.
    _fields_ = [("MinX", c_double),
                ("MaxX", c_double),
                ("MinY", c_double),
                ("MaxY", c_double),
                ]
class Envelope(object):
    """
    The Envelope object is a C structure that contains the minimum and
    maximum X, Y coordinates for a rectangle bounding box. The naming
    of the variables is compatible with the OGR Envelope structure.
    """

    def __init__(self, *args):
        """
        The initialization function may take an OGREnvelope structure, 4-element
        tuple or list, or 4 individual arguments.

        Raises GDALException for a bad argument count or inverted extents,
        TypeError for an unsupported argument type.
        """
        if len(args) == 1:
            if isinstance(args[0], OGREnvelope):
                # OGREnvelope (a ctypes Structure) was passed in.
                self._envelope = args[0]
            elif isinstance(args[0], (tuple, list)):
                # A tuple was passed in.
                if len(args[0]) != 4:
                    raise GDALException('Incorrect number of tuple elements (%d).' % len(args[0]))
                else:
                    self._from_sequence(args[0])
            else:
                raise TypeError('Incorrect type of argument: %s' % str(type(args[0])))
        elif len(args) == 4:
            # Individual parameters passed in.
            # Thanks to ww for the help
            self._from_sequence([float(a) for a in args])
        else:
            raise GDALException('Incorrect number (%d) of arguments.' % len(args))

        # Checking the x,y coordinates
        if self.min_x > self.max_x:
            raise GDALException('Envelope minimum X > maximum X.')
        if self.min_y > self.max_y:
            raise GDALException('Envelope minimum Y > maximum Y.')

    def __eq__(self, other):
        """
        Returns True if the envelopes are equivalent; can compare against
        other Envelopes and 4-tuples.
        """
        if isinstance(other, Envelope):
            return (self.min_x == other.min_x) and (self.min_y == other.min_y) and \
                   (self.max_x == other.max_x) and (self.max_y == other.max_y)
        elif isinstance(other, tuple) and len(other) == 4:
            return (self.min_x == other[0]) and (self.min_y == other[1]) and \
                   (self.max_x == other[2]) and (self.max_y == other[3])
        else:
            raise GDALException('Equivalence testing only works with other Envelopes.')

    def __str__(self):
        "Returns a string representation of the tuple."
        return str(self.tuple)

    def _from_sequence(self, seq):
        "Initializes the C OGR Envelope structure from the given sequence."
        self._envelope = OGREnvelope()
        self._envelope.MinX = seq[0]
        self._envelope.MinY = seq[1]
        self._envelope.MaxX = seq[2]
        self._envelope.MaxY = seq[3]

    def expand_to_include(self, *args):
        """
        Modifies the envelope to expand to include the boundaries of
        the passed-in 2-tuple (a point), 4-tuple (an extent) or
        envelope.
        """
        # We provide a number of different signatures for this method,
        # and the logic here is all about converting them into a
        # 4-tuple single parameter which does the actual work of
        # expanding the envelope.
        if len(args) == 1:
            if isinstance(args[0], Envelope):
                return self.expand_to_include(args[0].tuple)
            elif hasattr(args[0], 'x') and hasattr(args[0], 'y'):
                # A point-like object: treat as a degenerate extent.
                return self.expand_to_include(args[0].x, args[0].y, args[0].x, args[0].y)
            elif isinstance(args[0], (tuple, list)):
                # A tuple was passed in.
                if len(args[0]) == 2:
                    return self.expand_to_include((args[0][0], args[0][1], args[0][0], args[0][1]))
                elif len(args[0]) == 4:
                    (minx, miny, maxx, maxy) = args[0]
                    if minx < self._envelope.MinX:
                        self._envelope.MinX = minx
                    if miny < self._envelope.MinY:
                        self._envelope.MinY = miny
                    if maxx > self._envelope.MaxX:
                        self._envelope.MaxX = maxx
                    if maxy > self._envelope.MaxY:
                        self._envelope.MaxY = maxy
                else:
                    raise GDALException('Incorrect number of tuple elements (%d).' % len(args[0]))
            else:
                raise TypeError('Incorrect type of argument: %s' % str(type(args[0])))
        elif len(args) == 2:
            # An x and an y parameter were passed in
            return self.expand_to_include((args[0], args[1], args[0], args[1]))
        elif len(args) == 4:
            # Individual parameters passed in.
            return self.expand_to_include(args)
        else:
            # Bug fix: the original formatted this message with len(args[0]),
            # which raises TypeError for unsized arguments and reports the
            # wrong count; len(args) matches the check above and __init__.
            raise GDALException('Incorrect number (%d) of arguments.' % len(args))

    @property
    def min_x(self):
        "Returns the value of the minimum X coordinate."
        return self._envelope.MinX

    @property
    def min_y(self):
        "Returns the value of the minimum Y coordinate."
        return self._envelope.MinY

    @property
    def max_x(self):
        "Returns the value of the maximum X coordinate."
        return self._envelope.MaxX

    @property
    def max_y(self):
        "Returns the value of the maximum Y coordinate."
        return self._envelope.MaxY

    @property
    def ur(self):
        "Returns the upper-right coordinate."
        return (self.max_x, self.max_y)

    @property
    def ll(self):
        "Returns the lower-left coordinate."
        return (self.min_x, self.min_y)

    @property
    def tuple(self):
        "Returns a tuple representing the envelope."
        return (self.min_x, self.min_y, self.max_x, self.max_y)

    @property
    def wkt(self):
        "Returns WKT representing a Polygon for this envelope."
        # TODO: Fix significant figures.
        return 'POLYGON((%s %s,%s %s,%s %s,%s %s,%s %s))' % \
               (self.min_x, self.min_y, self.min_x, self.max_y,
                self.max_x, self.max_y, self.max_x, self.min_y,
                self.min_x, self.min_y)
| mit |
bonitadecker77/python-for-android | python-modules/twisted/twisted/internet/test/test_threads.py | 56 | 6132 | # Copyright (c) 2008-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for implementations of L{IReactorThreads}.
"""
__metaclass__ = type
from weakref import ref
import gc
from twisted.internet.test.reactormixins import ReactorBuilder
from twisted.python.threadpool import ThreadPool
class ThreadTestsBuilder(ReactorBuilder):
    """
    Builder for defining tests relating to L{IReactorThreads}.
    """

    def test_getThreadPool(self):
        """
        C{reactor.getThreadPool()} returns an instance of L{ThreadPool} which
        starts when C{reactor.run()} is called and stops before it returns.
        """
        state = []
        reactor = self.buildReactor()

        pool = reactor.getThreadPool()
        self.assertIsInstance(pool, ThreadPool)
        self.assertFalse(
            pool.started, "Pool should not start before reactor.run")

        def f():
            # Record the state for later assertions
            state.append(pool.started)
            state.append(pool.joined)
            reactor.stop()

        reactor.callWhenRunning(f)
        self.runReactor(reactor, 2)

        self.assertTrue(
            state[0], "Pool should start after reactor.run")
        self.assertFalse(
            state[1], "Pool should not be joined before reactor.stop")
        self.assertTrue(
            pool.joined,
            "Pool should be stopped after reactor.run returns")

    def test_suggestThreadPoolSize(self):
        """
        C{reactor.suggestThreadPoolSize()} sets the maximum size of the reactor
        threadpool.
        """
        reactor = self.buildReactor()
        reactor.suggestThreadPoolSize(17)
        pool = reactor.getThreadPool()
        self.assertEqual(pool.max, 17)

    def test_delayedCallFromThread(self):
        """
        A function scheduled with L{IReactorThreads.callFromThread} invoked
        from a delayed call is run immediately in the next reactor iteration.

        When invoked from the reactor thread, previous implementations of
        L{IReactorThreads.callFromThread} would skip the pipe/socket based wake
        up step, assuming the reactor would wake up on its own.  However, this
        resulted in the reactor not noticing a insert into the thread queue at
        the right time (in this case, after the thread queue has been processed
        for that reactor iteration).
        """
        reactor = self.buildReactor()

        def threadCall():
            reactor.stop()

        # Set up the use of callFromThread being tested.
        reactor.callLater(0, reactor.callFromThread, threadCall)

        before = reactor.seconds()
        self.runReactor(reactor, 60)
        after = reactor.seconds()

        # We specified a timeout of 60 seconds.  The timeout code in runReactor
        # probably won't actually work, though.  If the reactor comes out of
        # the event notification API just a little bit early, say after 59.9999
        # seconds instead of after 60 seconds, then the queued thread call will
        # get processed but the timeout delayed call runReactor sets up won't!
        # Then the reactor will stop and runReactor will return without the
        # timeout firing.  As it turns out, select() and poll() are quite
        # likely to return *slightly* earlier than we ask them to, so the
        # timeout will rarely happen, even if callFromThread is broken.  So,
        # instead we'll measure the elapsed time and make sure it's something
        # less than about half of the timeout we specified.  This is heuristic.
        # It assumes that select() won't ever return after 30 seconds when we
        # asked it to timeout after 60 seconds.  And of course like all
        # time-based tests, it's slightly non-deterministic.  If the OS doesn't
        # schedule this process for 30 seconds, then the test might fail even
        # if callFromThread is working.
        self.assertTrue(after - before < 30)

    def test_stopThreadPool(self):
        """
        When the reactor stops, L{ReactorBase._stopThreadPool} drops the
        reactor's direct reference to its internal threadpool and removes
        the associated startup and shutdown triggers.

        This is the case of the thread pool being created before the reactor
        is run.
        """
        reactor = self.buildReactor()
        # Hold only a weak reference so garbage collection can be observed.
        threadpool = ref(reactor.getThreadPool())
        reactor.callWhenRunning(reactor.stop)
        self.runReactor(reactor)
        gc.collect()
        self.assertIdentical(threadpool(), None)

    def test_stopThreadPoolWhenStartedAfterReactorRan(self):
        """
        We must handle the case of shutting down the thread pool when it was
        started after the reactor was run in a special way.

        Some implementation background: The thread pool is started with
        callWhenRunning, which only returns a system trigger ID when it is
        invoked before the reactor is started.

        This is the case of the thread pool being created after the reactor
        is started.
        """
        reactor = self.buildReactor()
        threadPoolRefs = []

        def acquireThreadPool():
            threadPoolRefs.append(ref(reactor.getThreadPool()))
            reactor.stop()

        reactor.callWhenRunning(acquireThreadPool)
        self.runReactor(reactor)
        gc.collect()
        self.assertIdentical(threadPoolRefs[0](), None)

    def test_cleanUpThreadPoolEvenBeforeReactorIsRun(self):
        """
        When the reactor has its shutdown event fired before it is run, the
        thread pool is completely destroyed.

        For what it's worth, the reason we support this behavior at all is
        because Trial does this.

        This is the case of the thread pool being created without the reactor
        being started at al.
        """
        reactor = self.buildReactor()
        threadPoolRef = ref(reactor.getThreadPool())
        reactor.fireSystemEvent("shutdown")
        gc.collect()
        self.assertIdentical(threadPoolRef(), None)
# Generate concrete TestCase subclasses (one per available reactor) from the
# builder and inject them into this module's namespace so the test runner can
# discover them.
globals().update(ThreadTestsBuilder.makeTestCaseClasses())
| apache-2.0 |
yushakov/simulif | simulif/py_scripts/entropy/bst.py | 1 | 7932 | # -*- coding: utf-8 -*-
"""
Created on Sat Jun 6 11:53:27 2015
@author: http://www.cs.toronto.edu/~rdanek/csc148h_09/lectures/8/bst.py
"""
class BinarySearchTree:
    """A simple unbalanced binary search tree mapping keys to values.

    Most operations are delegated to ``TreeNode``; an empty tree is
    represented by ``root`` being None.
    """

    def __init__(self):
        """ create an empty binary search tree """
        self.root = None

    def put(self, key, value):
        """ add a new mapping between key and value to the BST """
        if self.root:
            self.root.put(key, value)
        else:
            self.root = TreeNode(key, value)

    def get(self, key):
        """ retrieve the value associated with the given key (None if absent) """
        if self.root:
            return self.root.get(key)
        else:
            return None

    def has_key(self, key):
        """ check if the node with the given key is in the tree """
        # the following assumes None is never stored with a key
        return not self.get(key) is None

    def delete(self, key):
        """ delete the node with the given key if it exists """
        if self.root:
            self.root = self.root.delete(key)

    def __iter__(self):
        """ returns an inorder iterator of (key, value) pairs

        Bug fix: the previous empty-tree branch returned an object that only
        implemented the Python 2 iterator protocol (a ``next`` method, no
        ``__iter__``/``__next__``), so ``for elt in tree`` raised TypeError on
        an empty tree under Python 3.  ``iter(())`` gives an immediately
        exhausted iterator instead.
        """
        if self.root:
            # if the tree is not empty, just return the root's iterator
            return iter(self.root)
        return iter(())
class TreeNode:
    """One node of a binary search tree: a key/value pair plus two children."""

    def __init__(self, key, val):
        self.key = key
        self.val = val
        self.left = None
        self.right = None

    def __iter__(self):
        """Yield (key, val) pairs of the subtree rooted here, inorder.

        Iterating a child node recursively obtains that child's own
        generator, so the whole subtree is walked left-node-right.
        """
        if self.left:
            for pair in self.left:
                yield pair
        yield (self.key, self.val)
        if self.right:
            for pair in self.right:
                yield pair

    def put(self, key, val):
        """Insert or update the mapping for ``key`` in this subtree."""
        if key == self.key:
            # Existing key: overwrite the stored value.
            self.val = val
        elif key < self.key:
            if self.left:
                self.left.put(key, val)
            else:
                self.left = TreeNode(key, val)
        else:
            if self.right:
                self.right.put(key, val)
            else:
                self.right = TreeNode(key, val)

    def get(self, key):
        """Return the value stored under ``key``, or None if absent."""
        if key == self.key:
            return self.val
        child = self.left if key < self.key else self.right
        return child.get(key) if child else None

    def delete(self, key):
        """Remove ``key`` from this subtree and return its new root node.

        A key that is not present is silently ignored.
        """
        if key != self.key:
            # Recurse into whichever side the key would live on.
            if key < self.key:
                if self.left:
                    self.left = self.left.delete(key)
            else:
                if self.right:
                    self.right = self.right.delete(key)
            return self
        # This node holds the key to remove.
        if not (self.left and self.right):
            # Zero or one child: promote whichever child exists (may be None).
            return self.left if self.left else self.right
        # Two children: replace this node with its inorder successor --
        # the minimum node of the right subtree.
        succ_parent, successor = self.right._findMin(self)
        # Splice the successor out of its old position (we need its parent
        # to do this).
        if succ_parent.left is successor:
            succ_parent.left = successor.right
        else:
            succ_parent.right = successor.right
        # Adopt this node's children and take its place in the tree.
        successor.left = self.left
        successor.right = self.right
        return successor

    def _findMin(self, parent):
        """Return [parent_of_min, min_node] for the subtree rooted here.

        The caller's node is threaded through as ``parent`` so the leftmost
        descendant can report which node points at it.
        """
        node, par = self, parent
        while node.left:
            par, node = node, node.left
        return [par, node]
def preorder(root):
    """Print the keys of the tree rooted at ``root`` in preorder
    (node first, then left subtree, then right subtree)."""
    if not root:
        return
    print(root.key)
    preorder(root.left)
    preorder(root.right)
def postorder(root):
    """Print the keys of the tree rooted at ``root`` in postorder
    (left subtree, then right subtree, then the node itself)."""
    if not root:
        return
    postorder(root.left)
    postorder(root.right)
    print(root.key)
def inorder(root):
    """Print the keys of the tree rooted at ``root`` in inorder
    (ascending key order for a BST)."""
    if not root:
        return
    inorder(root.left)
    print(root.key)
    inorder(root.right)
def inorder_val(root):
    """Print "key, value" for each node of the tree rooted at ``root``,
    in inorder sequence."""
    if not root:
        return
    inorder_val(root.left)
    print(str(root.key) + ", " + str(root.val))
    inorder_val(root.right)
def print_tree(root):
    '''Print the tree rooted at root.'''
    # Delegates to print_helper, which renders the tree sideways (right
    # subtree printed first, i.e. on top) with indentation showing depth.
    print_helper(root, "")
def print_helper(root, indent):
    '''Print the tree rooted at BTNode root. Print str indent (which
    consists only of whitespace) before the root value; indent more for the
    subtrees so that it looks nice.'''
    if root is not None:
        # Reverse-inorder traversal: the right subtree is printed first so
        # the tree reads correctly when the output is rotated 90 degrees
        # clockwise (root on the left, leaves to the right).
        print_helper(root.right, indent + "  ")
        print(indent + str(root.key))
        print_helper(root.left, indent + "  ")
def bst_test():
    """Command prompt-like tree testing"""
    tree = BinarySearchTree()
    while True:
        line = input("command> ")
        if line == "quit":
            break
        cmdlist = line.split()
        if not cmdlist:
            continue
        cmd = cmdlist[0]
        if cmd == "has_key":
            print(tree.has_key(int(cmdlist[1])))
        elif cmd == "put":
            # With no explicit value argument, store the string "default".
            val = "default" if len(cmdlist) == 2 else cmdlist[2]
            tree.put(int(cmdlist[1]), val)
            print_tree(tree.root)
        elif cmd == "get":
            print(tree.get(int(cmdlist[1])))
        elif cmd == "delete":
            tree.delete(int(cmdlist[1]))
            print_tree(tree.root)
        elif cmd == "iterate":
            for elt in tree:
                print(elt)
| gpl-3.0 |
sadaf2605/django | django/contrib/gis/gdal/layer.py | 109 | 8689 | from ctypes import byref, c_double
from django.contrib.gis.gdal.base import GDALBase
from django.contrib.gis.gdal.envelope import Envelope, OGREnvelope
from django.contrib.gis.gdal.error import (
GDALException, OGRIndexError, SRSException,
)
from django.contrib.gis.gdal.feature import Feature
from django.contrib.gis.gdal.field import OGRFieldTypes
from django.contrib.gis.gdal.geometries import OGRGeometry
from django.contrib.gis.gdal.geomtype import OGRGeomType
from django.contrib.gis.gdal.prototypes import (
ds as capi, geom as geom_api, srs as srs_api,
)
from django.contrib.gis.gdal.srs import SpatialReference
from django.utils import six
from django.utils.encoding import force_bytes, force_text
from django.utils.six.moves import range
# For more information, see the OGR C API source code:
# http://www.gdal.org/ogr__api_8h.html
#
# The OGR_L_* routines are relevant here.
class Layer(GDALBase):
    "A class that wraps an OGR Layer, needs to be instantiated from a DataSource object."

    def __init__(self, layer_ptr, ds):
        """
        Initializes on an OGR C pointer to the Layer and the `DataSource` object
        that owns this layer. The `DataSource` object is required so that a
        reference to it is kept with this Layer. This prevents garbage
        collection of the `DataSource` while this Layer is still active.
        """
        if not layer_ptr:
            raise GDALException('Cannot create Layer, invalid pointer given')
        self.ptr = layer_ptr
        self._ds = ds
        self._ldefn = capi.get_layer_defn(self._ptr)
        # Does the Layer support random reading?
        self._random_read = self.test_capability(b'RandomRead')

    def __getitem__(self, index):
        "Gets the Feature at the specified index."
        if isinstance(index, six.integer_types):
            # An integer index was given -- we cannot do a check based on the
            # number of features because the beginning and ending feature IDs
            # are not guaranteed to be 0 and len(layer)-1, respectively.
            if index < 0:
                raise OGRIndexError('Negative indices are not allowed on OGR Layers.')
            return self._make_feature(index)
        elif isinstance(index, slice):
            # A slice was given
            start, stop, stride = index.indices(self.num_feat)
            return [self._make_feature(fid) for fid in range(start, stop, stride)]
        else:
            raise TypeError('Integers and slices may only be used when indexing OGR Layers.')

    def __iter__(self):
        "Iterates over each Feature in the Layer."
        # ResetReading() must be called before iteration is to begin.
        capi.reset_reading(self._ptr)
        for i in range(self.num_feat):
            yield Feature(capi.get_next_feature(self._ptr), self)

    def __len__(self):
        "The length is the number of features."
        return self.num_feat

    def __str__(self):
        "The string name of the layer."
        return self.name

    def _make_feature(self, feat_id):
        """
        Helper routine for __getitem__ that constructs a Feature from the given
        Feature ID.  If the OGR Layer does not support random-access reading,
        then each feature of the layer will be incremented through until a
        Feature is found matching the given feature ID.
        """
        if self._random_read:
            # If the Layer supports random reading, return.
            try:
                return Feature(capi.get_feature(self.ptr, feat_id), self)
            except GDALException:
                pass
        else:
            # Random access isn't supported, have to increment through
            # each feature until the given feature ID is encountered.
            for feat in self:
                if feat.fid == feat_id:
                    return feat
        # Should have returned a Feature, raise an OGRIndexError.
        raise OGRIndexError('Invalid feature id: %s.' % feat_id)

    # #### Layer properties ####
    @property
    def extent(self):
        "Returns the extent (an Envelope) of this layer."
        env = OGREnvelope()
        capi.get_extent(self.ptr, byref(env), 1)
        return Envelope(env)

    @property
    def name(self):
        "Returns the name of this layer in the Data Source."
        name = capi.get_fd_name(self._ldefn)
        return force_text(name, self._ds.encoding, strings_only=True)

    @property
    def num_feat(self):
        """
        Returns the number of features in the Layer.

        Bug fix: this was previously declared as ``num_feat(self, force=1)``;
        a property getter is only ever invoked with ``self``, so the extra
        parameter was dead code.  The force flag (1) is now passed explicitly,
        telling OGR it may scan the layer if the count is not already known.
        """
        return capi.get_feature_count(self.ptr, 1)

    @property
    def num_fields(self):
        "Returns the number of fields in the Layer."
        return capi.get_field_count(self._ldefn)

    @property
    def geom_type(self):
        "Returns the geometry type (OGRGeomType) of the Layer."
        return OGRGeomType(capi.get_fd_geom_type(self._ldefn))

    @property
    def srs(self):
        "Returns the Spatial Reference used in this Layer."
        try:
            ptr = capi.get_layer_srs(self.ptr)
            return SpatialReference(srs_api.clone_srs(ptr))
        except SRSException:
            return None

    @property
    def fields(self):
        """
        Returns a list of string names corresponding to each of the Fields
        available in this Layer.
        """
        return [force_text(capi.get_field_name(capi.get_field_defn(self._ldefn, i)),
                           self._ds.encoding, strings_only=True)
                for i in range(self.num_fields)]

    @property
    def field_types(self):
        """
        Returns a list of the types of fields in this Layer.  For example,
        the list [OFTInteger, OFTReal, OFTString] would be returned for
        an OGR layer that had an integer, a floating-point, and string
        fields.
        """
        return [OGRFieldTypes[capi.get_field_type(capi.get_field_defn(self._ldefn, i))]
                for i in range(self.num_fields)]

    @property
    def field_widths(self):
        "Returns a list of the maximum field widths for the features."
        return [capi.get_field_width(capi.get_field_defn(self._ldefn, i))
                for i in range(self.num_fields)]

    @property
    def field_precisions(self):
        "Returns the field precisions for the features."
        return [capi.get_field_precision(capi.get_field_defn(self._ldefn, i))
                for i in range(self.num_fields)]

    def _get_spatial_filter(self):
        "Returns the spatial filter as an OGRGeometry, or None if unset."
        try:
            return OGRGeometry(geom_api.clone_geom(capi.get_spatial_filter(self.ptr)))
        except GDALException:
            return None

    def _set_spatial_filter(self, filter):
        "Sets the spatial filter from an OGRGeometry, a 4-sequence, or None."
        if isinstance(filter, OGRGeometry):
            capi.set_spatial_filter(self.ptr, filter.ptr)
        elif isinstance(filter, (tuple, list)):
            if not len(filter) == 4:
                raise ValueError('Spatial filter list/tuple must have 4 elements.')
            # Map c_double onto params -- if a bad type is passed in it
            # will be caught here.
            xmin, ymin, xmax, ymax = map(c_double, filter)
            capi.set_spatial_filter_rect(self.ptr, xmin, ymin, xmax, ymax)
        elif filter is None:
            capi.set_spatial_filter(self.ptr, None)
        else:
            raise TypeError('Spatial filter must be either an OGRGeometry instance, a 4-tuple, or None.')

    spatial_filter = property(_get_spatial_filter, _set_spatial_filter)

    # #### Layer Methods ####
    def get_fields(self, field_name):
        """
        Returns a list containing the given field name for every Feature
        in the Layer.
        """
        if field_name not in self.fields:
            raise GDALException('invalid field name: %s' % field_name)
        return [feat.get(field_name) for feat in self]

    def get_geoms(self, geos=False):
        """
        Returns a list containing the OGRGeometry for every Feature in
        the Layer.
        """
        if geos:
            from django.contrib.gis.geos import GEOSGeometry
            return [GEOSGeometry(feat.geom.wkb) for feat in self]
        else:
            return [feat.geom for feat in self]

    def test_capability(self, capability):
        """
        Returns a bool indicating whether this Layer supports the given
        capability (a string).  Valid capability strings include:
          'RandomRead', 'SequentialWrite', 'RandomWrite', 'FastSpatialFilter',
          'FastFeatureCount', 'FastGetExtent', 'CreateField', 'Transactions',
          'DeleteFeature', and 'FastSetNextByIndex'.
        """
        return bool(capi.test_capability(self.ptr, force_bytes(capability)))
| bsd-3-clause |
eduNEXT/edunext-ecommerce | ecommerce/extensions/order/migrations/0001_initial.py | 1 | 24150 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import django.db.models.deletion
import oscar.models.fields
import oscar.models.fields.autoslugfield
from django.conf import settings
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('partner', '0001_initial'),
('customer', '0001_initial'),
('address', '0001_initial'),
('basket', '0002_auto_20140827_1705'),
('catalogue', '0001_initial'),
('sites', '0001_initial'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='BillingAddress',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(verbose_name='Title', max_length=64, blank=True, choices=[('Mr', 'Mr'), ('Miss', 'Miss'), ('Mrs', 'Mrs'), ('Ms', 'Ms'), ('Dr', 'Dr')])),
('first_name', models.CharField(max_length=255, verbose_name='First name', blank=True)),
('last_name', models.CharField(max_length=255, verbose_name='Last name', blank=True)),
('line1', models.CharField(max_length=255, verbose_name='First line of address')),
('line2', models.CharField(max_length=255, verbose_name='Second line of address', blank=True)),
('line3', models.CharField(max_length=255, verbose_name='Third line of address', blank=True)),
('line4', models.CharField(max_length=255, verbose_name='City', blank=True)),
('state', models.CharField(max_length=255, verbose_name='State/County', blank=True)),
('postcode', oscar.models.fields.UppercaseCharField(max_length=64, verbose_name='Post/Zip-code', blank=True)),
('search_text', models.TextField(editable=False, verbose_name='Search text - used only for searching addresses')),
('country', models.ForeignKey(verbose_name='Country', to='address.Country', on_delete=models.CASCADE)),
],
options={
'verbose_name_plural': 'Billing addresses',
'verbose_name': 'Billing address',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CommunicationEvent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date')),
('event_type', models.ForeignKey(verbose_name='Event Type', to='customer.CommunicationEventType', on_delete=models.CASCADE)),
],
options={
'ordering': ['-date_created'],
'verbose_name_plural': 'Communication Events',
'verbose_name': 'Communication Event',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Line',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('partner_name', models.CharField(max_length=128, verbose_name='Partner name', blank=True)),
('partner_sku', models.CharField(max_length=128, verbose_name='Partner SKU')),
('partner_line_reference', models.CharField(verbose_name='Partner reference', max_length=128, help_text='This is the item number that the partner uses within their system', blank=True)),
('partner_line_notes', models.TextField(verbose_name='Partner Notes', blank=True)),
('title', models.CharField(max_length=255, verbose_name='Title')),
('upc', models.CharField(verbose_name='UPC', max_length=128, blank=True, null=True)),
('quantity', models.PositiveIntegerField(default=1, verbose_name='Quantity')),
('line_price_incl_tax', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Price (inc. tax)')),
('line_price_excl_tax', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Price (excl. tax)')),
('line_price_before_discounts_incl_tax', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Price before discounts (inc. tax)')),
('line_price_before_discounts_excl_tax', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Price before discounts (excl. tax)')),
('unit_cost_price', models.DecimalField(max_digits=12, decimal_places=2, blank=True, verbose_name='Unit Cost Price', null=True)),
('unit_price_incl_tax', models.DecimalField(max_digits=12, decimal_places=2, blank=True, verbose_name='Unit Price (inc. tax)', null=True)),
('unit_price_excl_tax', models.DecimalField(max_digits=12, decimal_places=2, blank=True, verbose_name='Unit Price (excl. tax)', null=True)),
('unit_retail_price', models.DecimalField(max_digits=12, decimal_places=2, blank=True, verbose_name='Unit Retail Price', null=True)),
('status', models.CharField(max_length=255, verbose_name='Status', blank=True)),
('est_dispatch_date', models.DateField(blank=True, verbose_name='Estimated Dispatch Date', null=True)),
],
options={
'verbose_name_plural': 'Order Lines',
'verbose_name': 'Order Line',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='LineAttribute',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(max_length=128, verbose_name='Type')),
('value', models.CharField(max_length=255, verbose_name='Value')),
('line', models.ForeignKey(verbose_name='Line', related_name='attributes', to='order.Line', on_delete=models.CASCADE)),
('option', models.ForeignKey(verbose_name='Option', on_delete=django.db.models.deletion.SET_NULL, related_name='line_attributes', to='catalogue.Option', null=True)),
],
options={
'verbose_name_plural': 'Line Attributes',
'verbose_name': 'Line Attribute',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='LinePrice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.PositiveIntegerField(default=1, verbose_name='Quantity')),
('price_incl_tax', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Price (inc. tax)')),
('price_excl_tax', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Price (excl. tax)')),
('shipping_incl_tax', models.DecimalField(default=0, max_digits=12, decimal_places=2, verbose_name='Shiping (inc. tax)')),
('shipping_excl_tax', models.DecimalField(default=0, max_digits=12, decimal_places=2, verbose_name='Shipping (excl. tax)')),
('line', models.ForeignKey(verbose_name='Line', related_name='prices', to='order.Line', on_delete=models.CASCADE)),
],
options={
'ordering': ('id',),
'verbose_name_plural': 'Line Prices',
'verbose_name': 'Line Price',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('number', models.CharField(max_length=128, unique=True, db_index=True, verbose_name='Order number')),
('currency', models.CharField(default='GBP', max_length=12, verbose_name='Currency')),
('total_incl_tax', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Order total (inc. tax)')),
('total_excl_tax', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Order total (excl. tax)')),
('shipping_incl_tax', models.DecimalField(default=0, max_digits=12, decimal_places=2, verbose_name='Shipping charge (inc. tax)')),
('shipping_excl_tax', models.DecimalField(default=0, max_digits=12, decimal_places=2, verbose_name='Shipping charge (excl. tax)')),
('shipping_method', models.CharField(max_length=128, verbose_name='Shipping method', blank=True)),
('shipping_code', models.CharField(default='', max_length=128, blank=True)),
('status', models.CharField(max_length=100, verbose_name='Status', blank=True)),
('guest_email', models.EmailField(max_length=75, verbose_name='Guest email address', blank=True)),
('date_placed', models.DateTimeField(auto_now_add=True, db_index=True)),
('basket', models.ForeignKey(null=True, verbose_name='Basket', on_delete=django.db.models.deletion.SET_NULL, to='basket.Basket', blank=True)),
('billing_address', models.ForeignKey(null=True, verbose_name='Billing Address', on_delete=django.db.models.deletion.SET_NULL, to='order.BillingAddress', blank=True)),
],
options={
'ordering': ['-date_placed'],
'verbose_name_plural': 'Orders',
'verbose_name': 'Order',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OrderDiscount',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('category', models.CharField(default='Basket', max_length=64, verbose_name='Discount category', choices=[('Basket', 'Basket'), ('Shipping', 'Shipping'), ('Deferred', 'Deferred')])),
('offer_id', models.PositiveIntegerField(blank=True, verbose_name='Offer ID', null=True)),
('offer_name', models.CharField(max_length=128, db_index=True, verbose_name='Offer name', blank=True)),
('voucher_id', models.PositiveIntegerField(blank=True, verbose_name='Voucher ID', null=True)),
('voucher_code', models.CharField(max_length=128, db_index=True, verbose_name='Code', blank=True)),
('frequency', models.PositiveIntegerField(verbose_name='Frequency', null=True)),
('amount', models.DecimalField(default=0, max_digits=12, decimal_places=2, verbose_name='Amount')),
('message', models.TextField(blank=True)),
('order', models.ForeignKey(verbose_name='Order', related_name='discounts', to='order.Order', on_delete=models.CASCADE)),
],
options={
'verbose_name_plural': 'Order Discounts',
'verbose_name': 'Order Discount',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='OrderNote',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('note_type', models.CharField(max_length=128, verbose_name='Note Type', blank=True)),
('message', models.TextField(verbose_name='Message')),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date Created')),
('date_updated', models.DateTimeField(auto_now=True, verbose_name='Date Updated')),
('order', models.ForeignKey(verbose_name='Order', related_name='notes', to='order.Order', on_delete=models.CASCADE)),
('user', models.ForeignKey(verbose_name='User', to=settings.AUTH_USER_MODEL, null=True, on_delete=models.CASCADE)),
],
options={
'verbose_name_plural': 'Order Notes',
'verbose_name': 'Order Note',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PaymentEvent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('amount', models.DecimalField(max_digits=12, decimal_places=2, verbose_name='Amount')),
('reference', models.CharField(max_length=128, verbose_name='Reference', blank=True)),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date created')),
],
options={
'ordering': ['-date_created'],
'verbose_name_plural': 'Payment Events',
'verbose_name': 'Payment Event',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PaymentEventQuantity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.PositiveIntegerField(verbose_name='Quantity')),
('event', models.ForeignKey(verbose_name='Event', related_name='line_quantities', to='order.PaymentEvent', on_delete=models.CASCADE)),
('line', models.ForeignKey(verbose_name='Line', related_name='payment_event_quantities', to='order.Line', on_delete=models.CASCADE)),
],
options={
'verbose_name_plural': 'Payment Event Quantities',
'verbose_name': 'Payment Event Quantity',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PaymentEventType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(unique=True, max_length=128, verbose_name='Name')),
('code', oscar.models.fields.autoslugfield.AutoSlugField(populate_from='name', unique=True, verbose_name='Code', max_length=128, editable=False, blank=True)),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'Payment Event Types',
'verbose_name': 'Payment Event Type',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ShippingAddress',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(verbose_name='Title', max_length=64, blank=True, choices=[('Mr', 'Mr'), ('Miss', 'Miss'), ('Mrs', 'Mrs'), ('Ms', 'Ms'), ('Dr', 'Dr')])),
('first_name', models.CharField(max_length=255, verbose_name='First name', blank=True)),
('last_name', models.CharField(max_length=255, verbose_name='Last name', blank=True)),
('line1', models.CharField(max_length=255, verbose_name='First line of address')),
('line2', models.CharField(max_length=255, verbose_name='Second line of address', blank=True)),
('line3', models.CharField(max_length=255, verbose_name='Third line of address', blank=True)),
('line4', models.CharField(max_length=255, verbose_name='City', blank=True)),
('state', models.CharField(max_length=255, verbose_name='State/County', blank=True)),
('postcode', oscar.models.fields.UppercaseCharField(max_length=64, verbose_name='Post/Zip-code', blank=True)),
('search_text', models.TextField(editable=False, verbose_name='Search text - used only for searching addresses')),
('phone_number', oscar.models.fields.PhoneNumberField(verbose_name='Phone number', help_text='In case we need to call you about your order', blank=True)),
('notes', models.TextField(verbose_name='Instructions', help_text='Tell us anything we should know when delivering your order.', blank=True)),
('country', models.ForeignKey(verbose_name='Country', to='address.Country', on_delete=models.CASCADE)),
],
options={
'verbose_name_plural': 'Shipping addresses',
'verbose_name': 'Shipping address',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ShippingEvent',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('notes', models.TextField(verbose_name='Event notes', help_text='This could be the dispatch reference, or a tracking number', blank=True)),
('date_created', models.DateTimeField(auto_now_add=True, verbose_name='Date Created')),
],
options={
'ordering': ['-date_created'],
'verbose_name_plural': 'Shipping Events',
'verbose_name': 'Shipping Event',
'abstract': False,
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ShippingEventQuantity',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.PositiveIntegerField(verbose_name='Quantity')),
('event', models.ForeignKey(verbose_name='Event', related_name='line_quantities', to='order.ShippingEvent', on_delete=models.CASCADE)),
('line', models.ForeignKey(verbose_name='Line', related_name='shipping_event_quantities', to='order.Line', on_delete=models.CASCADE)),
],
options={
'verbose_name_plural': 'Shipping Event Quantities',
'verbose_name': 'Shipping Event Quantity',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='ShippingEventType',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(unique=True, max_length=255, verbose_name='Name')),
('code', oscar.models.fields.autoslugfield.AutoSlugField(populate_from='name', unique=True, verbose_name='Code', max_length=128, editable=False, blank=True)),
],
options={
'ordering': ('name',),
'verbose_name_plural': 'Shipping Event Types',
'verbose_name': 'Shipping Event Type',
'abstract': False,
},
bases=(models.Model,),
),
migrations.AlterUniqueTogether(
name='shippingeventquantity',
unique_together=set([('event', 'line')]),
),
migrations.AddField(
model_name='shippingevent',
name='event_type',
field=models.ForeignKey(verbose_name='Event Type', to='order.ShippingEventType', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='shippingevent',
name='lines',
field=models.ManyToManyField(related_name='shipping_events', verbose_name='Lines', to='order.Line', through='order.ShippingEventQuantity'),
preserve_default=True,
),
migrations.AddField(
model_name='shippingevent',
name='order',
field=models.ForeignKey(verbose_name='Order', related_name='shipping_events', to='order.Order', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AlterUniqueTogether(
name='paymenteventquantity',
unique_together=set([('event', 'line')]),
),
migrations.AddField(
model_name='paymentevent',
name='event_type',
field=models.ForeignKey(verbose_name='Event Type', to='order.PaymentEventType', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='paymentevent',
name='lines',
field=models.ManyToManyField(through='order.PaymentEventQuantity', verbose_name='Lines', to='order.Line'),
preserve_default=True,
),
migrations.AddField(
model_name='paymentevent',
name='order',
field=models.ForeignKey(verbose_name='Order', related_name='payment_events', to='order.Order', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='paymentevent',
name='shipping_event',
field=models.ForeignKey(related_name='payment_events', to='order.ShippingEvent', null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='order',
name='shipping_address',
field=models.ForeignKey(null=True, verbose_name='Shipping Address', on_delete=django.db.models.deletion.SET_NULL, to='order.ShippingAddress', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='order',
name='site',
field=models.ForeignKey(verbose_name='Site', on_delete=django.db.models.deletion.SET_NULL, to='sites.Site', null=True),
preserve_default=True,
),
migrations.AddField(
model_name='order',
name='user',
field=models.ForeignKey(null=True, verbose_name='User', on_delete=django.db.models.deletion.SET_NULL, related_name='orders', to=settings.AUTH_USER_MODEL, blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='lineprice',
name='order',
field=models.ForeignKey(verbose_name='Option', related_name='line_prices', to='order.Order', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='line',
name='order',
field=models.ForeignKey(verbose_name='Order', related_name='lines', to='order.Order', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='line',
name='partner',
field=models.ForeignKey(null=True, verbose_name='Partner', on_delete=django.db.models.deletion.SET_NULL, related_name='order_lines', to='partner.Partner', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='line',
name='product',
field=models.ForeignKey(null=True, verbose_name='Product', on_delete=django.db.models.deletion.SET_NULL, to='catalogue.Product', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='line',
name='stockrecord',
field=models.ForeignKey(null=True, verbose_name='Stock record', on_delete=django.db.models.deletion.SET_NULL, to='partner.StockRecord', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='communicationevent',
name='order',
field=models.ForeignKey(verbose_name='Order', related_name='communication_events', to='order.Order', on_delete=models.CASCADE),
preserve_default=True,
),
]
| agpl-3.0 |
Mrmaxmeier/BombSquad-Community-Mod-Manager | mods/Basketball.py | 1 | 19339 | #Basketball
import bs
import bsUtils
import math
import random
# This game is played with the same rules as the classic American sport, Bombsquad style!
# Featuring: a hoop, fouls, foul shots, jump-balls, three-pointers, a referee, and two teams ready to duke it out.
# Dedicated to - David
def bsGetAPIVersion():
    # BombSquad mod-API version this mod targets.
    return 4
def bsGetGames():
    # Game activities exported by this mod.
    return [Basketball]
class ImpactMessage(object):
    """Delivered to the ball's node when it collides with the floor or
    another object (see BasketBallFactory); triggers shot evaluation."""
    pass
class Referee(bs.SpazBot):
    """Passive black-and-white bot that stands in as the match referee.

    All aggression parameters are zeroed or pushed out of range so the
    bot never charges, punches or throws.
    """
    character = 'Bernard'
    chargeDistMax = 9999
    throwDistMin = 9999
    throwDistMax = 9999
    color=(0,0,0)
    highlight=(1,1,1)
    punchiness = 0.0
    chargeSpeedMin = 0.0
    chargeSpeedMax = 0.0
class Hoop(bs.Actor):
    """Visual hoop: three animated locator rings plus a light.

    Purely cosmetic -- scoring is done by coordinate checks in
    Basketball.handleShot, not by collision with these nodes.
    """
    def __init__(self,position=(0,5,-8),color=(1,1,1)):
        self._r1 = 0.7        # target ring radius
        self._rFudge = 0.15   # tolerance margin (not referenced in this file chunk)
        bs.Actor.__init__(self)
        self._position = bs.Vector(*position)
        self.color = color
        # Ring centers: hoop center plus one unit to each side on x.
        p1 = position
        p2 = (position[0]+1,position[1],position[2])
        p3 = (position[0]-1,position[1],position[2])
        showInSpace = False
        self._hit = False
        n1 = bs.newNode('locator',attrs={'shape':'circle','position':p1,
                                         'color':self.color,'opacity':0.5,
                                         'drawBeauty':showInSpace,'additive':True})
        n2 = bs.newNode('locator',attrs={'shape':'circle','position':p2,
                                         'color':self.color,'opacity':0.5,
                                         'drawBeauty':showInSpace,'additive':True})
        n3 = bs.newNode('locator',attrs={'shape':'circle','position':p3,
                                         'color':self.color,'opacity':0.5,
                                         'drawBeauty':showInSpace,'additive':True})
        n4 = bs.newNode('light',attrs={'color':self.color,'position':p1,'intensity':.5})
        # Grow the rings in from zero over 200ms.
        bs.animateArray(n1,'size',1,{0:[0.0],200:[self._r1*2.0]})
        bs.animateArray(n2,'size',1,{0:[0.0],200:[self._r1*2.0]})
        bs.animateArray(n3,'size',1,{0:[0.0],200:[self._r1*2.0]})
        self._nodes = [n1,n2,n3,n4]
class ThreePointLine(bs.Actor):
    """Faint circle outline (radius 6 around the hoop's floor point at
    (0,-8)) marking the three-point arc; matches the radius-36 squared
    check in Basketball.checkThreePoint."""
    def __init__(self):
        bs.Actor.__init__(self)
        r1 = 6
        n1 = bs.newNode('locator',attrs={'shape':'circleOutline','position':(0,4,-8),'color':(1,1,1),'opacity':.3,'drawBeauty':False,'additive':True})
        self._nodes = [n1]
        # Expand the outline in over 200ms, starting after a 50ms delay.
        bs.animateArray(n1,'size',1,{50:[0.0],250:[r1*2.0]})
class BasketBallFactory(bs.BombFactory):
    """Bomb factory extended with a material that reports ball impacts.

    When the ball and the thing it touches are both at least 200ms old
    and the other object has footing or object material, an
    ImpactMessage is sent to the ball's node at contact time.
    """
    def __init__(self):
        self.basketBallMaterial = bs.Material()
        self.basketBallMaterial.addActions(conditions=(('weAreOlderThan',200),
                                                       'and',('theyAreOlderThan',200),
                                                       'and',('evalColliding',),
                                                       'and',(('theyHaveMaterial',bs.getSharedObject('footingMaterial')),
                                                              'or',('theyHaveMaterial',bs.getSharedObject('objectMaterial')))),
                                           actions=(('message','ourNode','atConnect',ImpactMessage())))
        bs.BombFactory.__init__(self)
class Baller(bs.PlayerSpaz):
    """Player spaz with bombs disabled that records its position when
    picking up (used later for two- vs three-point scoring)."""
    def onBombPress(self):
        # Bombs are disabled in basketball.
        pass
    def onPickUpPress(self):
        bs.PlayerSpaz.onPickUpPress(self)
        # Remember where the spaz stood at pickup time; Basketball.handleShot
        # reads this back via ball.heldLast.actor._pos.
        self.node.getDelegate()._pos = self.node.positionCenter
class BasketBomb(bs.Bomb):
    """The game ball: a bs.Bomb dressed up as a basketball.

    It never explodes; impacts raise ImpactMessage so the activity can
    score shots, and pickup/drop events track possession and detect
    jump-ball situations.
    """
    def __init__(self,position=(0,1,0),velocity=(0,0,0),bombType='normal',blastRadius=2.0,sourcePlayer=None,owner=None):
        # NOTE: deliberately skips bs.Bomb.__init__ and builds its own
        # 'prop' node so the ball gets custom materials and appearance.
        bs.Actor.__init__(self)
        # True while the ball is held (used for jump-ball detection).
        self.up = False
        factory = BasketBallFactory()
        self.bombType = 'basketball'
        self._exploded = False
        self.blastRadius = blastRadius
        self._explodeCallbacks = []
        self.sourcePlayer = sourcePlayer
        self.hitType = 'impact'
        self.hitSubType = 'basketball'
        # NOTE(review): the 'owner' argument is discarded and replaced with an
        # empty node reference -- confirm this is intentional.
        owner = bs.Node(None)
        self.owner = owner
        materials = (factory.bombMaterial, bs.getSharedObject('objectMaterial'))
        materials = materials + (factory.normalSoundMaterial,)
        materials = materials + (factory.basketBallMaterial,)
        self.node = bs.newNode('prop',
                               delegate=self,
                               attrs={'position':position,
                                      'velocity':velocity,
                                      'body':'sphere',
                                      'model':factory.bombModel,
                                      'shadowSize':0.3,
                                      'colorTexture':bs.getTexture('bonesColorMask'),
                                      'reflection':'soft',
                                      'reflectionScale':[1.5],
                                      'materials':materials})
        # Pop-in animation on spawn.
        bsUtils.animate(self.node,"modelScale",{0:0, 200:1.3, 260:1})
    def handleMessage(self, m):
        if isinstance(m, bs.OutOfBoundsMessage):
            # Ball left the map: give it to the team that did NOT have it.
            self.getActivity().respawnBall((not self.getActivity().possession))
            bs.Bomb.handleMessage(self, m)
        elif isinstance(m, bs.PickedUpMessage):
            # Track the last holder and which team has possession
            # (teams[0] -> True, teams[1] -> False).
            self.heldLast = m.node.getDelegate().getPlayer()
            self.getActivity().heldLast = self.heldLast
            if self.heldLast in self.getActivity().teams[0].players: self.getActivity().possession = True
            else: self.getActivity().possession = False
            bs.Bomb.handleMessage(self, m)
            # Picked up again without an intervening drop: start the
            # jump-ball countdown.
            if self.up == True:
                activity = self.getActivity()
                bs.gameTimer(3000,bs.WeakCall(activity.jumpBall))
            self.up = True
        elif isinstance(m, ImpactMessage): self.getActivity().handleShot(self)
        elif isinstance(m, bs.DroppedMessage): self.up = False
        else: bs.Bomb.handleMessage(self, m)
class Basketball(bs.TeamGameActivity):
    @classmethod
    def getName(cls):
        # Display name shown in the game list.
        return "Basketball"
    @classmethod
    def getDescription(cls, sessionType):
        # Short blurb shown under the game name.
        return "A classic sport, Bombsquad style!"
    @classmethod
    def getScoreInfo(cls):
        # Scores are plain point totals.
        return{'scoreType':'points'}
    @classmethod
    def getSettings(cls, sessionType):
        """Options presented in the game-configuration UI."""
        return [("Epic Mode", {'default': False}),
                ("Enable Running", {'default': True}),
                ("Enable Jumping", {'default': True}),
                ("Play To: ", {
                    'choices': [
                        ('1 point', 1),
                        ('11 points', 11),
                        ('21 points', 21),
                        ('45 points', 45),
                        ('100 points', 100)
                    ],
                    'default': 21})]
    @classmethod
    def getSupportedMaps(cls, sessionType):
        # Geometry (hoop at (0,5,-8), benches at x=+/-6.5) is tuned for
        # the Courtyard map only.
        return ['Courtyard']
@classmethod
def supportsSessionType(cls, sessionType):
return True if issubclass(sessionType, bs.TeamsSession) else False
    def __init__(self,settings):
        """Create scoreboard, bench lights, hoop, three-point line and ref."""
        bs.TeamGameActivity.__init__(self,settings)
        if self.settings['Epic Mode']: self._isSlowMotion = True
        # Credit line pinned to the bottom of the screen.
        self.info = bs.NodeActor(bs.newNode('text',
                                            attrs={'vAttach': 'bottom',
                                                   'hAlign': 'center',
                                                   'vrDepth': 0,
                                                   'color': (0,.2,0),
                                                   'shadow': 1.0,
                                                   'flatness': 1.0,
                                                   'position': (0,0),
                                                   'scale': 0.8,
                                                   'text': "Created by MattZ45986 on Github",
                                                   }))
        # True -> blue team (teams[0]) has possession; False -> red team.
        self.possession = True
        self.heldLast = None      # player who last held the ball
        self.fouled = False       # a foul-shot sequence is in progress
        self.firstFoul = False    # first of the two foul shots is running
        self.jb = True            # alternates which side wins jump-balls
        self.blueBench = bs.newNode('light', attrs={
            'color':(0,0,1),'intensity':1,'position':(-6.5,0,-2)})
        self.redBench = bs.newNode('light', attrs={
            'color':(1,0,0),'intensity':1,'position':(6.5,0,-2)})
        self._bots = bs.BotSet()
        self.hoop = Hoop((0,5,-8), (1,1,1))
        self.threePointLine = ThreePointLine().autoRetain()
        self._scoredis = bs.ScoreBoard()
        # NOTE: this stores the Referee *class*; BotSet.spawnBot takes the
        # class and instantiates the bot itself.
        self.referee = Referee
        bs.gameTimer(10,bs.Call(self._bots.spawnBot,self.referee,pos=(-6,3,-6),spawnTime=1))
    def onTransitionIn(self):
        # Use the 'Sports' soundtrack while transitioning into the game.
        bs.TeamGameActivity.onTransitionIn(self,music='Sports')
def onBegin(self):
bs.TeamGameActivity.onBegin(self)
s = self.settings
for player in self.players:
player.actor.connectControlsToPlayer(enableBomb=False, enableRun = s["Enable Running"], enableJump = s["Enable Jumping"])
player.sessionData['fouls'] = 0
self.respawnBall(None)
self.teams[0].gameData['score'] = 0
self.teams[1].gameData['score'] = 0
self._scoredis.setTeamValue(self.teams[0],self.teams[1].gameData['score'])
self._scoredis.setTeamValue(self.teams[1],self.teams[1].gameData['score'])
self.updateScore()
self.checkEnd()
def spawnPlayerSpaz(self,player,position=(0,5,-3),angle=None, killedDuringFoulShots = False):
name = player.getName()
color = player.color
highlight = player.highlight
spaz = Baller(color=color,
highlight=highlight,
character=player.character,
player=player)
player.setActor(spaz)
if player in self.teams[0].players: position = (-6.5,3.2,(random.random()*5)-4.5)
else: position = (6.5,3.2,(random.random()*5)-4.5)
if self.fouled == True and killedDuringFoulShots == False: position = (0,3.2,-3)
s = self.settings
player.actor.connectControlsToPlayer(enableBomb=False, enableRun = s["Enable Running"], enableJump = s["Enable Jumping"])
spaz.handleMessage(bs.StandMessage(position,90))
def respawnBall(self, owner):
if owner == True:
self.basketball = BasketBomb(position=(-6,5,-3)).autoRetain()
elif owner == False:
self.basketball = BasketBomb(position=(6,5,-3)).autoRetain()
else:
self.basketball = BasketBomb(position=(0,5,-2.5)).autoRetain()
def handleMessage(self, m):
if isinstance(m, bs.SpazBotDeathMessage):
if m.killerPlayer in self.teams[0].players:
results = bs.TeamGameResults()
results.setTeamScore(self.teams[0],0)
results.setTeamScore(self.teams[1],100)
self.end(results=results)
bs.screenMessage("Don't take it out on the ref!", color=(1,0,0))
elif m.killerPlayer in self.teams[1].players:
results = bs.TeamGameResults()
results.setTeamScore(self.teams[1],0)
results.setTeamScore(self.teams[0],100)
self.end(results=results)
bs.screenMessage("Don't take it out on the ref!", color=(0,0,1))
elif isinstance(m, bs.PlayerSpazDeathMessage):
if m.killed:
if m.spaz.getPlayer() in self.teams[0].players:
team = self.teams[0]
elif m.spaz.getPlayer() in self.teams[1].players: team = self.teams[1]
if m.killerPlayer not in team.players:
m.killerPlayer.sessionData['fouls'] += 1
m.killerPlayer.actor.setScoreText("FOUL " + str(m.killerPlayer.sessionData['fouls']))
bs.playSound(bs.getSound('bearDeath'))
if m.killerPlayer.sessionData['fouls'] == 3: self.foulOut(m.killerPlayer)
if self.fouled == True:
self.spawnPlayerSpaz(player=m.spaz.getPlayer(),killedDuringFoulShots=True)
return
self.fouled = True
self.giveFoulShots(m.spaz)
elif m.spaz.getPlayer().sessionData['fouls'] < 3: self.respawnPlayer(m.spaz.getPlayer())
elif m.spaz.getPlayer().sessionData['fouls'] < 3: self.respawnPlayer(m.spaz.getPlayer())
s = self.settings
else: bs.TeamGameActivity.handleMessage(self, m)
    def giveFoulShots(self, player):
        """Start a foul-shot sequence for the fouled player.

        *player* is the dead spaz actor.  Everyone else is frozen on their
        bench, the shooter is respawned on the foul line with punching
        disabled and holding a fresh ball, and the second shot is scheduled
        for 6 seconds later via continueFoulShots.
        """
        for p in self.players:
            p.actor.disconnectControlsFromPlayer()
            if p in self.teams[0].players and p != player: p.actor.node.handleMessage('stand',-6.5,3.2,(random.random()*5)-4.5, 0)
            if p in self.teams[1].players and p != player: p.actor.node.handleMessage('stand',6.5,3.2,(random.random()*5)-4.5, 0)
        self.spawnPlayerSpaz(player.getPlayer())
        # Re-find the freshly spawned actor by name (spawnPlayerSpaz replaced it).
        name = player.getPlayer().getName()
        for p in self.players:
            if p.getName() == name:
                player = p.actor
        s = self.settings
        player.connectControlsToPlayer(enableBomb=False, enableRun = s["Enable Running"], enableJump = s["Enable Jumping"],enablePunch = False)
        self.firstFoul = True
        self.basketball.node.delete()
        self.respawnBall(None)
        sound = bs.getSound('announceTwo')
        bs.gameTimer(1000, bs.Call(bs.playSound,sound))
        player.node.handleMessage('stand',0,3.2,-3, 0)
        # Auto-grab then release so the shooter starts holding the ball.
        player.onPickUpPress()
        player.onPickUpRelease()
        player.setScoreText("5 seconds to shoot")
        bs.gameTimer(6000, bs.Call(self.continueFoulShots,player))
    def continueFoulShots(self, player):
        """Second foul shot: reset the shooter to the line with a fresh
        ball, then schedule normal play to resume in 6 seconds.

        *player* is the shooter's actor (a Baller).
        """
        if self.basketball.node.exists(): self.basketball.node.delete()
        self.firstFoul = False
        player.node.handleMessage('stand',0,3.2,-3, 0)
        self.respawnBall(None)
        bs.playSound(bs.getSound('announceOne'))
        # Auto-grab then release so the shooter starts holding the ball.
        player.onPickUpPress()
        player.onPickUpRelease()
        bs.playSound(bs.getSound('bear1'))
        player.setScoreText("5 seconds to shoot")
        bs.gameTimer(6000, bs.Call(self.continuePlay))
    def continuePlay(self):
        """End the foul-shot sequence: re-enable everyone's controls, send
        them back to their benches, and give the ball to the other team."""
        self.fouled = False
        if self.basketball.node.exists(): self.basketball.node.delete()
        self.respawnBall(not self.possession)
        s = self.settings
        for player in self.players:
            player.actor.connectControlsToPlayer(enableBomb=False, enableRun = s["Enable Running"], enableJump = s["Enable Jumping"], enablePunch = True)
            if player in self.teams[0].players:
                player.actor.node.handleMessage('stand',-6.5,3.2,(random.random()*5)-4.5, 0)
            elif player in self.teams[1].players:
                player.actor.node.handleMessage('stand',6.5,3.2,(random.random()*5)-4.5, 0)
def foulOut(self, player):
player.actor.shatter()
player.actor.setScoreText("FOULED OUT")
    def jumpBall(self):
        """Fired 3s after a double-pickup was detected: if the ball is still
        held, call a jump ball -- reset everyone to their benches and re-toss
        the ball, alternating which side receives it (self.jb)."""
        ball = self.basketball
        if ball.up == True:
            self.basketball.heldLast.actor.setScoreText("Jump Ball")
            for player in self.teams[0].players:
                player.actor.node.handleMessage('stand',-6.5,3.2,(random.random()*5)-4.5, 0)
            for player in self.teams[1].players:
                player.actor.node.handleMessage('stand',6.5,3.2,(random.random()*5)-4.5, 0)
            ball.node.delete()
            self.respawnBall(not self.jb)
            self.jb = not self.jb
    def handleShot(self, ball):
        """Judge a ball impact: score if it landed inside the hoop zone.

        Goal-tending (shooter standing under the hoop) voids the shot and
        shatters the shooter.  During foul shots each make is worth 1 point;
        otherwise checkThreePoint awards 2 or 3.  The hoop is rebuilt in the
        scoring team's color after each made basket.
        """
        # The hoop's scoring box: |x| < 1.5, 4 < y < 5, -9 < z < -8.
        if ball.node.position[0] > -1.5 and ball.node.position[0] < 1.5:
            if ball.node.position[1] > 4 and ball.node.position[1] < 5:
                if ball.node.position[2] > -9 and ball.node.position[2] < -8:
                    if self.isTendingGoal(ball):
                        # Goal-tending: no points, possession flips, shooter shatters.
                        ball.node.delete()
                        self.respawnBall(not self.possession)
                        bs.playSound(bs.getSound('bearDeath'))
                        ball.heldLast.actor.shatter()
                        return
                    bs.playSound(bs.getSound('bear' +str(random.randint(1,4))))
                    # Tear down the old hoop; it is rebuilt below in team colors.
                    for node in self.hoop._nodes:
                        node.delete()
                    self.hoop = None
                    if not self.fouled:
                        if self.possession:
                            pts = self.checkThreePoint(ball)
                            self.teams[0].gameData['score'] += pts
                            ball.heldLast.actor.setScoreText(str(pts) + " Points")
                            self.hoop = Hoop((0,5,-8),(0,0,1))
                            for player in self.teams[0].players:
                                player.actor.node.handleMessage('stand',-6.5,3.2,(random.random()*5)-4.5, 0)
                        else:
                            pts = self.checkThreePoint(ball)
                            self.teams[1].gameData['score'] += pts
                            ball.heldLast.actor.setScoreText(str(pts) + " Points")
                            self.hoop = Hoop((0,5,-8),(1,0,0))
                            for player in self.teams[1].players:
                                player.actor.node.handleMessage('stand',6.5,3.2,(random.random()*5)-4.5, 0)
                        self.updateScore()
                        ball.node.delete()
                        self.respawnBall(not self.possession)
                    else:
                        # Foul shot: fixed 1 point; continueFoulShots/continuePlay
                        # handle the next ball, so none is respawned here.
                        if self.possession:
                            self.hoop = Hoop((0,5,-8),(0,0,1))
                            self.teams[0].gameData['score'] += 1
                        else:
                            self.hoop = Hoop((0,5,-8),(1,0,0))
                            self.teams[1].gameData['score'] += 1
                        ball.heldLast.actor.setScoreText("1 Point")
                        self.updateScore()
                        ball.node.delete()
def checkThreePoint(self, ball):
pos = ball.heldLast.actor._pos
if pos[0]*pos[0] + (pos[2]+8)*(pos[2]+8) >= 36: return 3
else: return 2
def isTendingGoal(self,ball):
pos = ball.heldLast.actor._pos
if pos[0] > -1.5 and pos[0] < 1.5:
if pos[2] > -9 and pos[2] < -8: return True
return False
    def updateScore(self):
        """Push each team's score to the scoreboard, then test for game end."""
        for team in self.teams:
            self._scoredis.setTeamValue(team,team.gameData['score'])
        self.checkEnd()
def checkEnd(self):
for team in self.teams:
i = 0
if team.gameData['score'] >= self.settings['Play To: ']: self.endGame()
for player in team.players:
if player.isAlive(): i = 1
if i == 0: self.endGame()
def endGame(self):
results = bs.TeamGameResults()
for team in self.teams:
results.setTeamScore(team, team.gameData['score'])
i = 0
for player in team.players:
if player.isAlive(): i = 1
if i == 0: results.setTeamScore(team, 0)
self.end(results=results)
| unlicense |
TobbeEdgeware/shaka-player | build/test.py | 1 | 3839 | #!/usr/bin/python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Runs unit and integrations tests on the library."""
import platform
import sys
import build
import gendeps
import shakaBuildHelpers
def run_tests_single(args):
  """Runs all the karma tests once.

  Returns a process exit code: 0 on success, non-zero on failure.
  """
  # Update node modules if needed.
  if not shakaBuildHelpers.update_node_modules():
    return 1
  # Generate dependencies and compile library.
  # This is required for the tests.
  if gendeps.gen_deps([]) != 0:
    return 1
  build_args = []
  if '--force' in args:
    build_args.append('--force')
    args.remove('--force')
  if '--no-build' in args:
    args.remove('--no-build')
  else:
    if build.main(build_args) != 0:
      return 1
  karma_path = shakaBuildHelpers.get_node_binary_path('karma')
  cmd = [karma_path, 'start']
  # Headless X wrapper for Linux machines without a display.
  if shakaBuildHelpers.is_linux() and '--use-xvfb' in args:
    cmd = ['xvfb-run', '--auto-servernum'] + cmd
  # Get the browsers supported on the local system.
  browsers = _get_browsers()
  if not browsers:
    print >> sys.stderr, 'Unrecognized system "%s"' % platform.uname()[0]
    return 1
  print 'Starting tests...'
  if not args:
    # Run tests in all available browsers.
    print 'Running with platform default:', '--browsers', browsers
    cmd_line = cmd + ['--browsers', browsers]
    return shakaBuildHelpers.execute_get_code(cmd_line)
  else:
    # Run with command-line arguments from the user.
    if '--browsers' not in args:
      print 'No --browsers specified.'
      print 'In this mode, browsers must be manually connected to karma.'
    cmd_line = cmd + args
    return shakaBuildHelpers.execute_get_code(cmd_line)
def run_tests_multiple(args):
"""Runs multiple iterations of the tests when --runs is set."""
index = args.index('--runs') + 1
if index == len(args) or args[index].startswith('--'):
print >> sys.stderr, 'Argument Error: --runs requires a value.'
return 1
try:
runs = int(args[index])
except ValueError:
print >> sys.stderr, 'Argument Error: --runs value must be an integer.'
return 1
if runs <= 0:
print >> sys.stderr, 'Argument Error: --runs value must be greater than 0.'
return 1
results = []
print '\nRunning the tests %d times.' % runs
for _ in range(runs):
results.append(run_tests_single(args))
print '\nAll runs completed.'
print '%d passed out of %d total runs.' % (results.count(0), len(results))
print 'Results (exit code): %r' % results
return all(result == 0 for result in results)
def run_tests(args):
  """Dispatches to the multi-run or single-run test driver."""
  multi = '--runs' in args
  return run_tests_multiple(args) if multi else run_tests_single(args)
def _get_browsers():
  """Uses the platform name to configure which browsers will be tested.

  Returns a comma-separated browser list, or None on unknown platforms.
  """
  if shakaBuildHelpers.is_linux():
    # For MP4 support on Linux Firefox, install gstreamer1.0-libav.
    # Opera on Linux only supports MP4 for Ubuntu 15.04+, so it is not in the
    # default list of browsers for Linux at this time.
    return 'Chrome,Firefox'
  if shakaBuildHelpers.is_darwin():
    return 'Chrome,Firefox,Safari'
  if shakaBuildHelpers.is_windows() or shakaBuildHelpers.is_cygwin():
    return 'Chrome,Firefox,IE'
  return None
if __name__ == '__main__':
shakaBuildHelpers.run_main(run_tests)
| apache-2.0 |
andrewsmedina/django | tests/signals/tests.py | 112 | 5272 | from __future__ import absolute_import
from django.db.models import signals
from django.dispatch import receiver
from django.test import TestCase
from django.utils import six
from .models import Person, Car
# #8285: signals can be any callable
class PostDeleteHandler(object):
    """Callable post_delete receiver (#8285: signals can be any callable).

    Each dispatch appends an (instance, pk-is-None) pair to the shared
    ``data`` list supplied at construction time.
    """

    def __init__(self, data):
        self.data = data

    def __call__(self, signal, sender, instance, **kwargs):
        entry = (instance, instance.id is None)
        self.data.append(entry)
class MyReceiver(object):
    """Receiver that disconnects itself from the signal on first dispatch
    (used to verify dispatching survives mid-send disconnects)."""
    def __init__(self, param):
        self.param = param
        self._run = False   # set True once the receiver has fired
    def __call__(self, signal, sender, **kwargs):
        self._run = True
        signal.disconnect(receiver=self, sender=sender)
class SignalTests(TestCase):
    """Exercises the model save/delete signal machinery (#8285, #9989)."""
    def test_basic(self):
        """Connects receivers of several styles (plain functions, a callable
        object, @receiver decorators with and without sender), verifies the
        payloads they get for saves/deletes, then disconnects everything."""
        # Save up the number of connected signals so that we can check at the
        # end that all the signals we register get properly unregistered (#9989)
        pre_signals = (
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
        )
        data = []
        def pre_save_test(signal, sender, instance, **kwargs):
            data.append(
                (instance, kwargs.get("raw", False))
            )
        signals.pre_save.connect(pre_save_test)
        def post_save_test(signal, sender, instance, **kwargs):
            data.append(
                (instance, kwargs.get("created"), kwargs.get("raw", False))
            )
        signals.post_save.connect(post_save_test)
        def pre_delete_test(signal, sender, instance, **kwargs):
            data.append(
                (instance, instance.id is None)
            )
        signals.pre_delete.connect(pre_delete_test)
        post_delete_test = PostDeleteHandler(data)
        signals.post_delete.connect(post_delete_test)
        # throw a decorator syntax receiver into the mix
        @receiver(signals.pre_save)
        def pre_save_decorator_test(signal, sender, instance, **kwargs):
            data.append(instance)
        @receiver(signals.pre_save, sender=Car)
        def pre_save_decorator_sender_test(signal, sender, instance, **kwargs):
            data.append(instance)
        p1 = Person(first_name="John", last_name="Smith")
        self.assertEqual(data, [])
        p1.save()
        self.assertEqual(data, [
            (p1, False),
            p1,
            (p1, True, False),
        ])
        data[:] = []
        p1.first_name = "Tom"
        p1.save()
        self.assertEqual(data, [
            (p1, False),
            p1,
            (p1, False, False),
        ])
        data[:] = []
        # Car signal (sender defined)
        c1 = Car(make="Volkswagon", model="Passat")
        c1.save()
        self.assertEqual(data, [
            (c1, False),
            c1,
            c1,
            (c1, True, False),
        ])
        data[:] = []
        # Calling an internal method purely so that we can trigger a "raw" save.
        p1.save_base(raw=True)
        self.assertEqual(data, [
            (p1, True),
            p1,
            (p1, False, True),
        ])
        data[:] = []
        p1.delete()
        self.assertEqual(data, [
            (p1, False),
            (p1, False),
        ])
        data[:] = []
        # Saving with an explicit (forced) primary key.
        p2 = Person(first_name="James", last_name="Jones")
        p2.id = 99999
        p2.save()
        self.assertEqual(data, [
            (p2, False),
            p2,
            (p2, True, False),
        ])
        data[:] = []
        p2.id = 99998
        p2.save()
        self.assertEqual(data, [
            (p2, False),
            p2,
            (p2, True, False),
        ])
        data[:] = []
        p2.delete()
        self.assertEqual(data, [
            (p2, False),
            (p2, False)
        ])
        self.assertQuerysetEqual(
            Person.objects.all(), [
                "James Jones",
            ],
            six.text_type
        )
        signals.post_delete.disconnect(post_delete_test)
        signals.pre_delete.disconnect(pre_delete_test)
        signals.post_save.disconnect(post_save_test)
        signals.pre_save.disconnect(pre_save_test)
        signals.pre_save.disconnect(pre_save_decorator_test)
        signals.pre_save.disconnect(pre_save_decorator_sender_test, sender=Car)
        # Check that all our signals got disconnected properly.
        post_signals = (
            len(signals.pre_save.receivers),
            len(signals.post_save.receivers),
            len(signals.pre_delete.receivers),
            len(signals.post_delete.receivers),
        )
        self.assertEqual(pre_signals, post_signals)
    def test_disconnect_in_dispatch(self):
        """
        Test that signals that disconnect when being called don't mess future
        dispatching.
        """
        a, b = MyReceiver(1), MyReceiver(2)
        signals.post_save.connect(sender=Person, receiver=a)
        signals.post_save.connect(sender=Person, receiver=b)
        p = Person.objects.create(first_name='John', last_name='Smith')
        self.assertTrue(a._run)
        self.assertTrue(b._run)
        self.assertEqual(signals.post_save.receivers, [])
| bsd-3-clause |
goddino/libjingle | trunk/third_party/libvpx/source/libvpx/third_party/googletest/src/test/gtest_color_test.py | 3259 | 4911 | #!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly determines whether to use colors."""
__author__ = 'wan@google.com (Zhanyong Wan)'
import os
import gtest_test_utils
# Bug fix: this was 'IS_WINDOWS = os.name = 'nt'' -- a chained assignment
# that both set IS_WINDOWS to the truthy string 'nt' on every platform AND
# clobbered os.name itself.  A comparison was clearly intended.
IS_WINDOWS = os.name == 'nt'

COLOR_ENV_VAR = 'GTEST_COLOR'
COLOR_FLAG = 'gtest_color'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_color_test_')
def SetEnvVar(env_var, value):
  """Sets the env variable to 'value'; unsets it when 'value' is None."""
  if value is None:
    # Removing an absent variable is a no-op, matching the original
    # 'elif env_var in os.environ: del' behavior.
    os.environ.pop(env_var, None)
  else:
    os.environ[env_var] = value
def UsesColor(term, color_env_var, color_flag):
  """Runs gtest_color_test_ and returns its exit code."""
  # Configure the environment/flags the child process will see.
  SetEnvVar('TERM', term)
  SetEnvVar(COLOR_ENV_VAR, color_env_var)
  if color_flag is None:
    args = []
  else:
    args = ['--%s=%s' % (COLOR_FLAG, color_flag)]
  p = gtest_test_utils.Subprocess([COMMAND] + args)
  # NOTE(review): callers treat a truthy return as "color was used", which
  # implies the test binary exits non-zero when it uses color -- confirm
  # against gtest_color_test_.cc.
  return not p.exited or p.exit_code
class GTestColorTest(gtest_test_utils.TestCase):
  """End-to-end checks of color-mode selection from the TERM environment
  variable, the GTEST_COLOR environment variable and the --gtest_color
  flag, including their precedence when combined."""

  def testNoEnvVarNoFlag(self):
    """Tests the case when there's neither GTEST_COLOR nor --gtest_color."""
    if not IS_WINDOWS:
      self.assert_(not UsesColor('dumb', None, None))
      self.assert_(not UsesColor('emacs', None, None))
      self.assert_(not UsesColor('xterm-mono', None, None))
      self.assert_(not UsesColor('unknown', None, None))
      self.assert_(not UsesColor(None, None, None))
    self.assert_(UsesColor('linux', None, None))
    self.assert_(UsesColor('cygwin', None, None))
    self.assert_(UsesColor('xterm', None, None))
    self.assert_(UsesColor('xterm-color', None, None))
    self.assert_(UsesColor('xterm-256color', None, None))

  def testFlagOnly(self):
    """Tests the case when there's --gtest_color but not GTEST_COLOR."""
    self.assert_(not UsesColor('dumb', None, 'no'))
    self.assert_(not UsesColor('xterm-color', None, 'no'))
    if not IS_WINDOWS:
      self.assert_(not UsesColor('emacs', None, 'auto'))
    self.assert_(UsesColor('xterm', None, 'auto'))
    self.assert_(UsesColor('dumb', None, 'yes'))
    self.assert_(UsesColor('xterm', None, 'yes'))

  def testEnvVarOnly(self):
    """Tests the case when there's GTEST_COLOR but not --gtest_color."""
    self.assert_(not UsesColor('dumb', 'no', None))
    self.assert_(not UsesColor('xterm-color', 'no', None))
    if not IS_WINDOWS:
      self.assert_(not UsesColor('dumb', 'auto', None))
    self.assert_(UsesColor('xterm-color', 'auto', None))
    self.assert_(UsesColor('dumb', 'yes', None))
    self.assert_(UsesColor('xterm-color', 'yes', None))

  def testEnvVarAndFlag(self):
    """Tests the case when there are both GTEST_COLOR and --gtest_color."""
    # The flag should take precedence over the environment variable.
    self.assert_(not UsesColor('xterm-color', 'no', 'no'))
    self.assert_(UsesColor('dumb', 'no', 'yes'))
    self.assert_(UsesColor('xterm-color', 'no', 'auto'))

  def testAliasesOfYesAndNo(self):
    """Tests using aliases in specifying --gtest_color."""
    self.assert_(UsesColor('dumb', None, 'true'))
    self.assert_(UsesColor('dumb', None, 'YES'))
    self.assert_(UsesColor('dumb', None, 'T'))
    self.assert_(UsesColor('dumb', None, '1'))
    self.assert_(not UsesColor('xterm', None, 'f'))
    self.assert_(not UsesColor('xterm', None, 'false'))
    self.assert_(not UsesColor('xterm', None, '0'))
    self.assert_(not UsesColor('xterm', None, 'unknown'))
if __name__ == '__main__':
gtest_test_utils.Main()
| bsd-3-clause |
zhukaixy/kbengine | kbe/src/lib/python/Tools/pybench/pybench.py | 45 | 32619 | #!/usr/local/bin/python -O
""" A Python Benchmark Suite
"""
# Note: Please keep this module compatible to Python 2.6.
#
# Tests may include features in later Python versions, but these
# should then be embedded in try-except clauses in the configuration
# module Setup.py.
#
from __future__ import print_function
# pybench Copyright
__copyright__ = """\
Copyright (c), 1997-2006, Marc-Andre Lemburg (mal@lemburg.com)
Copyright (c), 2000-2006, eGenix.com Software GmbH (info@egenix.com)
All Rights Reserved.
Permission to use, copy, modify, and distribute this software and its
documentation for any purpose and without fee or royalty is hereby
granted, provided that the above copyright notice appear in all copies
and that both that copyright notice and this permission notice appear
in supporting documentation or portions thereof, including
modifications, that you make.
THE AUTHOR MARC-ANDRE LEMBURG DISCLAIMS ALL WARRANTIES WITH REGARD TO
THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
"""
import sys
import time
import platform
from CommandLine import *
try:
import cPickle
pickle = cPickle
except ImportError:
import pickle
# Version number; version history: see README file !
__version__ = '2.1'
### Constants

# Second fractions
MILLI_SECONDS = 1e3
MICRO_SECONDS = 1e6

# Percent unit
PERCENT = 100

# Horizontal line length
LINE = 79

# Minimum test run-time
MIN_TEST_RUNTIME = 1e-3

# Number of calibration runs to use for calibrating the tests
CALIBRATION_RUNS = 20

# Number of calibration loops to run for each calibration run
CALIBRATION_LOOPS = 20

# Allow skipping calibration ?
ALLOW_SKIPPING_CALIBRATION = 1

# Timer types (names accepted by get_timer())
TIMER_TIME_TIME = 'time.time'
TIMER_TIME_PROCESS_TIME = 'time.process_time'
TIMER_TIME_PERF_COUNTER = 'time.perf_counter'
TIMER_TIME_CLOCK = 'time.clock'
TIMER_SYSTIMES_PROCESSTIME = 'systimes.processtime'

# Choose platform default timer
# NOTE(review): time.clock was removed in Python 3.8; the clock fallback
# below only matters on old Windows Pythons lacking time.perf_counter.
if hasattr(time, 'perf_counter'):
    TIMER_PLATFORM_DEFAULT = TIMER_TIME_PERF_COUNTER
elif sys.platform[:3] == 'win':
    # On WinXP this has 2.5ms resolution
    TIMER_PLATFORM_DEFAULT = TIMER_TIME_CLOCK
else:
    # On Linux this has 1ms resolution
    TIMER_PLATFORM_DEFAULT = TIMER_TIME_TIME

# Print debug information ?
_debug = 0
### Helpers
def get_timer(timertype):
    """Resolve a TIMER_* constant name to the matching timer callable.

    Raises TypeError for unknown names.  The systimes backend is imported
    lazily since it is an optional helper module.
    """
    if timertype == TIMER_SYSTIMES_PROCESSTIME:
        import systimes
        return systimes.processtime
    # Attribute names are looked up lazily via getattr so that e.g. a
    # missing time.clock (removed in Python 3.8) only fails when requested.
    attr_names = {
        TIMER_TIME_TIME: 'time',
        TIMER_TIME_PROCESS_TIME: 'process_time',
        TIMER_TIME_PERF_COUNTER: 'perf_counter',
        TIMER_TIME_CLOCK: 'clock',
    }
    try:
        attr = attr_names[timertype]
    except KeyError:
        raise TypeError('unknown timer type: %s' % timertype)
    return getattr(time, attr)
def get_machine_details():
    """Collect interpreter and platform details as a dict of strings.

    Consumers read these via d.get(key, 'n/a'), so every value is
    best-effort.
    """
    if _debug:
        print('Getting machine details...')
    buildno, builddate = platform.python_build()
    # Removed an unused 'python = platform.python_version()' local; the
    # dict below calls platform.python_version() directly.
    # NOTE(review): sys.maxunicode == 65535 distinguished narrow (UCS2)
    # from wide (UCS4) builds only on Python <= 3.2; with PEP 393 (3.3+)
    # this always reports UCS4.
    if sys.maxunicode == 65535:
        # UCS2 build (standard)
        unitype = 'UCS2'
    else:
        # UCS4 build (most recent Linux distros)
        unitype = 'UCS4'
    bits, linkage = platform.architecture()
    return {
        'platform': platform.platform(),
        'processor': platform.processor(),
        'executable': sys.executable,
        'implementation': getattr(platform, 'python_implementation',
                                  lambda:'n/a')(),
        'python': platform.python_version(),
        'compiler': platform.python_compiler(),
        'buildno': buildno,
        'builddate': builddate,
        'unicode': unitype,
        'bits': bits,
        }
def print_machine_details(d, indent=''):
    """Print a machine-details dict (as built by get_machine_details),
    one field per line, each line prefixed with *indent*."""
    def field(key):
        return d.get(key, 'n/a')
    lines = [
        'Machine Details:',
        '   Platform ID:    %s' % field('platform'),
        '   Processor:      %s' % field('processor'),
        '',
        'Python:',
        '   Implementation: %s' % field('implementation'),
        '   Executable:     %s' % field('executable'),
        '   Version:        %s' % field('python'),
        '   Compiler:       %s' % field('compiler'),
        '   Bits:           %s' % field('bits'),
        '   Build:          %s (#%s)' % (field('builddate'), field('buildno')),
        '   Unicode:        %s' % field('unicode'),
        ]
    sep = '\n' + indent
    print(indent + sep.join(lines) + '\n')
### Test baseclass
class Test:

    """ All test must have this class as baseclass. It provides
        the necessary interface to the benchmark machinery.

        The tests must set .rounds to a value high enough to let the
        test run between 20-50 seconds. This is needed because
        clock()-timing only gives rather inaccurate values (on Linux,
        for example, it is accurate to a few hundreths of a
        second). If you don't want to wait that long, use a warp
        factor larger than 1.

        It is also important to set the .operations variable to a
        value representing the number of "virtual operations" done per
        call of .run().

        If you change a test in some way, don't forget to increase
        its version number.
    """

    ### Instance variables that each test should override

    # Version number of the test as float (x.yy); this is important
    # for comparisons of benchmark runs - tests with unequal version
    # number will not get compared.
    version = 2.1

    # The number of abstract operations done in each round of the
    # test. An operation is the basic unit of what you want to
    # measure. The benchmark will output the amount of run-time per
    # operation. Note that in order to raise the measured timings
    # significantly above noise level, it is often required to repeat
    # sets of operations more than once per test round. The measured
    # overhead per test round should be less than 1 second.
    operations = 1

    # Number of rounds to execute per test run. This should be
    # adjusted to a figure that results in a test run-time of between
    # 1-2 seconds.
    rounds = 100000

    ### Internal variables

    # Mark this class as implementing a test
    is_a_test = 1

    # Last timing: (real, run, overhead)
    last_timing = (0.0, 0.0, 0.0)

    # Warp factor to use for this test
    warp = 1

    # Number of calibration runs to use
    calibration_runs = CALIBRATION_RUNS

    # List of calibration timings
    overhead_times = None

    # List of test run timings
    times = []

    # Timer used for the benchmark
    timer = TIMER_PLATFORM_DEFAULT

    def __init__(self, warp=None, calibration_runs=None, timer=None):
        """Initialize the test, optionally overriding the class
           defaults for warp factor, calibration runs and timer.

           Raises ValueError if the warp factor reduces the number of
           rounds to zero, or if calibration is disabled but required.
        """
        # Set parameters
        if warp is not None:
            self.rounds = int(self.rounds / warp)
            if self.rounds == 0:
                raise ValueError('warp factor set too high')
            self.warp = warp
        if calibration_runs is not None:
            if (not ALLOW_SKIPPING_CALIBRATION and
                calibration_runs < 1):
                raise ValueError('at least one calibration run is required')
            self.calibration_runs = calibration_runs
        if timer is not None:
            self.timer = timer

        # Init variables
        self.times = []
        self.overhead_times = []

        # We want these to be in the instance dict, so that pickle
        # saves them
        self.version = self.version
        self.operations = self.operations
        self.rounds = self.rounds

    def get_timer(self):

        """ Return the timer function to use for the test.
        """
        return get_timer(self.timer)

    def compatible(self, other):

        """ Return 1/0 depending on whether the test is compatible
            with the other Test instance or not.
        """
        if self.version != other.version:
            return 0
        if self.rounds != other.rounds:
            return 0
        return 1

    def calibrate_test(self):
        """Measure the per-loop overhead of the test setup and store
           the timings in .overhead_times.

           Raises ValueError when the measured overhead is negative or
           its spread across runs is too large to be trustworthy.
        """
        if self.calibration_runs == 0:
            self.overhead_times = [0.0]
            return

        calibrate = self.calibrate
        timer = self.get_timer()
        calibration_loops = range(CALIBRATION_LOOPS)

        # Time the calibration loop overhead
        prep_times = []
        for i in range(self.calibration_runs):
            t = timer()
            # Inner loop variable renamed so it no longer shadows the
            # outer run counter.
            for _ in calibration_loops:
                pass
            t = timer() - t
            prep_times.append(t / CALIBRATION_LOOPS)
        min_prep_time = min(prep_times)
        if _debug:
            print()
            print('Calib. prep time = %.6fms' % (
                min_prep_time * MILLI_SECONDS))

        # Time the calibration runs (doing CALIBRATION_LOOPS loops of
        # .calibrate() method calls each)
        for i in range(self.calibration_runs):
            t = timer()
            for _ in calibration_loops:
                calibrate()
            t = timer() - t
            self.overhead_times.append(t / CALIBRATION_LOOPS
                                        - min_prep_time)

        # Check the measured times
        min_overhead = min(self.overhead_times)
        max_overhead = max(self.overhead_times)
        if _debug:
            print('Calib. overhead time = %.6fms' % (
                min_overhead * MILLI_SECONDS))
        if min_overhead < 0.0:
            raise ValueError('calibration setup did not work')
        if max_overhead - min_overhead > 0.1:
            raise ValueError(
                'overhead calibration timing range too inaccurate: '
                '%r - %r' % (min_overhead, max_overhead))

    def run(self):

        """ Run the test in two phases: first calibrate, then
            do the actual test. Be careful to keep the calibration
            timing low w/r to the test timing.
        """
        test = self.test
        timer = self.get_timer()

        # Get calibration
        min_overhead = min(self.overhead_times)

        # Test run
        t = timer()
        test()
        t = timer() - t
        if t < MIN_TEST_RUNTIME:
            raise ValueError('warp factor too high: '
                             'test times are < 10ms')
        eff_time = t - min_overhead
        if eff_time < 0:
            raise ValueError('wrong calibration')
        self.last_timing = (eff_time, t, min_overhead)
        self.times.append(eff_time)

    def calibrate(self):

        """ Calibrate the test.

            This method should execute everything that is needed to
            setup and run the test - except for the actual operations
            that you intend to measure. pybench uses this method to
            measure the test implementation overhead.
        """
        return

    def test(self):

        """ Run the test.

            The test needs to run self.rounds executing
            self.operations number of operations each.
        """
        return

    def stat(self):

        """ Return test run statistics as tuple:

            (minimum run time,
             average run time,
             total run time,
             average time per operation,
             minimum overhead time)
        """
        runs = len(self.times)
        if runs == 0:
            # Bug fix: the empty case used to return only four values,
            # which made callers unpacking the documented five-tuple
            # fail with ValueError for tests that never ran.
            return 0.0, 0.0, 0.0, 0.0, 0.0
        min_time = min(self.times)
        total_time = sum(self.times)
        avg_time = total_time / float(runs)
        operation_avg = total_time / float(runs
                                           * self.rounds
                                           * self.operations)
        if self.overhead_times:
            min_overhead = min(self.overhead_times)
        else:
            min_overhead = self.last_timing[2]
        return min_time, avg_time, total_time, operation_avg, min_overhead
### Load Setup
# This has to be done after the definition of the Test class, since
# the Setup module will import subclasses using this class.
import Setup
### Benchmark base class
class Benchmark:

    """ Container that loads, calibrates and runs a set of Test
        instances and can print or compare the collected statistics.
    """

    # Name of the benchmark
    name = ''

    # Number of benchmark rounds to run
    rounds = 1

    # Warp factor use to run the tests
    warp = 1                    # Warp factor

    # Average benchmark round time
    roundtime = 0

    # Benchmark version number as float x.yy
    version = 2.1

    # Produce verbose output ?
    verbose = 0

    # Dictionary with the machine details
    machine_details = None

    # Timer used for the benchmark
    timer = TIMER_PLATFORM_DEFAULT

    # Number of calibration runs used per test.  Bug fix: without this
    # class-level default, constructing the benchmark with
    # calibration_runs=None left the attribute unset and .compatible()
    # and .load_tests() raised AttributeError.
    calibration_runs = CALIBRATION_RUNS

    def __init__(self, name, verbose=None, timer=None, warp=None,
                 calibration_runs=None):
        """Create a benchmark; a falsy *name* is replaced by the
           current timestamp.  Keyword arguments override the class
           defaults only when not None.
        """
        if name:
            self.name = name
        else:
            self.name = '%04i-%02i-%02i %02i:%02i:%02i' % \
                        (time.localtime(time.time())[:6])
        if verbose is not None:
            self.verbose = verbose
        if timer is not None:
            self.timer = timer
        if warp is not None:
            self.warp = warp
        if calibration_runs is not None:
            self.calibration_runs = calibration_runs

        # Init vars
        self.tests = {}
        if _debug:
            print('Getting machine details...')
        self.machine_details = get_machine_details()

        # Make .version an instance attribute to have it saved in the
        # Benchmark pickle
        self.version = self.version

    def get_timer(self):

        """ Return the timer function to use for the test.
        """
        return get_timer(self.timer)

    def compatible(self, other):

        """ Return 1/0 depending on whether the benchmark is
            compatible with the other Benchmark instance or not.
        """
        if self.version != other.version:
            return 0
        if (self.machine_details == other.machine_details and
            self.timer != other.timer):
            return 0
        if (self.calibration_runs == 0 and
            other.calibration_runs != 0):
            return 0
        if (self.calibration_runs != 0 and
            other.calibration_runs == 0):
            return 0
        return 1

    def load_tests(self, setupmod, limitnames=None):
        """Instantiate all Test subclasses found in *setupmod*,
           optionally restricted to names matching the compiled regex
           *limitnames*, and store them in self.tests.
        """
        # Add tests
        if self.verbose:
            print('Searching for tests ...')
            print('--------------------------------------')
        for testclass in setupmod.__dict__.values():
            if not hasattr(testclass, 'is_a_test'):
                continue
            name = testclass.__name__
            if name == 'Test':
                continue
            if (limitnames is not None and
                limitnames.search(name) is None):
                continue
            self.tests[name] = testclass(
                warp=self.warp,
                calibration_runs=self.calibration_runs,
                timer=self.timer)
        l = sorted(self.tests)
        if self.verbose:
            for name in l:
                print(' %s' % name)
            print('--------------------------------------')
            print(' %i tests found' % len(l))
            print()

    def calibrate(self):
        """Run the calibration phase for every loaded test."""
        print('Calibrating tests. Please wait...', end=' ')
        sys.stdout.flush()
        if self.verbose:
            print()
            print()
            print('Test min max')
            print('-' * LINE)
        tests = sorted(self.tests.items())
        for i in range(len(tests)):
            name, test = tests[i]
            test.calibrate_test()
            if self.verbose:
                print('%30s: %6.3fms %6.3fms' % \
                      (name,
                       min(test.overhead_times) * MILLI_SECONDS,
                       max(test.overhead_times) * MILLI_SECONDS))
        if self.verbose:
            print()
            print('Done with the calibration.')
        else:
            print('done.')
        print()

    def run(self):
        """Execute self.rounds rounds of all loaded tests and record
           the per-round effective times in self.roundtimes.
        """
        tests = sorted(self.tests.items())
        print('Running %i round(s) of the suite at warp factor %i:' % \
              (self.rounds, self.warp))
        print()
        self.roundtimes = []
        for i in range(self.rounds):
            if self.verbose:
                print(' Round %-25i effective absolute overhead' % (i+1))
            total_eff_time = 0.0
            for j in range(len(tests)):
                name, test = tests[j]
                if self.verbose:
                    print('%30s:' % name, end=' ')
                test.run()
                (eff_time, abs_time, min_overhead) = test.last_timing
                total_eff_time = total_eff_time + eff_time
                if self.verbose:
                    print(' %5.0fms %5.0fms %7.3fms' % \
                          (eff_time * MILLI_SECONDS,
                           abs_time * MILLI_SECONDS,
                           min_overhead * MILLI_SECONDS))
            self.roundtimes.append(total_eff_time)
            if self.verbose:
                print(' '
                      ' ------------------------------')
                print(' '
                      ' Totals: %6.0fms' %
                      (total_eff_time * MILLI_SECONDS))
                print()
            else:
                print('* Round %i done in %.3f seconds.' % (i+1,
                                                            total_eff_time))
        print()

    def stat(self):

        """ Return benchmark run statistics as tuple:

            (minimum round time,
             average round time,
             maximum round time)

            XXX Currently not used, since the benchmark does test
                statistics across all rounds.
        """
        runs = len(self.roundtimes)
        if runs == 0:
            # Bug fix: the empty case used to return only two values,
            # not the documented (min, avg, max) triple.
            return 0.0, 0.0, 0.0
        min_time = min(self.roundtimes)
        total_time = sum(self.roundtimes)
        avg_time = total_time / float(runs)
        max_time = max(self.roundtimes)
        return (min_time, avg_time, max_time)

    def print_header(self, title='Benchmark'):
        """Print the benchmark banner: title, run parameters and the
           machine details (when available).
        """
        print('-' * LINE)
        print('%s: %s' % (title, self.name))
        print('-' * LINE)
        print()
        print(' Rounds: %s' % self.rounds)
        print(' Warp: %s' % self.warp)
        print(' Timer: %s' % self.timer)
        print()
        if self.machine_details:
            print_machine_details(self.machine_details, indent=' ')
            print()

    def print_benchmark(self, hidenoise=0, limitnames=None):
        """Print per-test statistics and totals for this benchmark."""
        print('Test '
              ' minimum average operation overhead')
        print('-' * LINE)
        tests = sorted(self.tests.items())
        total_min_time = 0.0
        total_avg_time = 0.0
        for name, test in tests:
            if (limitnames is not None and
                limitnames.search(name) is None):
                continue
            (min_time,
             avg_time,
             total_time,
             op_avg,
             min_overhead) = test.stat()
            total_min_time = total_min_time + min_time
            total_avg_time = total_avg_time + avg_time
            print('%30s: %5.0fms %5.0fms %6.2fus %7.3fms' % \
                  (name,
                   min_time * MILLI_SECONDS,
                   avg_time * MILLI_SECONDS,
                   op_avg * MICRO_SECONDS,
                   min_overhead *MILLI_SECONDS))
        print('-' * LINE)
        print('Totals: '
              ' %6.0fms %6.0fms' %
              (total_min_time * MILLI_SECONDS,
               total_avg_time * MILLI_SECONDS,
               ))
        print()

    def print_comparison(self, compare_to, hidenoise=0, limitnames=None):
        """Print this benchmark side by side with *compare_to*.

        Falls back to a plain print_benchmark() when the two benchmark
        versions differ.  Relative differences are only shown when both
        the benchmarks and the individual tests are compatible.
        """
        # Check benchmark versions
        if compare_to.version != self.version:
            print('* Benchmark versions differ: '
                  'cannot compare this benchmark to "%s" !' %
                  compare_to.name)
            print()
            self.print_benchmark(hidenoise=hidenoise,
                                 limitnames=limitnames)
            return

        # Print header
        compare_to.print_header('Comparing with')
        print('Test '
              ' minimum run-time average run-time')
        print(' '
              ' this other diff this other diff')
        print('-' * LINE)

        # Print test comparisons
        tests = sorted(self.tests.items())
        total_min_time = other_total_min_time = 0.0
        total_avg_time = other_total_avg_time = 0.0
        benchmarks_compatible = self.compatible(compare_to)
        tests_compatible = 1
        for name, test in tests:
            if (limitnames is not None and
                limitnames.search(name) is None):
                continue
            (min_time,
             avg_time,
             total_time,
             op_avg,
             min_overhead) = test.stat()
            total_min_time = total_min_time + min_time
            total_avg_time = total_avg_time + avg_time
            try:
                other = compare_to.tests[name]
            except KeyError:
                other = None
            if other is None:
                # Other benchmark doesn't include the given test
                min_diff, avg_diff = 'n/a', 'n/a'
                other_min_time = 0.0
                other_avg_time = 0.0
                tests_compatible = 0
            else:
                (other_min_time,
                 other_avg_time,
                 other_total_time,
                 other_op_avg,
                 other_min_overhead) = other.stat()
                other_total_min_time = other_total_min_time + other_min_time
                other_total_avg_time = other_total_avg_time + other_avg_time
                if (benchmarks_compatible and
                    test.compatible(other)):
                    # Both benchmark and tests are comparable
                    min_diff = ((min_time * self.warp) /
                                (other_min_time * other.warp) - 1.0)
                    avg_diff = ((avg_time * self.warp) /
                                (other_avg_time * other.warp) - 1.0)
                    if hidenoise and abs(min_diff) < 10.0:
                        min_diff = ''
                    else:
                        min_diff = '%+5.1f%%' % (min_diff * PERCENT)
                    if hidenoise and abs(avg_diff) < 10.0:
                        avg_diff = ''
                    else:
                        avg_diff = '%+5.1f%%' % (avg_diff * PERCENT)
                else:
                    # Benchmark or tests are not comparable
                    min_diff, avg_diff = 'n/a', 'n/a'
                    tests_compatible = 0
            print('%30s: %5.0fms %5.0fms %7s %5.0fms %5.0fms %7s' % \
                  (name,
                   min_time * MILLI_SECONDS,
                   other_min_time * MILLI_SECONDS * compare_to.warp / self.warp,
                   min_diff,
                   avg_time * MILLI_SECONDS,
                   other_avg_time * MILLI_SECONDS * compare_to.warp / self.warp,
                   avg_diff))
        print('-' * LINE)

        # Summarise test results
        if not benchmarks_compatible or not tests_compatible:
            min_diff, avg_diff = 'n/a', 'n/a'
        else:
            if other_total_min_time != 0.0:
                min_diff = '%+5.1f%%' % (
                    ((total_min_time * self.warp) /
                     (other_total_min_time * compare_to.warp) - 1.0) * PERCENT)
            else:
                min_diff = 'n/a'
            if other_total_avg_time != 0.0:
                avg_diff = '%+5.1f%%' % (
                    ((total_avg_time * self.warp) /
                     (other_total_avg_time * compare_to.warp) - 1.0) * PERCENT)
            else:
                avg_diff = 'n/a'
        print('Totals: '
              ' %5.0fms %5.0fms %7s %5.0fms %5.0fms %7s' %
              (total_min_time * MILLI_SECONDS,
               (other_total_min_time * compare_to.warp/self.warp
                * MILLI_SECONDS),
               min_diff,
               total_avg_time * MILLI_SECONDS,
               (other_total_avg_time * compare_to.warp/self.warp
                * MILLI_SECONDS),
               avg_diff
               ))
        print()
        print('(this=%s, other=%s)' % (self.name,
                                       compare_to.name))
        print()
class PyBenchCmdline(Application):

    """ Command line front-end for pybench: parses the options,
        configures the interpreter, then creates, runs, prints and
        optionally saves/compares a Benchmark instance.
    """

    header = ("PYBENCH - a benchmark test suite for Python "
              "interpreters/compilers.")

    version = __version__

    debug = _debug

    options = [ArgumentOption('-n',
                              'number of rounds',
                              Setup.Number_of_rounds),
               ArgumentOption('-f',
                              'save benchmark to file arg',
                              ''),
               ArgumentOption('-c',
                              'compare benchmark with the one in file arg',
                              ''),
               ArgumentOption('-s',
                              'show benchmark in file arg, then exit',
                              ''),
               ArgumentOption('-w',
                              'set warp factor to arg',
                              Setup.Warp_factor),
               ArgumentOption('-t',
                              'run only tests with names matching arg',
                              ''),
               ArgumentOption('-C',
                              'set the number of calibration runs to arg',
                              CALIBRATION_RUNS),
               SwitchOption('-d',
                            'hide noise in comparisons',
                            0),
               SwitchOption('-v',
                            'verbose output (not recommended)',
                            0),
               SwitchOption('--with-gc',
                            'enable garbage collection',
                            0),
               SwitchOption('--with-syscheck',
                            'use default sys check interval',
                            0),
               ArgumentOption('--timer',
                              'use given timer',
                              TIMER_PLATFORM_DEFAULT),
               ]

    about = """\
The normal operation is to run the suite and display the
results. Use -f to save them for later reuse or comparisons.

Available timers:

   time.time
   time.clock
   systimes.processtime

Examples:

python2.1 pybench.py -f p21.pybench
python2.5 pybench.py -f p25.pybench
python pybench.py -s p25.pybench -c p21.pybench
"""
    copyright = __copyright__

    def main(self):
        """Run the benchmark according to the parsed options."""
        rounds = self.values['-n']
        reportfile = self.values['-f']
        show_bench = self.values['-s']
        compare_to = self.values['-c']
        hidenoise = self.values['-d']
        warp = int(self.values['-w'])
        withgc = self.values['--with-gc']
        limitnames = self.values['-t']
        if limitnames:
            if _debug:
                print('* limiting test names to one with substring "%s"' % \
                      limitnames)
            limitnames = re.compile(limitnames, re.I)
        else:
            limitnames = None
        verbose = self.verbose
        withsyscheck = self.values['--with-syscheck']
        calibration_runs = self.values['-C']
        timer = self.values['--timer']

        print('-' * LINE)
        print('PYBENCH %s' % __version__)
        print('-' * LINE)
        print('* using %s %s' % (
            getattr(platform, 'python_implementation', lambda:'Python')(),
            ' '.join(sys.version.split())))

        # Switch off garbage collection
        if not withgc:
            try:
                import gc
            except ImportError:
                print('* Python version doesn\'t support garbage collection')
            else:
                try:
                    gc.disable()
                except NotImplementedError:
                    print('* Python version doesn\'t support gc.disable')
                else:
                    print('* disabled garbage collection')

        # "Disable" sys check interval
        if not withsyscheck:
            # Too bad the check interval uses an int instead of a long...
            value = 2147483647
            try:
                sys.setcheckinterval(value)
            except (AttributeError, NotImplementedError):
                print('* Python version doesn\'t support sys.setcheckinterval')
            else:
                print('* system check interval set to maximum: %s' % value)

        if timer == TIMER_SYSTIMES_PROCESSTIME:
            import systimes
            print('* using timer: systimes.processtime (%s)' % \
                  systimes.SYSTIMES_IMPLEMENTATION)
        else:
            # Check that the clock function does exist
            try:
                get_timer(timer)
            except TypeError:
                print("* Error: Unknown timer: %s" % timer)
                return
            print('* using timer: %s' % timer)
            if hasattr(time, 'get_clock_info'):
                info = time.get_clock_info(timer[5:])
                print('* timer: resolution=%s, implementation=%s'
                      % (info.resolution, info.implementation))

        print()

        if compare_to:
            try:
                # Bug fix: use a context manager so the file handle is
                # not leaked when pickle.load() raises.
                # NOTE: pickle.load executes code from the file; only
                # load benchmark files from trusted sources.
                with open(compare_to, 'rb') as f:
                    bench = pickle.load(f)
                bench.name = compare_to
                compare_to = bench
            except IOError as reason:
                print('* Error opening/reading file %s: %s' % (
                    repr(compare_to),
                    reason))
                compare_to = None

        if show_bench:
            try:
                # Bug fix: context manager instead of manual close()
                # (see the pickle trust note above).
                with open(show_bench, 'rb') as f:
                    bench = pickle.load(f)
                bench.name = show_bench
                bench.print_header()
                if compare_to:
                    bench.print_comparison(compare_to,
                                           hidenoise=hidenoise,
                                           limitnames=limitnames)
                else:
                    bench.print_benchmark(hidenoise=hidenoise,
                                          limitnames=limitnames)
            except IOError as reason:
                print('* Error opening/reading file %s: %s' % (
                    repr(show_bench),
                    reason))
                print()
            return

        if reportfile:
            print('Creating benchmark: %s (rounds=%i, warp=%i)' % \
                  (reportfile, rounds, warp))
            print()

        # Create benchmark object
        bench = Benchmark(reportfile,
                          verbose=verbose,
                          timer=timer,
                          warp=warp,
                          calibration_runs=calibration_runs)
        bench.rounds = rounds
        bench.load_tests(Setup, limitnames=limitnames)
        try:
            bench.calibrate()
            bench.run()
        except KeyboardInterrupt:
            print()
            print('*** KeyboardInterrupt -- Aborting')
            print()
            return
        bench.print_header()
        if compare_to:
            bench.print_comparison(compare_to,
                                   hidenoise=hidenoise,
                                   limitnames=limitnames)
        else:
            bench.print_benchmark(hidenoise=hidenoise,
                                  limitnames=limitnames)

        # Ring bell
        sys.stderr.write('\007')

        if reportfile:
            try:
                # Bug fix: context manager guarantees the report file
                # is closed even if pickle.dump() raises.
                with open(reportfile, 'wb') as f:
                    bench.name = reportfile
                    pickle.dump(bench, f)
            except IOError as reason:
                print('* Error opening/writing reportfile %s: %s' % (
                    reportfile,
                    reason))
                print()
# Script entry point.  Instantiating the Application subclass
# presumably parses sys.argv and invokes main() from its constructor
# (CommandLine framework convention) -- TODO confirm against the
# Application base class.
if __name__ == '__main__':
    PyBenchCmdline()
| lgpl-3.0 |
shakamunyi/neutron-dvr | neutron/tests/unit/vmware/apiclient/fake.py | 5 | 28851 | # Copyright 2012 VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six.moves.urllib.parse as urlparse
from neutron.openstack.common import jsonutils as json
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils
from neutron.plugins.vmware.api_client import exception as api_exc
LOG = logging.getLogger(__name__)
MAX_NAME_LEN = 40
def _validate_name(name):
if name and len(name) > MAX_NAME_LEN:
raise Exception("Logical switch name exceeds %d characters",
MAX_NAME_LEN)
def _validate_resource(body):
    """Validate the mutable fields of a resource *body* dict."""
    display_name = body.get('display_name')
    _validate_name(display_name)
class FakeClient:
    """In-memory fake of the NSX API client used by the unit tests.

    Resources are kept in per-type dictionaries and responses are
    rendered from the JSON template files found under the path passed
    to the constructor.
    """

    # Resource type identifiers as they appear in request URIs.
    LSWITCH_RESOURCE = 'lswitch'
    LPORT_RESOURCE = 'lport'
    LROUTER_RESOURCE = 'lrouter'
    NAT_RESOURCE = 'nat'
    LQUEUE_RESOURCE = 'lqueue'
    SECPROF_RESOURCE = 'securityprofile'
    LSWITCH_STATUS = 'lswitchstatus'
    LROUTER_STATUS = 'lrouterstatus'
    # Composite identifiers for sub-resources (parent_child form).
    LSWITCH_LPORT_RESOURCE = 'lswitch_lport'
    LROUTER_LPORT_RESOURCE = 'lrouter_lport'
    LROUTER_NAT_RESOURCE = 'lrouter_nat'
    LSWITCH_LPORT_STATUS = 'lswitch_lportstatus'
    LSWITCH_LPORT_ATT = 'lswitch_lportattachment'
    LROUTER_LPORT_STATUS = 'lrouter_lportstatus'
    LROUTER_LPORT_ATT = 'lrouter_lportattachment'
    GWSERVICE_RESOURCE = 'gatewayservice'

    # Top-level resource names recognised when parsing URIs.
    RESOURCES = [LSWITCH_RESOURCE, LROUTER_RESOURCE, LQUEUE_RESOURCE,
                 LPORT_RESOURCE, NAT_RESOURCE, SECPROF_RESOURCE,
                 GWSERVICE_RESOURCE]

    # Template files used to render GET responses, per resource type.
    FAKE_GET_RESPONSES = {
        LSWITCH_RESOURCE: "fake_get_lswitch.json",
        LSWITCH_LPORT_RESOURCE: "fake_get_lswitch_lport.json",
        LSWITCH_LPORT_STATUS: "fake_get_lswitch_lport_status.json",
        LSWITCH_LPORT_ATT: "fake_get_lswitch_lport_att.json",
        LROUTER_RESOURCE: "fake_get_lrouter.json",
        LROUTER_LPORT_RESOURCE: "fake_get_lrouter_lport.json",
        LROUTER_LPORT_STATUS: "fake_get_lrouter_lport_status.json",
        LROUTER_LPORT_ATT: "fake_get_lrouter_lport_att.json",
        LROUTER_STATUS: "fake_get_lrouter_status.json",
        LROUTER_NAT_RESOURCE: "fake_get_lrouter_nat.json",
        SECPROF_RESOURCE: "fake_get_security_profile.json",
        LQUEUE_RESOURCE: "fake_get_lqueue.json",
        GWSERVICE_RESOURCE: "fake_get_gwservice.json"
    }

    # Template files used to render POST responses, per resource type.
    FAKE_POST_RESPONSES = {
        LSWITCH_RESOURCE: "fake_post_lswitch.json",
        LROUTER_RESOURCE: "fake_post_lrouter.json",
        LSWITCH_LPORT_RESOURCE: "fake_post_lswitch_lport.json",
        LROUTER_LPORT_RESOURCE: "fake_post_lrouter_lport.json",
        LROUTER_NAT_RESOURCE: "fake_post_lrouter_nat.json",
        SECPROF_RESOURCE: "fake_post_security_profile.json",
        LQUEUE_RESOURCE: "fake_post_lqueue.json",
        GWSERVICE_RESOURCE: "fake_post_gwservice.json"
    }

    # Template files used to render PUT responses, per resource type.
    FAKE_PUT_RESPONSES = {
        LSWITCH_RESOURCE: "fake_post_lswitch.json",
        LROUTER_RESOURCE: "fake_post_lrouter.json",
        LSWITCH_LPORT_RESOURCE: "fake_post_lswitch_lport.json",
        LROUTER_LPORT_RESOURCE: "fake_post_lrouter_lport.json",
        LROUTER_NAT_RESOURCE: "fake_post_lrouter_nat.json",
        LSWITCH_LPORT_ATT: "fake_put_lswitch_lport_att.json",
        LROUTER_LPORT_ATT: "fake_put_lrouter_lport_att.json",
        SECPROF_RESOURCE: "fake_post_security_profile.json",
        LQUEUE_RESOURCE: "fake_post_lqueue.json",
        GWSERVICE_RESOURCE: "fake_post_gwservice.json"
    }

    # Relations that are propagated into rendered response items
    # (see _build_relation).
    MANAGED_RELATIONS = {
        LSWITCH_RESOURCE: [],
        LROUTER_RESOURCE: [],
        LSWITCH_LPORT_RESOURCE: ['LogicalPortAttachment'],
        LROUTER_LPORT_RESOURCE: ['LogicalPortAttachment'],
    }

    # Validation callbacks applied to request bodies, per resource type.
    _validators = {
        LSWITCH_RESOURCE: _validate_resource,
        LSWITCH_LPORT_RESOURCE: _validate_resource,
        LROUTER_LPORT_RESOURCE: _validate_resource,
        SECPROF_RESOURCE: _validate_resource,
        LQUEUE_RESOURCE: _validate_resource,
        GWSERVICE_RESOURCE: _validate_resource
    }
def __init__(self, fake_files_path):
self.fake_files_path = fake_files_path
self._fake_lswitch_dict = {}
self._fake_lrouter_dict = {}
self._fake_lswitch_lport_dict = {}
self._fake_lrouter_lport_dict = {}
self._fake_lrouter_nat_dict = {}
self._fake_lswitch_lportstatus_dict = {}
self._fake_lrouter_lportstatus_dict = {}
self._fake_securityprofile_dict = {}
self._fake_lqueue_dict = {}
self._fake_gatewayservice_dict = {}
def _get_tag(self, resource, scope):
tags = [tag['tag'] for tag in resource['tags']
if tag['scope'] == scope]
return len(tags) > 0 and tags[0]
def _get_filters(self, querystring):
if not querystring:
return (None, None, None, None)
params = urlparse.parse_qs(querystring)
tag_filter = None
attr_filter = None
if 'tag' in params and 'tag_scope' in params:
tag_filter = {'scope': params['tag_scope'][0],
'tag': params['tag'][0]}
elif 'uuid' in params:
attr_filter = {'uuid': params['uuid'][0]}
# Handle page length and page cursor parameter
page_len = params.get('_page_length')
page_cursor = params.get('_page_cursor')
if page_len:
page_len = int(page_len[0])
else:
# Explicitly set it to None (avoid 0 or empty list)
page_len = None
return (tag_filter, attr_filter, page_len, page_cursor)
    def _add_lswitch(self, body):
        """Create a fake logical switch from the JSON request *body*.

        Stores the switch in the fake backend and adds the flattened
        fields (zone_uuid, tenant_id, lport_count, status) that the
        response templates expect.
        """
        fake_lswitch = json.loads(body)
        fake_lswitch['uuid'] = uuidutils.generate_uuid()
        self._fake_lswitch_dict[fake_lswitch['uuid']] = fake_lswitch
        # put the tenant_id and the zone_uuid in the main dict
        # for simplyfying templating
        zone_uuid = fake_lswitch['transport_zones'][0]['zone_uuid']
        fake_lswitch['zone_uuid'] = zone_uuid
        fake_lswitch['tenant_id'] = self._get_tag(fake_lswitch, 'os_tid')
        fake_lswitch['lport_count'] = 0
        # set status value
        fake_lswitch['status'] = 'true'
        return fake_lswitch
    def _build_lrouter(self, body, uuid=None):
        """Build (but do not store) a fake logical router dict.

        Flattens the fields used by the response templates: tenant_id,
        default_next_hop and a pre-rendered 'distributed_json' snippet.
        """
        fake_lrouter = json.loads(body)
        if uuid:
            fake_lrouter['uuid'] = uuid
        fake_lrouter['tenant_id'] = self._get_tag(fake_lrouter, 'os_tid')
        default_nexthop = fake_lrouter['routing_config'].get(
            'default_route_next_hop')
        if default_nexthop:
            fake_lrouter['default_next_hop'] = default_nexthop.get(
                'gateway_ip_address', '0.0.0.0')
        else:
            fake_lrouter['default_next_hop'] = '0.0.0.0'
        # NOTE(salv-orlando): We won't make the Fake NSX API client
        # aware of NSX version. The long term plan is to replace it
        # with behavioral mocking of NSX API requests
        if 'distributed' not in fake_lrouter:
            fake_lrouter['distributed'] = False
        distributed_json = ('"distributed": %s,' %
                            str(fake_lrouter['distributed']).lower())
        fake_lrouter['distributed_json'] = distributed_json
        return fake_lrouter
def _add_lrouter(self, body):
fake_lrouter = self._build_lrouter(body,
uuidutils.generate_uuid())
self._fake_lrouter_dict[fake_lrouter['uuid']] = fake_lrouter
fake_lrouter['lport_count'] = 0
# set status value
fake_lrouter['status'] = 'true'
return fake_lrouter
def _add_lqueue(self, body):
fake_lqueue = json.loads(body)
fake_lqueue['uuid'] = uuidutils.generate_uuid()
self._fake_lqueue_dict[fake_lqueue['uuid']] = fake_lqueue
return fake_lqueue
    def _add_lswitch_lport(self, body, ls_uuid):
        """Create a fake logical switch port on the switch *ls_uuid*.

        Stores the port, bumps the parent switch's lport_count and
        registers a companion status entry carrying the parent switch
        fields required by the status templates.
        """
        fake_lport = json.loads(body)
        new_uuid = uuidutils.generate_uuid()
        fake_lport['uuid'] = new_uuid
        # put the tenant_id and the ls_uuid in the main dict
        # for simplyfying templating
        fake_lport['ls_uuid'] = ls_uuid
        fake_lport['tenant_id'] = self._get_tag(fake_lport, 'os_tid')
        fake_lport['neutron_port_id'] = self._get_tag(fake_lport,
                                                      'q_port_id')
        fake_lport['neutron_device_id'] = self._get_tag(fake_lport, 'vm_id')
        fake_lport['att_type'] = "NoAttachment"
        fake_lport['att_info_json'] = ''
        self._fake_lswitch_lport_dict[fake_lport['uuid']] = fake_lport
        fake_lswitch = self._fake_lswitch_dict[ls_uuid]
        fake_lswitch['lport_count'] += 1
        # NOTE(review): the status copy is taken before 'status' is set
        # below, so the status dict does not carry it -- presumably the
        # status template supplies its own value; confirm if touched.
        fake_lport_status = fake_lport.copy()
        fake_lport_status['ls_tenant_id'] = fake_lswitch['tenant_id']
        fake_lport_status['ls_uuid'] = fake_lswitch['uuid']
        fake_lport_status['ls_name'] = fake_lswitch['display_name']
        fake_lport_status['ls_zone_uuid'] = fake_lswitch['zone_uuid']
        # set status value
        fake_lport['status'] = 'true'
        self._fake_lswitch_lportstatus_dict[new_uuid] = fake_lport_status
        return fake_lport
    def _build_lrouter_lport(self, body, new_uuid=None, lr_uuid=None):
        """Build (but do not store) a fake logical router port dict.

        Optionally assigns *new_uuid* and parent *lr_uuid*, flattens
        tenant/neutron tags and pre-renders the ip_addresses JSON
        snippet used by the templates.
        """
        fake_lport = json.loads(body)
        if new_uuid:
            fake_lport['uuid'] = new_uuid
        # put the tenant_id and the le_uuid in the main dict
        # for simplyfying templating
        if lr_uuid:
            fake_lport['lr_uuid'] = lr_uuid
        fake_lport['tenant_id'] = self._get_tag(fake_lport, 'os_tid')
        fake_lport['neutron_port_id'] = self._get_tag(fake_lport,
                                                      'q_port_id')
        # replace ip_address with its json dump
        if 'ip_addresses' in fake_lport:
            ip_addresses_json = json.dumps(fake_lport['ip_addresses'])
            fake_lport['ip_addresses_json'] = ip_addresses_json
        return fake_lport
    def _add_lrouter_lport(self, body, lr_uuid):
        """Create a fake logical router port on the router *lr_uuid*.

        Raises api_exc.ResourceNotFound when the parent router does not
        exist; otherwise bumps its lport_count and registers a
        companion status entry with the parent router fields.
        """
        new_uuid = uuidutils.generate_uuid()
        fake_lport = self._build_lrouter_lport(body, new_uuid, lr_uuid)
        self._fake_lrouter_lport_dict[fake_lport['uuid']] = fake_lport
        try:
            fake_lrouter = self._fake_lrouter_dict[lr_uuid]
        except KeyError:
            raise api_exc.ResourceNotFound()
        fake_lrouter['lport_count'] += 1
        fake_lport_status = fake_lport.copy()
        fake_lport_status['lr_tenant_id'] = fake_lrouter['tenant_id']
        fake_lport_status['lr_uuid'] = fake_lrouter['uuid']
        fake_lport_status['lr_name'] = fake_lrouter['display_name']
        self._fake_lrouter_lportstatus_dict[new_uuid] = fake_lport_status
        return fake_lport
def _add_securityprofile(self, body):
fake_securityprofile = json.loads(body)
fake_securityprofile['uuid'] = uuidutils.generate_uuid()
fake_securityprofile['tenant_id'] = self._get_tag(
fake_securityprofile, 'os_tid')
fake_securityprofile['nova_spid'] = self._get_tag(fake_securityprofile,
'nova_spid')
self._fake_securityprofile_dict[fake_securityprofile['uuid']] = (
fake_securityprofile)
return fake_securityprofile
def _add_lrouter_nat(self, body, lr_uuid):
fake_nat = json.loads(body)
new_uuid = uuidutils.generate_uuid()
fake_nat['uuid'] = new_uuid
fake_nat['lr_uuid'] = lr_uuid
self._fake_lrouter_nat_dict[fake_nat['uuid']] = fake_nat
if 'match' in fake_nat:
match_json = json.dumps(fake_nat['match'])
fake_nat['match_json'] = match_json
return fake_nat
    def _add_gatewayservice(self, body):
        """Create a fake gateway service from the JSON *body*.

        Flattens the first gateway's transport node and device id into
        the main dict for the response templates.
        """
        fake_gwservice = json.loads(body)
        fake_gwservice['uuid'] = str(uuidutils.generate_uuid())
        fake_gwservice['tenant_id'] = self._get_tag(
            fake_gwservice, 'os_tid')
        # FIXME(salvatore-orlando): For simplicity we're managing only a
        # single device. Extend the fake client for supporting multiple devices
        first_gw = fake_gwservice['gateways'][0]
        fake_gwservice['transport_node_uuid'] = first_gw['transport_node_uuid']
        fake_gwservice['device_id'] = first_gw['device_id']
        self._fake_gatewayservice_dict[fake_gwservice['uuid']] = (
            fake_gwservice)
        return fake_gwservice
def _build_relation(self, src, dst, resource_type, relation):
if relation not in self.MANAGED_RELATIONS[resource_type]:
return # Relation is not desired in output
if not '_relations' in src or not src['_relations'].get(relation):
return # Item does not have relation
relation_data = src['_relations'].get(relation)
dst_relations = dst.get('_relations', {})
dst_relations[relation] = relation_data
dst['_relations'] = dst_relations
def _fill_attachment(self, att_data, ls_uuid=None,
lr_uuid=None, lp_uuid=None):
new_data = att_data.copy()
for k in ('ls_uuid', 'lr_uuid', 'lp_uuid'):
if locals().get(k):
new_data[k] = locals()[k]
def populate_field(field_name):
if field_name in att_data:
new_data['%s_field' % field_name] = ('"%s" : "%s",'
% (field_name,
att_data[field_name]))
del new_data[field_name]
else:
new_data['%s_field' % field_name] = ""
for field in ['vif_uuid', 'peer_port_href', 'vlan_id',
'peer_port_uuid', 'l3_gateway_service_uuid']:
populate_field(field)
return new_data
    def _get_resource_type(self, path):
        """Get resource type.

        Identifies resource type and relevant uuids in the uri

        /ws.v1/lswitch/xxx
        /ws.v1/lswitch/xxx/status
        /ws.v1/lswitch/xxx/lport/yyy
        /ws.v1/lswitch/xxx/lport/yyy/status
        /ws.v1/lrouter/zzz
        /ws.v1/lrouter/zzz/status
        /ws.v1/lrouter/zzz/lport/www
        /ws.v1/lrouter/zzz/lport/www/status
        /ws.v1/lqueue/xxx

        Returns a (resource_type, uuids) tuple, where resource_type is
        the flattened '<parent>_<child><suffix>' name (dashes removed)
        and uuids lists the parent/child uuids found in the path.
        """
        # The first element will always be 'ws.v1' - so we just discard it
        uri_split = path.split('/')[1:]
        # parse uri_split backwards
        suffix = ""
        idx = len(uri_split) - 1
        if 'status' in uri_split[idx]:
            suffix = "status"
            idx = idx - 1
        elif 'attachment' in uri_split[idx]:
            suffix = "attachment"
            idx = idx - 1
        # then check if we have an uuid
        uuids = []
        # Anything that is not a known resource keyword is treated as a
        # uuid path segment.
        if uri_split[idx].replace('-', '') not in self.RESOURCES:
            uuids.append(uri_split[idx])
            idx = idx - 1
        resource_type = "%s%s" % (uri_split[idx], suffix)
        if idx > 1:
            # Nested resource: prepend the parent uuid and parent type.
            uuids.insert(0, uri_split[idx - 1])
            resource_type = "%s_%s" % (uri_split[idx - 2], resource_type)
        return (resource_type.replace('-', ''), uuids)
    def _list(self, resource_type, response_file,
              parent_uuid=None, query=None, relations=None):
        """Render a JSON list response for *resource_type*.

        Items are read from the matching _fake_*_dict store, filtered
        by parent (when the type is a sub-resource), by tag/attribute
        filters and by the paging cursor parsed from *query*, and each
        item is rendered through the template in *response_file*.
        Raises api_exc.ResourceNotFound for switch ports when no
        switches exist.
        """
        (tag_filter, attr_filter,
         page_len, page_cursor) = self._get_filters(query)
        # result_count attribute in response should appear only when
        # page_cursor is not specified
        do_result_count = not page_cursor
        with open("%s/%s" % (self.fake_files_path, response_file)) as f:
            response_template = f.read()
        res_dict = getattr(self, '_fake_%s_dict' % resource_type)
        if parent_uuid == '*':
            parent_uuid = None
        # NSX raises ResourceNotFound if lswitch doesn't exist and is not *
        elif not res_dict and resource_type == self.LSWITCH_LPORT_RESOURCE:
            raise api_exc.ResourceNotFound()

        def _attr_match(res_uuid):
            # True when the item matches all attribute filters (or none
            # were requested).
            if not attr_filter:
                return True
            item = res_dict[res_uuid]
            for (attr, value) in attr_filter.iteritems():
                if item.get(attr) != value:
                    return False
            return True

        def _tag_match(res_uuid):
            # True when the item carries the requested scope/tag pair.
            if not tag_filter:
                return True
            return any([x['scope'] == tag_filter['scope'] and
                        x['tag'] == tag_filter['tag']
                        for x in res_dict[res_uuid]['tags']])

        def _lswitch_match(res_uuid):
            # verify that the switch exist
            if parent_uuid and not parent_uuid in self._fake_lswitch_dict:
                raise Exception(_("lswitch:%s not found") % parent_uuid)
            if (not parent_uuid
                    or res_dict[res_uuid].get('ls_uuid') == parent_uuid):
                return True
            return False

        def _lrouter_match(res_uuid):
            # verify that the router exist
            if parent_uuid and not parent_uuid in self._fake_lrouter_dict:
                raise Exception(_("lrouter:%s not found") % parent_uuid)
            if (not parent_uuid or
                    res_dict[res_uuid].get('lr_uuid') == parent_uuid):
                return True
            return False

        def _cursor_match(res_uuid, page_cursor):
            # NOTE(review): rebinding page_cursor here only affects the
            # local parameter -- presumably intended to skip items
            # before the cursor; confirm before relying on paging
            # semantics of this fake.
            if not page_cursor:
                return True
            if page_cursor == res_uuid:
                # always return True once page_cursor has been found
                page_cursor = None
                return True
            return False

        def _build_item(resource):
            # Render one item through the response template, optionally
            # attaching managed relations.
            item = json.loads(response_template % resource)
            if relations:
                for relation in relations:
                    self._build_relation(resource, item,
                                         resource_type, relation)
            return item

        for item in res_dict.itervalues():
            if 'tags' in item:
                item['tags_json'] = json.dumps(item['tags'])
        # Pick the parent-matching predicate for sub-resources.
        if resource_type in (self.LSWITCH_LPORT_RESOURCE,
                             self.LSWITCH_LPORT_ATT,
                             self.LSWITCH_LPORT_STATUS):
            parent_func = _lswitch_match
        elif resource_type in (self.LROUTER_LPORT_RESOURCE,
                               self.LROUTER_LPORT_ATT,
                               self.LROUTER_NAT_RESOURCE,
                               self.LROUTER_LPORT_STATUS):
            parent_func = _lrouter_match
        else:
            parent_func = lambda x: True

        items = [_build_item(res_dict[res_uuid])
                 for res_uuid in res_dict
                 if (parent_func(res_uuid) and
                     _tag_match(res_uuid) and
                     _attr_match(res_uuid) and
                     _cursor_match(res_uuid, page_cursor))]
        # Rather inefficient, but hey this is just a mock!
        next_cursor = None
        total_items = len(items)
        if page_len:
            try:
                next_cursor = items[page_len]['uuid']
            except IndexError:
                next_cursor = None
            items = items[:page_len]
        response_dict = {'results': items}
        if next_cursor:
            response_dict['page_cursor'] = next_cursor
        if do_result_count:
            response_dict['result_count'] = total_items
        return json.dumps(response_dict)
def _show(self, resource_type, response_file,
          uuid1, uuid2=None, relations=None):
    """Render the fake GET-one-resource response body.

    resource_type -- logical resource kind; a trailing 'attachment'
        suffix is stripped so the parent port's store is consulted.
    response_file -- name of a %-style template file under
        self.fake_files_path, filled in from the stored resource dict.
    uuid1, uuid2 -- parent and (optional) child uuid; the child wins
        when both are given.
    relations -- accepted for signature parity with _list; unused here.

    Raises api_exc.ResourceNotFound when no stored resource matches.
    """
    target_uuid = uuid2 or uuid1
    if resource_type.endswith('attachment'):
        resource_type = resource_type[:resource_type.index('attachment')]
    with open("%s/%s" % (self.fake_files_path, response_file)) as f:
        response_template = f.read()
    res_dict = getattr(self, '_fake_%s_dict' % resource_type)
    # Pre-compute JSON renderings that the template placeholders expect.
    for item in res_dict.itervalues():
        if 'tags' in item:
            item['tags_json'] = json.dumps(item['tags'])

        # replace sec prof rules with their json dump
        def jsonify_rules(rule_key):
            if rule_key in item:
                rules_json = json.dumps(item[rule_key])
                item['%s_json' % rule_key] = rules_json
        jsonify_rules('logical_port_egress_rules')
        jsonify_rules('logical_port_ingress_rules')
    # Round-trip through json.loads/json.dumps to normalise the template
    # output before returning it.
    items = [json.loads(response_template % res_dict[res_uuid])
             for res_uuid in res_dict if res_uuid == target_uuid]
    if items:
        return json.dumps(items[0])
    raise api_exc.ResourceNotFound()
def handle_get(self, url):
    """Dispatch a fake GET to either _show (single uuid) or _list.

    Raises api_exc.NsxApiException for resource types without a
    registered response template, and a plain Exception for unknown
    resource types.
    """
    #TODO(salvatore-orlando): handle field selection
    parsedurl = urlparse.urlparse(url)
    (res_type, uuids) = self._get_resource_type(parsedurl.path)
    relations = urlparse.parse_qs(parsedurl.query).get('relations')
    response_file = self.FAKE_GET_RESPONSES.get(res_type)
    if not response_file:
        raise api_exc.NsxApiException()
    if 'lport' in res_type or 'nat' in res_type:
        # Port/NAT resources carry a parent uuid first; a second uuid
        # selects a single child resource.
        if len(uuids) > 1:
            return self._show(res_type, response_file, uuids[0],
                              uuids[1], relations=relations)
        else:
            return self._list(res_type, response_file, uuids[0],
                              query=parsedurl.query, relations=relations)
    elif ('lswitch' in res_type or
          'lrouter' in res_type or
          self.SECPROF_RESOURCE in res_type or
          self.LQUEUE_RESOURCE in res_type or
          'gatewayservice' in res_type):
        LOG.debug("UUIDS:%s", uuids)
        if uuids:
            return self._show(res_type, response_file, uuids[0],
                              relations=relations)
        else:
            return self._list(res_type, response_file,
                              query=parsedurl.query,
                              relations=relations)
    else:
        raise Exception("unknown resource:%s" % res_type)
def handle_post(self, url, body):
    """Handle a fake POST: validate the body, create the resource via
    the matching _add_<res_type> method, and render the response
    template with the stored resource dict.
    """
    parsedurl = urlparse.urlparse(url)
    (res_type, uuids) = self._get_resource_type(parsedurl.path)
    response_file = self.FAKE_POST_RESPONSES.get(res_type)
    if not response_file:
        raise Exception("resource not found")
    with open("%s/%s" % (self.fake_files_path, response_file)) as f:
        response_template = f.read()
    add_resource = getattr(self, '_add_%s' % res_type)
    body_json = json.loads(body)
    # Optional per-resource-type validator; raises on bad input.
    val_func = self._validators.get(res_type)
    if val_func:
        val_func(body_json)
    args = [body]
    if uuids:
        # Sub-resource creation: pass the parent uuid as well.
        args.append(uuids[0])
    response = response_template % add_resource(*args)
    return response
def handle_put(self, url, body):
    """Handle a fake PUT: either update a stored resource in place, or
    process a port "attachment" operation (patch/VIF/L2-GW/L3-GW).

    Raises api_exc.ResourceNotFound if the target uuid is unknown.
    """
    parsedurl = urlparse.urlparse(url)
    (res_type, uuids) = self._get_resource_type(parsedurl.path)
    response_file = self.FAKE_PUT_RESPONSES.get(res_type)
    if not response_file:
        raise Exception("resource not found")
    with open("%s/%s" % (self.fake_files_path, response_file)) as f:
        response_template = f.read()
    # Manage attachment operations
    is_attachment = False
    if res_type.endswith('attachment'):
        is_attachment = True
        res_type = res_type[:res_type.index('attachment')]
    res_dict = getattr(self, '_fake_%s_dict' % res_type)
    body_json = json.loads(body)
    val_func = self._validators.get(res_type)
    if val_func:
        val_func(body_json)
    try:
        resource = res_dict[uuids[-1]]
    except KeyError:
        raise api_exc.ResourceNotFound()
    if not is_attachment:
        # Plain update: optionally rebuild the body via _build_<type>,
        # then merge it into the stored resource.
        edit_resource = getattr(self, '_build_%s' % res_type, None)
        if edit_resource:
            body_json = edit_resource(body)
        resource.update(body_json)
    else:
        relations = resource.get("_relations", {})
        body_2 = json.loads(body)
        resource['att_type'] = body_2['type']
        relations['LogicalPortAttachment'] = body_2
        resource['_relations'] = relations
        if body_2['type'] == "PatchAttachment":
            # We need to do a trick here
            # A patch attachment links an lswitch port to an lrouter
            # port (or vice versa), so the peer lives in the *other*
            # resource store.
            if self.LROUTER_RESOURCE in res_type:
                res_type_2 = res_type.replace(self.LROUTER_RESOURCE,
                                              self.LSWITCH_RESOURCE)
            elif self.LSWITCH_RESOURCE in res_type:
                res_type_2 = res_type.replace(self.LSWITCH_RESOURCE,
                                              self.LROUTER_RESOURCE)
            # NOTE(review): if res_type mentions neither lrouter nor
            # lswitch, res_type_2 is unbound here — presumably this
            # path is only reached for port resources; confirm.
            res_dict_2 = getattr(self, '_fake_%s_dict' % res_type_2)
            body_2['peer_port_uuid'] = uuids[-1]
            # Re-parse the original body: body_2['peer_port_uuid'] was
            # just overwritten with our own uuid, but we still need the
            # peer's uuid from the request to look it up.
            resource_2 = res_dict_2[json.loads(body)['peer_port_uuid']]
            relations_2 = resource_2.get("_relations")
            if not relations_2:
                relations_2 = {}
            relations_2['LogicalPortAttachment'] = body_2
            resource_2['_relations'] = relations_2
            # NOTE(review): body_2['peer_port_uuid'] now equals our own
            # uuid (set above), so this records the port as its own
            # peer — looks intentional for the mock's bookkeeping, but
            # worth confirming against the real API behaviour.
            resource['peer_port_uuid'] = body_2['peer_port_uuid']
            resource['att_info_json'] = (
                "\"peer_port_uuid\": \"%s\"," %
                resource_2['uuid'])
            resource_2['att_info_json'] = (
                "\"peer_port_uuid\": \"%s\"," %
                body_2['peer_port_uuid'])
        elif body_2['type'] == "L3GatewayAttachment":
            resource['attachment_gwsvc_uuid'] = (
                body_2['l3_gateway_service_uuid'])
            resource['vlan_id'] = body_2.get('vlan_id')
        elif body_2['type'] == "L2GatewayAttachment":
            resource['attachment_gwsvc_uuid'] = (
                body_2['l2_gateway_service_uuid'])
        elif body_2['type'] == "VifAttachment":
            resource['vif_uuid'] = body_2['vif_uuid']
            resource['att_info_json'] = (
                "\"vif_uuid\": \"%s\"," % body_2['vif_uuid'])
    if not is_attachment:
        response = response_template % resource
    else:
        if res_type == self.LROUTER_LPORT_RESOURCE:
            lr_uuid = uuids[0]
            ls_uuid = None
        elif res_type == self.LSWITCH_LPORT_RESOURCE:
            ls_uuid = uuids[0]
            lr_uuid = None
        # NOTE(review): ls_uuid/lr_uuid are unbound when res_type is
        # neither port resource — presumably attachments only target
        # ports; confirm.
        lp_uuid = uuids[1]
        response = response_template % self._fill_attachment(
            json.loads(body), ls_uuid, lr_uuid, lp_uuid)
    return response
def handle_delete(self, url):
    """Handle a fake DELETE: remove the resource from its store.

    Returns an empty body (matching the real API's empty 204-style
    response).  Raises api_exc.ResourceNotFound for unknown uuids.
    """
    parsedurl = urlparse.urlparse(url)
    (res_type, uuids) = self._get_resource_type(parsedurl.path)
    # DELETE reuses the PUT response table purely as an existence check
    # for the resource type; the template itself is never rendered.
    response_file = self.FAKE_PUT_RESPONSES.get(res_type)
    if not response_file:
        raise Exception("resource not found")
    res_dict = getattr(self, '_fake_%s_dict' % res_type)
    try:
        del res_dict[uuids[-1]]
    except KeyError:
        raise api_exc.ResourceNotFound()
    return ""
def fake_request(self, *args, **kwargs):
    """Dispatch a fake HTTP call to the matching handle_<verb> method.

    The first positional argument is the HTTP verb (case-insensitive);
    the remaining positional arguments are forwarded unchanged.
    """
    verb, call_args = args[0], args[1:]
    handler = getattr(self, 'handle_' + verb.lower())
    return handler(*call_args)
def reset_all(self):
    """Empty every in-memory fake resource store, restoring a pristine
    state for the next test."""
    # Clear each store by name instead of enumerating nine attribute
    # accesses; the set of stores is fixed by the class.
    for store in ('lswitch', 'lrouter',
                  'lswitch_lport', 'lrouter_lport',
                  'lswitch_lportstatus', 'lrouter_lportstatus',
                  'lqueue', 'securityprofile', 'gatewayservice'):
        getattr(self, '_fake_%s_dict' % store).clear()
| apache-2.0 |
camilonova/django | tests/backends/test_mysql.py | 25 | 3531 | import unittest
from contextlib import contextmanager
from django.core.exceptions import ImproperlyConfigured
from django.db import connection
from django.test import TestCase, override_settings
@contextmanager
def get_connection():
    """Yield a fresh copy of the default database connection.

    The copy is always closed on exit.  The original implementation
    closed it only on the success path, leaking the connection whenever
    the ``with`` body raised.
    """
    new_connection = connection.copy()
    try:
        yield new_connection
    finally:
        new_connection.close()
@override_settings(DEBUG=True)
@unittest.skipUnless(connection.vendor == 'mysql', 'MySQL specific test.')
class MySQLTests(TestCase):
    """MySQL-backend-specific tests, focused on transaction isolation
    level configuration and connection initialisation."""

    read_committed = 'read committed'
    repeatable_read = 'repeatable read'
    # Maps the settings spelling ('read committed') to the value MySQL
    # reports ('READ-COMMITTED').
    isolation_values = {
        level: level.replace(' ', '-').upper()
        for level in (read_committed, repeatable_read)
    }

    @classmethod
    def setUpClass(cls):
        """Record the configured isolation level and pick a different one
        to test switching against."""
        super().setUpClass()
        configured_isolation_level = connection.isolation_level or cls.isolation_values[cls.repeatable_read]
        cls.configured_isolation_level = configured_isolation_level.upper()
        cls.other_isolation_level = (
            cls.read_committed
            if configured_isolation_level != cls.isolation_values[cls.read_committed]
            else cls.repeatable_read
        )

    @staticmethod
    def get_isolation_level(connection):
        """Ask the server for the session's current isolation level."""
        with connection.cursor() as cursor:
            # NOTE(review): @@session.tx_isolation was renamed to
            # transaction_isolation in MySQL 8.0 — confirm the targeted
            # server versions still accept this variable.
            cursor.execute("SELECT @@session.tx_isolation")
            return cursor.fetchone()[0]

    def test_auto_is_null_auto_config(self):
        # init_connection_state() should emit (or skip) the
        # sql_auto_is_null workaround depending on the feature flag.
        query = 'set sql_auto_is_null = 0'
        connection.init_connection_state()
        last_query = connection.queries[-1]['sql'].lower()
        if connection.features.is_sql_auto_is_null_enabled:
            self.assertIn(query, last_query)
        else:
            self.assertNotIn(query, last_query)

    def test_connect_isolation_level(self):
        self.assertEqual(self.get_isolation_level(connection), self.configured_isolation_level)

    def test_setting_isolation_level(self):
        with get_connection() as new_connection:
            new_connection.settings_dict['OPTIONS']['isolation_level'] = self.other_isolation_level
            self.assertEqual(
                self.get_isolation_level(new_connection),
                self.isolation_values[self.other_isolation_level]
            )

    def test_uppercase_isolation_level(self):
        # Upper case values are also accepted in 'isolation_level'.
        with get_connection() as new_connection:
            new_connection.settings_dict['OPTIONS']['isolation_level'] = self.other_isolation_level.upper()
            self.assertEqual(
                self.get_isolation_level(new_connection),
                self.isolation_values[self.other_isolation_level]
            )

    def test_default_isolation_level(self):
        # If not specified in settings, the default is read committed.
        with get_connection() as new_connection:
            new_connection.settings_dict['OPTIONS'].pop('isolation_level', None)
            self.assertEqual(
                self.get_isolation_level(new_connection),
                self.isolation_values[self.read_committed]
            )

    def test_isolation_level_validation(self):
        # An unknown level must fail fast with ImproperlyConfigured.
        new_connection = connection.copy()
        new_connection.settings_dict['OPTIONS']['isolation_level'] = 'xxx'
        msg = (
            "Invalid transaction isolation level 'xxx' specified.\n"
            "Use one of 'read committed', 'read uncommitted', "
            "'repeatable read', 'serializable', or None."
        )
        with self.assertRaisesMessage(ImproperlyConfigured, msg):
            new_connection.cursor()
| bsd-3-clause |
binarydud/django-oscar | src/oscar/apps/offer/models.py | 23 | 1817 | from oscar.core.loading import is_model_registered
from oscar.apps.offer.results import (
BasketDiscount, ShippingDiscount, PostOrderAction,
SHIPPING_DISCOUNT, ZERO_DISCOUNT)
from oscar.apps.offer.abstract_models import (
AbstractBenefit, AbstractCondition, AbstractConditionalOffer,
AbstractRange, AbstractRangeProduct, AbstractRangeProductFileUpload)
__all__ = [
'BasketDiscount', 'ShippingDiscount', 'PostOrderAction',
'SHIPPING_DISCOUNT', 'ZERO_DISCOUNT'
]
# For each core offer model: define the concrete class from its abstract
# base only if the project has not already registered an override, and
# export whichever class ends up being used via __all__.
if not is_model_registered('offer', 'ConditionalOffer'):
    class ConditionalOffer(AbstractConditionalOffer):
        pass

    __all__.append('ConditionalOffer')

if not is_model_registered('offer', 'Benefit'):
    class Benefit(AbstractBenefit):
        pass

    __all__.append('Benefit')

if not is_model_registered('offer', 'Condition'):
    class Condition(AbstractCondition):
        pass

    __all__.append('Condition')

if not is_model_registered('offer', 'Range'):
    class Range(AbstractRange):
        pass

    __all__.append('Range')

if not is_model_registered('offer', 'RangeProduct'):
    class RangeProduct(AbstractRangeProduct):
        pass

    __all__.append('RangeProduct')

if not is_model_registered('offer', 'RangeProductFileUpload'):
    class RangeProductFileUpload(AbstractRangeProductFileUpload):
        pass

    __all__.append('RangeProductFileUpload')
# Import the benefits and the conditions. Required after initializing the
# parent models to allow overriding them
from oscar.apps.offer.benefits import * # noqa
from oscar.apps.offer.conditions import * # noqa
from oscar.apps.offer.benefits import __all__ as benefit_classes # noqa
from oscar.apps.offer.conditions import __all__ as condition_classes # noqa
__all__.extend(benefit_classes)
__all__.extend(condition_classes)
| bsd-3-clause |
antinet/cjdns | node_build/dependencies/libuv/build/gyp/test/msvs/external_builder/gyptest-all.py | 260 | 1878 | #!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that msvs_external_builder being set will invoke the provided
msvs_external_builder_build_cmd and msvs_external_builder_clean_cmd, and will
not invoke MSBuild actions and rules.
"""
import os
import sys
import TestGyp
# msvs_external_builder is only supported from Visual Studio 2010 on;
# silently skip the test on older toolchains.
if int(os.environ.get('GYP_MSVS_VERSION', 0)) < 2010:
    sys.exit(0)

test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')

# without the flag set
# Baseline: the normal MSBuild rule/action pipeline must run and the
# external builder must not.
test.run_gyp('external.gyp')
test.build('external.gyp', target='external')
test.must_not_exist('external_builder.out')
test.must_exist('msbuild_rule.out')
test.must_exist('msbuild_action.out')
test.must_match('msbuild_rule.out', 'msbuild_rule.py hello.z a b c')
test.must_match('msbuild_action.out', 'msbuild_action.py x y z')
os.remove('msbuild_rule.out')
os.remove('msbuild_action.out')

# with the flag set, using Build
# The external builder must replace both MSBuild rules and actions.
try:
    os.environ['GYP_DEFINES'] = 'use_external_builder=1'
    test.run_gyp('external.gyp')
    test.build('external.gyp', target='external')
finally:
    del os.environ['GYP_DEFINES']
test.must_not_exist('msbuild_rule.out')
test.must_not_exist('msbuild_action.out')
test.must_exist('external_builder.out')
test.must_match('external_builder.out', 'external_builder.py build 1 2 3')
os.remove('external_builder.out')

# with the flag set, using Clean
try:
    os.environ['GYP_DEFINES'] = 'use_external_builder=1'
    test.run_gyp('external.gyp')
    test.build('external.gyp', target='external', clean=True)
finally:
    del os.environ['GYP_DEFINES']
test.must_not_exist('msbuild_rule.out')
test.must_not_exist('msbuild_action.out')
test.must_exist('external_builder.out')
test.must_match('external_builder.out', 'external_builder.py clean 4 5')
os.remove('external_builder.out')

test.pass_test()
| gpl-3.0 |
Immortalin/python-for-android | python-modules/twisted/twisted/test/test_htb.py | 101 | 2816 | # -*- Python -*-
__version__ = '$Revision: 1.3 $'[11:-2]
from twisted.trial import unittest
from twisted.protocols import htb
class DummyClock:
    """A manually-advanced stand-in for a time function.

    Instances are callable like ``time.time()``; ``set()`` pins the
    reported time to a chosen value.
    """

    # Class-level default; set() shadows it with an instance attribute.
    time = 0

    def set(self, when):
        """Pin the fake clock to *when*."""
        self.time = when

    def __call__(self):
        """Return the current fake time."""
        return self.time
class SomeBucket(htb.Bucket):
    """A concrete token bucket for the tests: 100-unit capacity draining
    at 2 units per second."""
    maxburst = 100
    rate = 2
class TestBucketBase(unittest.TestCase):
    """Base fixture that swaps htb's clock for a controllable DummyClock
    so drain rates can be tested deterministically."""
    def setUp(self):
        self._realTimeFunc = htb.time
        self.clock = DummyClock()
        htb.time = self.clock
    def tearDown(self):
        # Restore the real time function so other tests are unaffected.
        htb.time = self._realTimeFunc
class TestBucket(TestBucketBase):
    def testBucketSize(self):
        """Testing the size of the bucket."""
        b = SomeBucket()
        fit = b.add(1000)
        # Only maxburst (100) units fit into an empty bucket.
        self.failUnlessEqual(100, fit)
    # NOTE(review): method name has a typo ("Drian" for "Drain"); left
    # unchanged since trial discovers tests by the test* name prefix.
    def testBucketDrian(self):
        """Testing the bucket's drain rate."""
        b = SomeBucket()
        fit = b.add(1000)
        self.clock.set(10)
        # After 10s at rate 2, 20 units have drained and fit again.
        fit = b.add(1000)
        self.failUnlessEqual(20, fit)
class TestBucketNesting(TestBucketBase):
    """Tests for child buckets constrained by a shared parent bucket."""
    def setUp(self):
        TestBucketBase.setUp(self)
        self.parent = SomeBucket()
        self.child1 = SomeBucket(self.parent)
        self.child2 = SomeBucket(self.parent)
    def testBucketParentSize(self):
        # Use up most of the parent bucket.
        self.child1.add(90)
        fit = self.child2.add(90)
        # Only the parent's remaining 10 units are available to child2.
        self.failUnlessEqual(10, fit)
    def testBucketParentRate(self):
        # Make the parent bucket drain slower.
        self.parent.rate = 1
        # Fill both child1 and parent.
        self.child1.add(100)
        self.clock.set(10)
        fit = self.child1.add(100)
        # How much room was there? The child bucket would have had 20,
        # but the parent bucket only ten (so no, it wouldn't make too much
        # sense to have a child bucket draining faster than its parent in a real
        # application.)
        self.failUnlessEqual(10, fit)
# TODO: Test the Transport stuff?
from test_pcp import DummyConsumer
class ConsumerShaperTest(TestBucketBase):
    """Tests for htb.ShapedConsumer: rate limiting writes to a consumer
    and reference counting of the shared bucket."""
    def setUp(self):
        TestBucketBase.setUp(self)
        self.underlying = DummyConsumer()
        self.bucket = SomeBucket()
        self.shaped = htb.ShapedConsumer(self.underlying, self.bucket)
    def testRate(self):
        # Start off with a full bucket, so the burst-size dosen't factor in
        # to the calculations.
        delta_t = 10
        self.bucket.add(100)
        self.shaped.write("x" * 100)
        self.clock.set(delta_t)
        self.shaped.resumeProducing()
        # Only rate * delta_t bytes may have reached the consumer.
        self.failUnlessEqual(len(self.underlying.getvalue()),
                             delta_t * self.bucket.rate)
    def testBucketRefs(self):
        self.failUnlessEqual(self.bucket._refcount, 1)
        self.shaped.stopProducing()
        # Stopping the producer must release its bucket reference.
        self.failUnlessEqual(self.bucket._refcount, 0)
| apache-2.0 |
stevereyes01/pycbc | pycbc/workflow/psd.py | 10 | 4901 | # Copyright (C) 2013 Ian Harry
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""This module is responsible for setting up PSD-related jobs in workflows.
"""
from six.moves import range
from pycbc.workflow.core import FileList, make_analysis_dir, Executable, File
from pycbc.workflow.core import SegFile
from ligo.segments import segmentlist
class CalcPSDExecutable(Executable):
    """Workflow executable wrapping the PSD-calculation job."""
    current_retention_level = Executable.ALL_TRIGGERS
    # Options whose values are files that must be tracked as workflow inputs.
    file_input_options = ['--gating-file']
class MergePSDFiles(Executable):
    """Workflow executable that merges per-segment PSD files."""
    current_retention_level = Executable.MERGED_TRIGGERS
def chunks(l, n):
    """ Yield n successive chunks from l.

    The first n-1 chunks each hold floor(len(l)/n) items; the final
    chunk carries whatever remains.
    """
    size = int(len(l) / n)
    start = 0
    for _ in range(n - 1):
        yield l[start:start + size]
        start += size
    yield l[start:]
def merge_psds(workflow, files, ifo, out_dir, tags=None):
    """Add a node that merges per-segment PSD files into one HDF file.

    Returns the merged PSD File object.
    """
    make_analysis_dir(out_dir)
    tags = [] if not tags else tags
    node = MergePSDFiles(workflow.cp, 'merge_psds',
                         ifos=ifo, out_dir=out_dir,
                         tags=tags).create_node()
    node.add_input_list_opt('--psd-files', files)
    node.new_output_file_opt(workflow.analysis_time, '.hdf', '--output-file')
    workflow += node
    return node.output_files[0]
def setup_psd_calculate(workflow, frame_files, ifo, segments,
                        segment_name, out_dir, tags=None):
    """Set up PSD estimation jobs, optionally split across several
    parallel parts, and return the single (possibly merged) PSD file.
    """
    make_analysis_dir(out_dir)
    tags = [] if not tags else tags
    # parallelization-factor controls how many PSD jobs the segment list
    # is split over; default is a single job.
    if workflow.cp.has_option_tags('workflow-psd', 'parallelization-factor', tags=tags):
        num_parts = int(workflow.cp.get_opt_tags('workflow-psd',
                                                 'parallelization-factor',
                                                 tags=tags))
    else:
        num_parts = 1

    # get rid of duplicate segments which happen when splitting the bank
    segments = segmentlist(frozenset(segments))

    segment_lists = list(chunks(segments, num_parts))

    psd_files = FileList([])
    for i, segs in enumerate(segment_lists):
        # One segment file and one PSD job per part, tagged PART<i>.
        seg_file = SegFile.from_segment_list('%s_%s' %(segment_name, i),
                         segmentlist(segs), segment_name, ifo,
                         valid_segment=workflow.analysis_time,
                         extension='xml', directory=out_dir)

        psd_files += [make_psd_file(workflow, frame_files, seg_file,
                                    segment_name, out_dir,
                                    tags=tags + ['PART%s' % i])]

    if num_parts > 1:
        return merge_psds(workflow, psd_files, ifo, out_dir, tags=tags)
    else:
        return psd_files[0]
def make_psd_file(workflow, frame_files, segment_file, segment_name, out_dir,
                  tags=None):
    """Add a single PSD-calculation node for the given segment file and
    return its output HDF File object.
    """
    make_analysis_dir(out_dir)
    tags = [] if not tags else tags
    exe = CalcPSDExecutable(workflow.cp, 'calculate_psd',
                            ifos=segment_file.ifo, out_dir=out_dir,
                            tags=tags)
    node = exe.create_node()
    node.add_input_opt('--analysis-segment-file', segment_file)
    node.add_opt('--segment-name', segment_name)
    # If the executable specifies a frame type, frame files need not be
    # passed explicitly.
    if frame_files and not exe.has_opt('frame-type'):
        node.add_input_list_opt('--frame-files', frame_files)
    node.new_output_file_opt(workflow.analysis_time, '.hdf', '--output-file')
    workflow += node
    return node.output_files[0]
class AvgPSDExecutable(Executable):
    """Workflow executable that averages PSDs over time/detectors."""
    current_retention_level = Executable.FINAL_RESULT
def make_average_psd(workflow, psd_files, out_dir, tags=None,
                     output_fmt='.txt'):
    """Add a node averaging the given PSD files and return its outputs.

    Produces one time-averaged file per detector, plus a
    detector-averaged file when more than one detector is analysed.
    """
    make_analysis_dir(out_dir)
    tags = [] if tags is None else tags
    node = AvgPSDExecutable(workflow.cp, 'average_psd', ifos=workflow.ifos,
                            out_dir=out_dir, tags=tags).create_node()
    node.add_input_list_opt('--input-files', psd_files)

    if len(workflow.ifos) > 1:
        node.new_output_file_opt(workflow.analysis_time, output_fmt,
                                 '--detector-avg-file')

    node.new_multiifo_output_list_opt('--time-avg-file', workflow.ifos,
                                      workflow.analysis_time, output_fmt, tags=tags)
    workflow += node
    return node.output_files
# keep namespace clean
__all__ = ['make_psd_file', 'make_average_psd', 'setup_psd_calculate', 'merge_psds']
| gpl-3.0 |
domibarton/ansible | lib/ansible/compat/six/__init__.py | 82 | 1777 | # (c) 2014, Toshio Kuratomi <tkuratomi@ansible.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat six library. RHEL7 has python-six 1.3.0 which is too old
'''
# The following makes it easier for us to script updates of the bundled code
_BUNDLED_METADATA = { "pypi_name": "six", "version": "1.10.0" }
import os.path
# Prefer the system-installed six, but only if it is new enough; fall
# back to the bundled copy otherwise.
try:
    import six as _system_six
except ImportError:
    _system_six = None

if _system_six:
    # If we need some things from even newer versions of six, then we need to
    # use our bundled copy instead
    if ( # Added in six-1.8.0
            not hasattr(_system_six.moves, 'shlex_quote') or
            # Added in six-1.4.0
            not hasattr(_system_six, 'byte2int') or
            not hasattr(_system_six, 'add_metaclass') or
            not hasattr(_system_six.moves, 'urllib')
    ):
        _system_six = False

if _system_six:
    six = _system_six
else:
    from . import _six as six

# Re-execute the chosen six module's source in this namespace so that
# `from ansible.compat.six import X` works for every six name.
six_py_file = '{0}.py'.format(os.path.splitext(six.__file__)[0])
exec(open(six_py_file, 'rb').read())
| gpl-3.0 |
makermade/arm_android-21_arm-linux-androideabi-4.8 | lib/python2.7/lib-tk/Tkinter.py | 43 | 157840 | """Wrapper functions for Tcl/Tk.
Tkinter provides classes which allow the display, positioning and
control of widgets. Toplevel widgets are Tk and Toplevel. Other
widgets are Frame, Label, Entry, Text, Canvas, Button, Radiobutton,
Checkbutton, Scale, Listbox, Scrollbar, OptionMenu, Spinbox
LabelFrame and PanedWindow.
Properties of the widgets are specified with keyword arguments.
Keyword arguments have the same name as the corresponding resource
under Tk.
Widgets are positioned with one of the geometry managers Place, Pack
or Grid. These managers can be called with methods place, pack, grid
available in every Widget.
Actions are bound to events by resources (e.g. keyword argument
command) or with the method bind.
Example (Hello, World):
import Tkinter
from Tkconstants import *
tk = Tkinter.Tk()
frame = Tkinter.Frame(tk, relief=RIDGE, borderwidth=2)
frame.pack(fill=BOTH,expand=1)
label = Tkinter.Label(frame, text="Hello, World")
label.pack(fill=X, expand=1)
button = Tkinter.Button(frame,text="Exit",command=tk.destroy)
button.pack(side=BOTTOM)
tk.mainloop()
"""
__version__ = "$Revision: 81008 $"
import sys
if sys.platform == "win32":
# Attempt to configure Tcl/Tk without requiring PATH
import FixTk
import _tkinter # If this fails your Python may not be configured for Tk
tkinter = _tkinter # b/w compat for export
TclError = _tkinter.TclError
from types import *
from Tkconstants import *
import re
wantobjects = 1
TkVersion = float(_tkinter.TK_VERSION)
TclVersion = float(_tkinter.TCL_VERSION)
READABLE = _tkinter.READABLE
WRITABLE = _tkinter.WRITABLE
EXCEPTION = _tkinter.EXCEPTION
# These are not always defined, e.g. not on Win32 with Tk 8.0 :-(
try: _tkinter.createfilehandler
except AttributeError: _tkinter.createfilehandler = None
try: _tkinter.deletefilehandler
except AttributeError: _tkinter.deletefilehandler = None
_magic_re = re.compile(r'([\\{}])')
_space_re = re.compile(r'([\s])')
def _join(value):
    """Internal function: join the Tcl string forms of the items in
    *value* with single spaces."""
    return ' '.join(_stringify(item) for item in value)
def _stringify(value):
    """Internal function.

    Convert *value* to the string form Tcl expects: sequences are
    recursively joined and brace-wrapped, empty values become '{}',
    and special characters or spaces are escaped/brace-quoted.
    """
    if isinstance(value, (list, tuple)):
        if len(value) == 1:
            # A one-element sequence collapses to its element; re-wrap
            # in braces if the element itself is already brace-quoted.
            value = _stringify(value[0])
            if value[0] == '{':
                value = '{%s}' % value
        else:
            value = '{%s}' % _join(value)
    else:
        if isinstance(value, basestring):
            value = unicode(value)
        else:
            value = str(value)
        if not value:
            value = '{}'
        elif _magic_re.search(value):
            # add '\' before special characters and spaces
            value = _magic_re.sub(r'\\\1', value)
            value = _space_re.sub(r'\\\1', value)
        elif value[0] == '"' or _space_re.search(value):
            # Leading quote or embedded whitespace: brace-quote whole value.
            value = '{%s}' % value
    return value
def _flatten(tuple):
"""Internal function."""
res = ()
for item in tuple:
if type(item) in (TupleType, ListType):
res = res + _flatten(item)
elif item is not None:
res = res + (item,)
return res
try: _flatten = _tkinter._flatten
except AttributeError: pass
def _cnfmerge(cnfs):
"""Internal function."""
if type(cnfs) is DictionaryType:
return cnfs
elif type(cnfs) in (NoneType, StringType):
return cnfs
else:
cnf = {}
for c in _flatten(cnfs):
try:
cnf.update(c)
except (AttributeError, TypeError), msg:
print "_cnfmerge: fallback due to:", msg
for k, v in c.items():
cnf[k] = v
return cnf
try: _cnfmerge = _tkinter._cnfmerge
except AttributeError: pass
class Event:
    """Container for the properties of an event.

    Instances of this type are generated if one of the following events occurs:

    KeyPress, KeyRelease - for keyboard events
    ButtonPress, ButtonRelease, Motion, Enter, Leave, MouseWheel - for mouse events
    Visibility, Unmap, Map, Expose, FocusIn, FocusOut, Circulate,
    Colormap, Gravity, Reparent, Property, Destroy, Activate,
    Deactivate - for window events.

    If a callback function for one of these events is registered
    using bind, bind_all, bind_class, or tag_bind, the callback is
    called with an Event as first argument. It will have the
    following attributes (in braces are the event types for which
    the attribute is valid):

    serial - serial number of event
    num - mouse button pressed (ButtonPress, ButtonRelease)
    focus - whether the window has the focus (Enter, Leave)
    height - height of the exposed window (Configure, Expose)
    width - width of the exposed window (Configure, Expose)
    keycode - keycode of the pressed key (KeyPress, KeyRelease)
    state - state of the event as a number (ButtonPress, ButtonRelease,
                            Enter, KeyPress, KeyRelease,
                            Leave, Motion)
    state - state as a string (Visibility)
    time - when the event occurred
    x - x-position of the mouse
    y - y-position of the mouse
    x_root - x-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    y_root - y-position of the mouse on the screen
             (ButtonPress, ButtonRelease, KeyPress, KeyRelease, Motion)
    char - pressed character (KeyPress, KeyRelease)
    send_event - see X/Windows documentation
    keysym - keysym of the event as a string (KeyPress, KeyRelease)
    keysym_num - keysym of the event as a number (KeyPress, KeyRelease)
    type - type of the event as a number
    widget - widget in which the event occurred
    delta - delta of wheel movement (MouseWheel)
    """
    # Attributes are assigned dynamically by the event machinery; the
    # class body is intentionally empty.
    pass
_support_default_root = 1
_default_root = None
def NoDefaultRoot():
    """Inhibit setting of default root window.

    Call this function to inhibit that the first instance of
    Tk is used for windows without an explicit parent window.
    """
    global _support_default_root, _default_root
    _support_default_root = 0
    # Clear the module-level default root and remove the name entirely.
    _default_root = None
    del _default_root
def _tkerror(err):
"""Internal function."""
pass
def _exit(code=0):
"""Internal function. Calling it will raise the exception SystemExit."""
try:
code = int(code)
except ValueError:
pass
raise SystemExit, code
_varnum = 0
class Variable:
    """Class to define value holders for e.g. buttons.

    Subclasses StringVar, IntVar, DoubleVar, BooleanVar are specializations
    that constrain the type of the value returned from get()."""
    # Default value used when neither VALUE nor an existing Tcl variable
    # is available; subclasses override it.
    _default = ""
    def __init__(self, master=None, value=None, name=None):
        """Construct a variable

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        global _varnum
        if not master:
            master = _default_root
        self._master = master
        self._tk = master.tk
        if name:
            self._name = name
        else:
            # Generate a unique Tcl variable name.
            self._name = 'PY_VAR' + repr(_varnum)
            _varnum += 1
        if value is not None:
            self.set(value)
        elif not self._tk.call("info", "exists", self._name):
            self.set(self._default)
    def __del__(self):
        """Unset the variable in Tcl."""
        self._tk.globalunsetvar(self._name)
    def __str__(self):
        """Return the name of the variable in Tcl."""
        return self._name
    def set(self, value):
        """Set the variable to VALUE."""
        return self._tk.globalsetvar(self._name, value)
    def get(self):
        """Return value of variable."""
        return self._tk.globalgetvar(self._name)
    def trace_variable(self, mode, callback):
        """Define a trace callback for the variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CALLBACK must be a function which is called when
        the variable is read, written or undefined.

        Return the name of the callback.
        """
        cbname = self._master._register(callback)
        self._tk.call("trace", "variable", self._name, mode, cbname)
        return cbname
    # Historical short alias for trace_variable.
    trace = trace_variable
    def trace_vdelete(self, mode, cbname):
        """Delete the trace callback for a variable.

        MODE is one of "r", "w", "u" for read, write, undefine.
        CBNAME is the name of the callback returned from trace_variable or trace.
        """
        self._tk.call("trace", "vdelete", self._name, mode, cbname)
        self._master.deletecommand(cbname)
    def trace_vinfo(self):
        """Return all trace callback information."""
        return map(self._tk.split, self._tk.splitlist(
            self._tk.call("trace", "vinfo", self._name)))
    def __eq__(self, other):
        """Comparison for equality (==).

        Note: if the Variable's master matters to behavior
        also compare self._master == other._master
        """
        # NOTE(review): __eq__ without a matching __hash__/__ne__ is a
        # known quirk of this old-style class; left as-is.
        return self.__class__.__name__ == other.__class__.__name__ \
            and self._name == other._name
class StringVar(Variable):
    """Value holder for strings variables."""
    _default = ""
    def __init__(self, master=None, value=None, name=None):
        """Construct a string variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to "")
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return value of variable as string."""
        value = self._tk.globalgetvar(self._name)
        # Tcl may hand back a non-string object; coerce to str then.
        if isinstance(value, basestring):
            return value
        return str(value)
class IntVar(Variable):
    """Value holder for integer variables."""
    _default = 0
    def __init__(self, master=None, value=None, name=None):
        """Construct an integer variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to 0)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def set(self, value):
        """Set the variable to value, converting booleans to integers."""
        # Tcl has no bool type; store True/False as 1/0.
        if isinstance(value, bool):
            value = int(value)
        return Variable.set(self, value)

    def get(self):
        """Return the value of the variable as an integer."""
        return getint(self._tk.globalgetvar(self._name))
class DoubleVar(Variable):
    """Value holder for float variables."""
    _default = 0.0
    def __init__(self, master=None, value=None, name=None):
        """Construct a float variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to 0.0)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as a float."""
        # getdouble is the module-level alias for float().
        return getdouble(self._tk.globalgetvar(self._name))
class BooleanVar(Variable):
    """Value holder for boolean variables."""
    _default = False
    def __init__(self, master=None, value=None, name=None):
        """Construct a boolean variable.

        MASTER can be given as master widget.
        VALUE is an optional value (defaults to False)
        NAME is an optional Tcl name (defaults to PY_VARnum).

        If NAME matches an existing variable and VALUE is omitted
        then the existing value is retained.
        """
        Variable.__init__(self, master, value, name)

    def get(self):
        """Return the value of the variable as a bool."""
        # Delegate truth-value parsing of Tcl strings ("yes"/"no", "1"/"0",
        # "true"/"false") to the Tcl interpreter itself.
        return self._tk.getboolean(self._tk.globalgetvar(self._name))
def mainloop(n=0):
    """Run the main loop of Tcl.

    Requires that a default root window has already been created.
    """
    _default_root.tk.mainloop(n)
getint = int
getdouble = float
def getboolean(s):
    """Convert true and false to integer values 1 and 0.

    Delegates the parsing of Tcl boolean strings to the interpreter of
    the default root window (which must already exist).
    """
    return _default_root.tk.getboolean(s)
# Methods defined on both toplevel and interior widgets
class Misc:
    """Internal class.

    Base class which defines methods common for interior widgets."""

    # XXX font command?
    # Names of Tcl commands created via _register(); used by
    # deletecommand() and destroy() for cleanup.
    _tclCommands = None
def destroy(self):
"""Internal function.
Delete all Tcl commands created for
this widget in the Tcl interpreter."""
if self._tclCommands is not None:
for name in self._tclCommands:
#print '- Tkinter: deleted command', name
self.tk.deletecommand(name)
self._tclCommands = None
    def deletecommand(self, name):
        """Internal function.
        Delete the Tcl command provided in NAME."""
        #print '- Tkinter: deleted command', name
        self.tk.deletecommand(name)
        # Also drop NAME from the cleanup list; it may have been
        # registered elsewhere, in which case remove() raises ValueError
        # and the failure is deliberately ignored.
        try:
            self._tclCommands.remove(name)
        except ValueError:
            pass
    def tk_strictMotif(self, boolean=None):
        """Set Tcl internal variable, whether the look and feel
        should adhere to Motif.
        A parameter of 1 means adhere to Motif (e.g. no color
        change if mouse passes over slider).
        Returns the set value."""
        # With boolean=None the Tcl 'set' command just reads the
        # variable back without changing it.
        return self.tk.getboolean(self.tk.call(
            'set', 'tk_strictMotif', boolean))
    def tk_bisque(self):
        """Change the color scheme to light brown as used in Tk 3.6 and before."""
        self.tk.call('tk_bisque')
    def tk_setPalette(self, *args, **kw):
        """Set a new color scheme for all widget elements.
        A single color as argument will cause that all colors of Tk
        widget elements are derived from this.
        Alternatively several keyword parameters and its associated
        colors can be given. The following keywords are valid:
        activeBackground, foreground, selectColor,
        activeForeground, highlightBackground, selectBackground,
        background, highlightColor, selectForeground,
        disabledForeground, insertBackground, troughColor."""
        # Positional and keyword options are flattened into one Tcl
        # argument list.
        self.tk.call(('tk_setPalette',)
              + _flatten(args) + _flatten(kw.items()))
    def tk_menuBar(self, *args):
        """Do not use. Needed in Tk 3.6 and earlier."""
        pass # obsolete since Tk 4.0
    def wait_variable(self, name='PY_VAR'):
        """Wait until the variable is modified.
        A parameter of type IntVar, StringVar, DoubleVar or
        BooleanVar must be given."""
        # Blocks by entering a local Tcl event loop ('tkwait variable').
        self.tk.call('tkwait', 'variable', name)
    waitvar = wait_variable # XXX b/w compat
def wait_window(self, window=None):
"""Wait until a WIDGET is destroyed.
If no parameter is given self is used."""
if window is None:
window = self
self.tk.call('tkwait', 'window', window._w)
    def wait_visibility(self, window=None):
        """Wait until the visibility of a WIDGET changes
        (e.g. it appears).
        If no parameter is given self is used."""
        # Blocks by entering a local Tcl event loop ('tkwait visibility').
        if window is None:
            window = self
        self.tk.call('tkwait', 'visibility', window._w)
    def setvar(self, name='PY_VAR', value='1'):
        """Set Tcl variable NAME to VALUE."""
        self.tk.setvar(name, value)
    def getvar(self, name='PY_VAR'):
        """Return value of Tcl variable NAME."""
        return self.tk.getvar(name)
    # Class-level conversion aliases mirroring the module-level ones.
    getint = int
    getdouble = float
    def getboolean(self, s):
        """Return a boolean value for Tcl boolean values true and false given as parameter."""
        return self.tk.getboolean(s)
    def focus_set(self):
        """Direct input focus to this widget.
        If the application currently does not have the focus
        this widget will get the focus if the application gets
        the focus through the window manager."""
        self.tk.call('focus', self._w)
    focus = focus_set # XXX b/w compat?
    def focus_force(self):
        """Direct input focus to this widget even if the
        application does not have the focus. Use with
        caution!"""
        # '-force' makes Tk claim the focus regardless of the window
        # manager's focus policy.
        self.tk.call('focus', '-force', self._w)
def focus_get(self):
"""Return the widget which has currently the focus in the
application.
Use focus_displayof to allow working with several
displays. Return None if application does not have
the focus."""
name = self.tk.call('focus')
if name == 'none' or not name: return None
return self._nametowidget(name)
    def focus_displayof(self):
        """Return the widget which has currently the focus on the
        display where this widget is located.
        Return None if the application does not have the focus."""
        # Tcl reports 'none' (or an empty string) when nothing has focus.
        name = self.tk.call('focus', '-displayof', self._w)
        if name == 'none' or not name: return None
        return self._nametowidget(name)
    def focus_lastfor(self):
        """Return the widget which would have the focus if top level
        for this widget gets the focus from the window manager."""
        name = self.tk.call('focus', '-lastfor', self._w)
        if name == 'none' or not name: return None
        return self._nametowidget(name)
    def tk_focusFollowsMouse(self):
        """The widget under mouse will get automatically focus. Can not
        be disabled easily."""
        self.tk.call('tk_focusFollowsMouse')
    def tk_focusNext(self):
        """Return the next widget in the focus order which follows
        widget which has currently the focus.
        The focus order first goes to the next child, then to
        the children of the child recursively and then to the
        next sibling which is higher in the stacking order.  A
        widget is omitted if it has the takefocus resource set
        to 0."""
        # An empty reply means there is no next widget in the focus order.
        name = self.tk.call('tk_focusNext', self._w)
        if not name: return None
        return self._nametowidget(name)
    def tk_focusPrev(self):
        """Return previous widget in the focus order. See tk_focusNext for details."""
        name = self.tk.call('tk_focusPrev', self._w)
        if not name: return None
        return self._nametowidget(name)
    def after(self, ms, func=None, *args):
        """Call function once after given time.
        MS specifies the time in milliseconds. FUNC gives the
        function which shall be called. Additional parameters
        are given as parameters to the function call.  Return
        identifier to cancel scheduling with after_cancel."""
        if not func:
            # No callback: plain delay, blocks for MS milliseconds.
            # I'd rather use time.sleep(ms*0.001)
            self.tk.call('after', ms)
        else:
            def callit():
                try:
                    func(*args)
                finally:
                    # One-shot timer: deregister the Tcl command once it
                    # has fired so the command table does not leak.
                    try:
                        self.deletecommand(name)
                    except TclError:
                        pass
            name = self._register(callit)
            return self.tk.call('after', ms, name)
    def after_idle(self, func, *args):
        """Call FUNC once if the Tcl main loop has no event to
        process.
        Return an identifier to cancel the scheduling with
        after_cancel."""
        # 'idle' takes the place of the millisecond delay in after().
        return self.after('idle', func, *args)
    def after_cancel(self, id):
        """Cancel scheduling of function identified with ID.
        Identifier returned by after or after_idle must be
        given as first parameter."""
        # Look up the registered script so the Python-side command can
        # be deregistered as well; ignore failures for unknown ids.
        try:
            data = self.tk.call('after', 'info', id)
            # In Tk 8.3, splitlist returns: (script, type)
            # In Tk 8.4, splitlist may return (script, type) or (script,)
            script = self.tk.splitlist(data)[0]
            self.deletecommand(script)
        except TclError:
            pass
        self.tk.call('after', 'cancel', id)
    def bell(self, displayof=0):
        """Ring a display's bell."""
        # _displayof() expands DISPLAYOF into ('-displayof', window) or ().
        self.tk.call(('bell',) + self._displayof(displayof))
    # Clipboard handling:
    def clipboard_get(self, **kw):
        """Retrieve data from the clipboard on window's display.
        The window keyword defaults to the root window of the Tkinter
        application.
        The type keyword specifies the form in which the data is
        to be returned and should be an atom name such as STRING
        or FILE_NAME.  Type defaults to STRING, except on X11, where the default
        is to try UTF8_STRING and fall back to STRING.
        This command is equivalent to:
        selection_get(CLIPBOARD)
        """
        # On X11, try the richer UTF8_STRING type first and retry with
        # the Tk default (STRING) if the clipboard owner rejects it.
        if 'type' not in kw and self._windowingsystem == 'x11':
            try:
                kw['type'] = 'UTF8_STRING'
                return self.tk.call(('clipboard', 'get') + self._options(kw))
            except TclError:
                del kw['type']
        return self.tk.call(('clipboard', 'get') + self._options(kw))
    def clipboard_clear(self, **kw):
        """Clear the data in the Tk clipboard.
        A widget specified for the optional displayof keyword
        argument specifies the target display."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        self.tk.call(('clipboard', 'clear') + self._options(kw))
    def clipboard_append(self, string, **kw):
        """Append STRING to the Tk clipboard.
        A widget specified at the optional displayof keyword
        argument specifies the target display. The clipboard
        can be retrieved with selection_get."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        # '--' ends option parsing so STRING may itself start with '-'.
        self.tk.call(('clipboard', 'append') + self._options(kw)
              + ('--', string))
# XXX grab current w/o window argument
def grab_current(self):
"""Return widget which has currently the grab in this application
or None."""
name = self.tk.call('grab', 'current', self._w)
if not name: return None
return self._nametowidget(name)
    def grab_release(self):
        """Release grab for this widget if currently set."""
        self.tk.call('grab', 'release', self._w)
    def grab_set(self):
        """Set grab for this widget.
        A grab directs all events to this and descendant
        widgets in the application."""
        self.tk.call('grab', 'set', self._w)
    def grab_set_global(self):
        """Set global grab for this widget.
        A global grab directs all events to this and
        descendant widgets on the display. Use with caution -
        other applications do not get events anymore."""
        self.tk.call('grab', 'set', '-global', self._w)
def grab_status(self):
"""Return None, "local" or "global" if this widget has
no, a local or a global grab."""
status = self.tk.call('grab', 'status', self._w)
if status == 'none': status = None
return status
    # X resource / option database helpers.
    def option_add(self, pattern, value, priority = None):
        """Set a VALUE (second parameter) for an option
        PATTERN (first parameter).
        An optional third parameter gives the numeric priority
        (defaults to 80)."""
        self.tk.call('option', 'add', pattern, value, priority)
    def option_clear(self):
        """Clear the option database.
        It will be reloaded if option_add is called."""
        self.tk.call('option', 'clear')
    def option_get(self, name, className):
        """Return the value for an option NAME for this widget
        with CLASSNAME.
        Values with higher priority override lower values."""
        return self.tk.call('option', 'get', self._w, name, className)
    def option_readfile(self, fileName, priority = None):
        """Read file FILENAME into the option database.
        An optional second parameter gives the numeric
        priority."""
        self.tk.call('option', 'readfile', fileName, priority)
    def selection_clear(self, **kw):
        """Clear the current X selection."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        self.tk.call(('selection', 'clear') + self._options(kw))
    def selection_get(self, **kw):
        """Return the contents of the current X selection.
        A keyword parameter selection specifies the name of
        the selection and defaults to PRIMARY.  A keyword
        parameter displayof specifies a widget on the display
        to use. A keyword parameter type specifies the form of data to be
        fetched, defaulting to STRING except on X11, where UTF8_STRING is tried
        before STRING."""
        if 'displayof' not in kw: kw['displayof'] = self._w
        # Same UTF8_STRING-then-STRING fallback as clipboard_get().
        if 'type' not in kw and self._windowingsystem == 'x11':
            try:
                kw['type'] = 'UTF8_STRING'
                return self.tk.call(('selection', 'get') + self._options(kw))
            except TclError:
                del kw['type']
        return self.tk.call(('selection', 'get') + self._options(kw))
    def selection_handle(self, command, **kw):
        """Specify a function COMMAND to call if the X
        selection owned by this widget is queried by another
        application.
        This function must return the contents of the
        selection. The function will be called with the
        arguments OFFSET and LENGTH which allows the chunking
        of very long selections. The following keyword
        parameters can be provided:
        selection - name of the selection (default PRIMARY),
        type - type of the selection (e.g. STRING, FILE_NAME)."""
        # COMMAND is wrapped as a Tcl command so Tk can invoke it.
        name = self._register(command)
        self.tk.call(('selection', 'handle') + self._options(kw)
              + (self._w, name))
    def selection_own(self, **kw):
        """Become owner of X selection.
        A keyword parameter selection specifies the name of
        the selection (default PRIMARY)."""
        self.tk.call(('selection', 'own') +
                 self._options(kw) + (self._w,))
def selection_own_get(self, **kw):
"""Return owner of X selection.
The following keyword parameter can
be provided:
selection - name of the selection (default PRIMARY),
type - type of the selection (e.g. STRING, FILE_NAME)."""
if 'displayof' not in kw: kw['displayof'] = self._w
name = self.tk.call(('selection', 'own') + self._options(kw))
if not name: return None
return self._nametowidget(name)
    def send(self, interp, cmd, *args):
        """Send Tcl command CMD to different interpreter INTERP to be executed."""
        return self.tk.call(('send', interp, cmd) + args)
    def lower(self, belowThis=None):
        """Lower this widget in the stacking order."""
        self.tk.call('lower', self._w, belowThis)
    def tkraise(self, aboveThis=None):
        """Raise this widget in the stacking order."""
        self.tk.call('raise', self._w, aboveThis)
    # 'lift' avoids shadowing the (Python 2) builtin-free name 'raise'.
    lift = tkraise
    def colormodel(self, value=None):
        """Useless. Not implemented in Tk."""
        return self.tk.call('tk', 'colormodel', self._w, value)
    # X atom helpers.
    def winfo_atom(self, name, displayof=0):
        """Return integer which represents atom NAME."""
        args = ('winfo', 'atom') + self._displayof(displayof) + (name,)
        return getint(self.tk.call(args))
    def winfo_atomname(self, id, displayof=0):
        """Return name of atom with identifier ID."""
        args = ('winfo', 'atomname') \
               + self._displayof(displayof) + (id,)
        return self.tk.call(args)
    def winfo_cells(self):
        """Return number of cells in the colormap for this widget."""
        return getint(
            self.tk.call('winfo', 'cells', self._w))
def winfo_children(self):
"""Return a list of all widgets which are children of this widget."""
result = []
for child in self.tk.splitlist(
self.tk.call('winfo', 'children', self._w)):
try:
# Tcl sometimes returns extra windows, e.g. for
# menus; those need to be skipped
result.append(self._nametowidget(child))
except KeyError:
pass
return result
    def winfo_class(self):
        """Return window class name of this widget."""
        return self.tk.call('winfo', 'class', self._w)
    def winfo_colormapfull(self):
        """Return true if at the last color request the colormap was full."""
        return self.tk.getboolean(
            self.tk.call('winfo', 'colormapfull', self._w))
    def winfo_containing(self, rootX, rootY, displayof=0):
        """Return the widget which is at the root coordinates ROOTX, ROOTY."""
        args = ('winfo', 'containing') \
               + self._displayof(displayof) + (rootX, rootY)
        # An empty reply means no window of this application is there.
        name = self.tk.call(args)
        if not name: return None
        return self._nametowidget(name)
    def winfo_depth(self):
        """Return the number of bits per pixel."""
        return getint(self.tk.call('winfo', 'depth', self._w))
    def winfo_exists(self):
        """Return true if this widget exists."""
        return getint(
            self.tk.call('winfo', 'exists', self._w))
    def winfo_fpixels(self, number):
        """Return the number of pixels for the given distance NUMBER
        (e.g. "3c") as float."""
        return getdouble(self.tk.call(
            'winfo', 'fpixels', self._w, number))
    def winfo_geometry(self):
        """Return geometry string for this widget in the form "widthxheight+X+Y"."""
        return self.tk.call('winfo', 'geometry', self._w)
    def winfo_height(self):
        """Return height of this widget."""
        return getint(
            self.tk.call('winfo', 'height', self._w))
    def winfo_id(self):
        """Return identifier ID for this widget."""
        # NOTE(review): uses self.tk.getint instead of the module-level
        # alias, unlike the surrounding methods -- presumably so the Tcl
        # layer handles large window handles; verify before changing.
        return self.tk.getint(
            self.tk.call('winfo', 'id', self._w))
    def winfo_interps(self, displayof=0):
        """Return the name of all Tcl interpreters for this display."""
        args = ('winfo', 'interps') + self._displayof(displayof)
        return self.tk.splitlist(self.tk.call(args))
    def winfo_ismapped(self):
        """Return true if this widget is mapped."""
        return getint(
            self.tk.call('winfo', 'ismapped', self._w))
    def winfo_manager(self):
        """Return the window manager name for this widget."""
        return self.tk.call('winfo', 'manager', self._w)
    def winfo_name(self):
        """Return the name of this widget."""
        return self.tk.call('winfo', 'name', self._w)
    def winfo_parent(self):
        """Return the name of the parent of this widget."""
        return self.tk.call('winfo', 'parent', self._w)
    def winfo_pathname(self, id, displayof=0):
        """Return the pathname of the widget given by ID."""
        args = ('winfo', 'pathname') \
               + self._displayof(displayof) + (id,)
        return self.tk.call(args)
    def winfo_pixels(self, number):
        """Rounded integer value of winfo_fpixels."""
        return getint(
            self.tk.call('winfo', 'pixels', self._w, number))
    def winfo_pointerx(self):
        """Return the x coordinate of the pointer on the root window."""
        return getint(
            self.tk.call('winfo', 'pointerx', self._w))
    def winfo_pointerxy(self):
        """Return a tuple of x and y coordinates of the pointer on the root window."""
        # _getints splits the Tcl reply and converts each item to int.
        return self._getints(
            self.tk.call('winfo', 'pointerxy', self._w))
    def winfo_pointery(self):
        """Return the y coordinate of the pointer on the root window."""
        return getint(
            self.tk.call('winfo', 'pointery', self._w))
    def winfo_reqheight(self):
        """Return requested height of this widget."""
        return getint(
            self.tk.call('winfo', 'reqheight', self._w))
    def winfo_reqwidth(self):
        """Return requested width of this widget."""
        return getint(
            self.tk.call('winfo', 'reqwidth', self._w))
    def winfo_rgb(self, color):
        """Return tuple of decimal values for red, green, blue for
        COLOR in this widget."""
        return self._getints(
            self.tk.call('winfo', 'rgb', self._w, color))
    def winfo_rootx(self):
        """Return x coordinate of upper left corner of this widget on the
        root window."""
        return getint(
            self.tk.call('winfo', 'rootx', self._w))
    def winfo_rooty(self):
        """Return y coordinate of upper left corner of this widget on the
        root window."""
        return getint(
            self.tk.call('winfo', 'rooty', self._w))
    def winfo_screen(self):
        """Return the screen name of this widget."""
        return self.tk.call('winfo', 'screen', self._w)
    def winfo_screencells(self):
        """Return the number of the cells in the colormap of the screen
        of this widget."""
        return getint(
            self.tk.call('winfo', 'screencells', self._w))
    def winfo_screendepth(self):
        """Return the number of bits per pixel of the root window of the
        screen of this widget."""
        return getint(
            self.tk.call('winfo', 'screendepth', self._w))
    def winfo_screenheight(self):
        """Return the number of pixels of the height of the screen of this widget
        in pixel."""
        return getint(
            self.tk.call('winfo', 'screenheight', self._w))
    def winfo_screenmmheight(self):
        """Return the height of the screen of this widget in mm."""
        return getint(
            self.tk.call('winfo', 'screenmmheight', self._w))
    def winfo_screenmmwidth(self):
        """Return the width of the screen of this widget in mm."""
        return getint(
            self.tk.call('winfo', 'screenmmwidth', self._w))
    def winfo_screenvisual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the default
        colormodel of this screen."""
        return self.tk.call('winfo', 'screenvisual', self._w)
    def winfo_screenwidth(self):
        """Return the number of pixels of the width of the screen of
        this widget in pixel."""
        return getint(
            self.tk.call('winfo', 'screenwidth', self._w))
    def winfo_server(self):
        """Return information of the X-Server of the screen of this widget in
        the form "XmajorRminor vendor vendorVersion"."""
        return self.tk.call('winfo', 'server', self._w)
    def winfo_toplevel(self):
        """Return the toplevel widget of this widget."""
        return self._nametowidget(self.tk.call(
            'winfo', 'toplevel', self._w))
    def winfo_viewable(self):
        """Return true if the widget and all its higher ancestors are mapped."""
        return getint(
            self.tk.call('winfo', 'viewable', self._w))
    def winfo_visual(self):
        """Return one of the strings directcolor, grayscale, pseudocolor,
        staticcolor, staticgray, or truecolor for the
        colormodel of this widget."""
        return self.tk.call('winfo', 'visual', self._w)
    def winfo_visualid(self):
        """Return the X identifier for the visual for this widget."""
        return self.tk.call('winfo', 'visualid', self._w)
    def winfo_visualsavailable(self, includeids=0):
        """Return a list of all visuals available for the screen
        of this widget.
        Each item in the list consists of a visual name (see winfo_visual), a
        depth and if INCLUDEIDS=1 is given also the X identifier."""
        data = self.tk.split(
            self.tk.call('winfo', 'visualsavailable', self._w,
                 includeids and 'includeids' or None))
        # A single visual comes back as a plain string; normalize it to
        # a one-element list so the parsing below is uniform.
        if type(data) is StringType:
            data = [self.tk.split(data)]
        return map(self.__winfo_parseitem, data)
    def __winfo_parseitem(self, t):
        """Internal function."""
        # Keep the visual name, convert depth (and optional id) to int.
        return t[:1] + tuple(map(self.__winfo_getint, t[1:]))
    def __winfo_getint(self, x):
        """Internal function."""
        # Base 0 also accepts hex literals such as the X visual ids.
        return int(x, 0)
    def winfo_vrootheight(self):
        """Return the height of the virtual root window associated with this
        widget in pixels. If there is no virtual root window return the
        height of the screen."""
        return getint(
            self.tk.call('winfo', 'vrootheight', self._w))
    def winfo_vrootwidth(self):
        """Return the width of the virtual root window associated with this
        widget in pixel. If there is no virtual root window return the
        width of the screen."""
        return getint(
            self.tk.call('winfo', 'vrootwidth', self._w))
    def winfo_vrootx(self):
        """Return the x offset of the virtual root relative to the root
        window of the screen of this widget."""
        return getint(
            self.tk.call('winfo', 'vrootx', self._w))
    def winfo_vrooty(self):
        """Return the y offset of the virtual root relative to the root
        window of the screen of this widget."""
        return getint(
            self.tk.call('winfo', 'vrooty', self._w))
    def winfo_width(self):
        """Return the width of this widget."""
        return getint(
            self.tk.call('winfo', 'width', self._w))
    def winfo_x(self):
        """Return the x coordinate of the upper left corner of this widget
        in the parent."""
        return getint(
            self.tk.call('winfo', 'x', self._w))
    def winfo_y(self):
        """Return the y coordinate of the upper left corner of this widget
        in the parent."""
        return getint(
            self.tk.call('winfo', 'y', self._w))
    def update(self):
        """Enter event loop until all pending events have been processed by Tcl."""
        self.tk.call('update')
    def update_idletasks(self):
        """Enter event loop until all idle callbacks have been called. This
        will update the display of windows but not process events caused by
        the user."""
        self.tk.call('update', 'idletasks')
def bindtags(self, tagList=None):
"""Set or get the list of bindtags for this widget.
With no argument return the list of all bindtags associated with
this widget. With a list of strings as argument the bindtags are
set to this list. The bindtags determine in which order events are
processed (see bind)."""
if tagList is None:
return self.tk.splitlist(
self.tk.call('bindtags', self._w))
else:
self.tk.call('bindtags', self._w, tagList)
    def _bind(self, what, sequence, func, add, needcleanup=1):
        """Internal function."""
        if type(func) is StringType:
            # FUNC is already a Tcl script: install it verbatim.
            self.tk.call(what + (sequence, func))
        elif func:
            funcid = self._register(func, self._substitute,
                        needcleanup)
            # The generated Tcl script calls the registered command with
            # the %-substitutions; a Python return value of "break" maps
            # to a Tcl 'break', stopping further bindtag processing.
            cmd = ('%sif {"[%s %s]" == "break"} break\n'
                   %
                   (add and '+' or '',
                    funcid, self._subst_format_str))
            self.tk.call(what + (sequence, cmd))
            return funcid
        elif sequence:
            # No FUNC: return the script currently bound to SEQUENCE.
            return self.tk.call(what + (sequence,))
        else:
            # Neither FUNC nor SEQUENCE: list all bound sequences.
            return self.tk.splitlist(self.tk.call(what))
    def bind(self, sequence=None, func=None, add=None):
        """Bind to this widget at event SEQUENCE a call to function FUNC.
        SEQUENCE is a string of concatenated event
        patterns. An event pattern is of the form
        <MODIFIER-MODIFIER-TYPE-DETAIL> where MODIFIER is one
        of Control, Mod2, M2, Shift, Mod3, M3, Lock, Mod4, M4,
        Button1, B1, Mod5, M5 Button2, B2, Meta, M, Button3,
        B3, Alt, Button4, B4, Double, Button5, B5 Triple,
        Mod1, M1. TYPE is one of Activate, Enter, Map,
        ButtonPress, Button, Expose, Motion, ButtonRelease
        FocusIn, MouseWheel, Circulate, FocusOut, Property,
        Colormap, Gravity Reparent, Configure, KeyPress, Key,
        Unmap, Deactivate, KeyRelease Visibility, Destroy,
        Leave and DETAIL is the button number for ButtonPress,
        ButtonRelease and DETAIL is the Keysym for KeyPress and
        KeyRelease. Examples are
        <Control-Button-1> for pressing Control and mouse button 1 or
        <Alt-A> for pressing A and the Alt key (KeyPress can be omitted).
        An event pattern can also be a virtual event of the form
        <<AString>> where AString can be arbitrary. This
        event can be generated by event_generate.
        If events are concatenated they must appear shortly
        after each other.
        FUNC will be called if the event sequence occurs with an
        instance of Event as argument. If the return value of FUNC is
        "break" no further bound function is invoked.
        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function.
        Bind will return an identifier to allow deletion of the bound function with
        unbind without memory leak.
        If FUNC or SEQUENCE is omitted the bound function or list
        of bound events are returned."""
        # Delegate to _bind, using this widget's own path as bind tag.
        return self._bind(('bind', self._w), sequence, func, add)
    def unbind(self, sequence, funcid=None):
        """Unbind for this widget for event SEQUENCE  the
        function identified with FUNCID."""
        # Clearing the Tcl binding removes every function bound to
        # SEQUENCE on this widget, not just FUNCID.
        self.tk.call('bind', self._w, sequence, '')
        if funcid:
            self.deletecommand(funcid)
    def bind_all(self, sequence=None, func=None, add=None):
        """Bind to all widgets at an event SEQUENCE a call to function FUNC.
        An additional boolean parameter ADD specifies whether FUNC will
        be called additionally to the other bound function or whether
        it will replace the previous function. See bind for the return value."""
        # needcleanup=0: 'all' bindings outlive any single widget.
        return self._bind(('bind', 'all'), sequence, func, add, 0)
    def unbind_all(self, sequence):
        """Unbind for all widgets for event SEQUENCE all functions."""
        self.tk.call('bind', 'all' , sequence, '')
    def bind_class(self, className, sequence=None, func=None, add=None):
        """Bind to widgets with bindtag CLASSNAME at event
        SEQUENCE a call of function FUNC. An additional
        boolean parameter ADD specifies whether FUNC will be
        called additionally to the other bound function or
        whether it will replace the previous function. See bind for
        the return value."""
        # needcleanup=0: class bindings outlive any single widget.
        return self._bind(('bind', className), sequence, func, add, 0)
    def unbind_class(self, className, sequence):
        """Unbind for all widgets with bindtag CLASSNAME for event SEQUENCE
        all functions."""
        self.tk.call('bind', className , sequence, '')
    def mainloop(self, n=0):
        """Call the mainloop of Tk."""
        self.tk.mainloop(n)
    def quit(self):
        """Quit the Tcl interpreter. All widgets will be destroyed."""
        self.tk.quit()
    # The _get* helpers return None for an empty Tcl reply.
    def _getints(self, string):
        """Internal function."""
        if string:
            return tuple(map(getint, self.tk.splitlist(string)))
    def _getdoubles(self, string):
        """Internal function."""
        if string:
            return tuple(map(getdouble, self.tk.splitlist(string)))
    def _getboolean(self, string):
        """Internal function."""
        if string:
            return self.tk.getboolean(string)
def _displayof(self, displayof):
"""Internal function."""
if displayof:
return ('-displayof', displayof)
if displayof is None:
return ('-displayof', self._w)
return ()
    @property
    def _windowingsystem(self):
        """Internal function."""
        # The answer ('x11', 'win32' or 'aqua') cannot change at runtime,
        # so it is cached on the root widget after the first query.
        try:
            return self._root()._windowingsystem_cached
        except AttributeError:
            ws = self._root()._windowingsystem_cached = \
                        self.tk.call('tk', 'windowingsystem')
            return ws
    def _options(self, cnf, kw = None):
        """Internal function."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        else:
            cnf = _cnfmerge(cnf)
        res = ()
        for k, v in cnf.items():
            if v is not None:
                # A trailing underscore lets callers use reserved words
                # (e.g. 'class_') as option names.
                if k[-1] == '_': k = k[:-1]
                if hasattr(v, '__call__'):
                    # Callables become registered Tcl command names.
                    v = self._register(v)
                elif isinstance(v, (tuple, list)):
                    nv = []
                    for item in v:
                        if not isinstance(item, (basestring, int)):
                            break
                        elif isinstance(item, int):
                            nv.append('%d' % item)
                        else:
                            # format it to proper Tcl code if it contains space
                            nv.append(_stringify(item))
                    else:
                        # for/else: join only when every item was str/int;
                        # otherwise the sequence is passed through as-is.
                        v = ' '.join(nv)
                res = res + ('-'+k, v)
        return res
def nametowidget(self, name):
"""Return the Tkinter instance of a widget identified by
its Tcl name NAME."""
name = str(name).split('.')
w = self
if not name[0]:
w = w._root()
name = name[1:]
for n in name:
if not n:
break
w = w.children[n]
return w
_nametowidget = nametowidget
    def _register(self, func, subst=None, needcleanup=1):
        """Internal function.  Return a newly created Tcl function. If this
        function is called, the Python function FUNC will
        be executed. An optional function SUBST can
        be given which will be executed before FUNC."""
        f = CallWrapper(func, subst, self).__call__
        # The wrapper's id() guarantees a unique Tcl command name ...
        name = repr(id(f))
        # ... while the function's own name (unwrap bound methods via
        # im_func first) is appended for easier debugging.
        try:
            func = func.im_func
        except AttributeError:
            pass
        try:
            name = name + func.__name__
        except AttributeError:
            pass
        self.tk.createcommand(name, f)
        if needcleanup:
            # Remember the command so destroy() can deregister it later.
            if self._tclCommands is None:
                self._tclCommands = []
            self._tclCommands.append(name)
        return name
    register = _register
    def _root(self):
        """Internal function."""
        # Walk up the master chain until the (masterless) root widget.
        w = self
        while w.master: w = w.master
        return w
    # %-substitution fields requested from Tk for event callbacks;
    # _substitute() below unpacks them in exactly this order.
    _subst_format = ('%#', '%b', '%f', '%h', '%k',
             '%s', '%t', '%w', '%x', '%y',
             '%A', '%E', '%K', '%N', '%W', '%T', '%X', '%Y', '%D')
    _subst_format_str = " ".join(_subst_format)
    def _substitute(self, *args):
        """Internal function."""
        # Convert the raw %-substitution strings delivered by Tk (see
        # _subst_format) into a single Event instance.
        if len(args) != len(self._subst_format): return args
        getboolean = self.tk.getboolean

        getint = int
        def getint_event(s):
            """Tk changed behavior in 8.4.2, returning "??" rather more often."""
            try:
                return int(s)
            except ValueError:
                return s

        nsign, b, f, h, k, s, t, w, x, y, A, E, K, N, W, T, X, Y, D = args
        # Missing: (a, c, d, m, o, v, B, R)
        e = Event()
        # serial field: valid vor all events
        # number of button: ButtonPress and ButtonRelease events only
        # height field: Configure, ConfigureRequest, Create,
        # ResizeRequest, and Expose events only
        # keycode field: KeyPress and KeyRelease events only
        # time field: "valid for events that contain a time field"
        # width field: Configure, ConfigureRequest, Create, ResizeRequest,
        # and Expose events only
        # x field: "valid for events that contain a x field"
        # y field: "valid for events that contain a y field"
        # keysym as decimal: KeyPress and KeyRelease events only
        # x_root, y_root fields: ButtonPress, ButtonRelease, KeyPress,
        # KeyRelease,and Motion events
        e.serial = getint(nsign)
        e.num = getint_event(b)
        try: e.focus = getboolean(f)
        except TclError: pass
        e.height = getint_event(h)
        e.keycode = getint_event(k)
        e.state = getint_event(s)
        e.time = getint_event(t)
        e.width = getint_event(w)
        e.x = getint_event(x)
        e.y = getint_event(y)
        e.char = A
        try: e.send_event = getboolean(E)
        except TclError: pass
        e.keysym = K
        e.keysym_num = getint_event(N)
        e.type = T
        try:
            e.widget = self._nametowidget(W)
        except KeyError:
            # Window without a Tkinter counterpart: keep the raw name.
            e.widget = W
        e.x_root = getint_event(X)
        e.y_root = getint_event(Y)
        try:
            e.delta = getint(D)
        except ValueError:
            e.delta = 0
        return (e,)
def _report_exception(self):
"""Internal function."""
import sys
exc, val, tb = sys.exc_type, sys.exc_value, sys.exc_traceback
root = self._root()
root.report_callback_exception(exc, val, tb)
    def _configure(self, cmd, cnf, kw):
        """Internal function."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        elif cnf:
            cnf = _cnfmerge(cnf)
        if cnf is None:
            # Query mode: return all options as {name: description-tuple}.
            cnf = {}
            for x in self.tk.split(
                self.tk.call(_flatten((self._w, cmd)))):
                cnf[x[0][1:]] = (x[0][1:],) + x[1:]
            return cnf
        if type(cnf) is StringType:
            # Single-option query: return that option's description tuple.
            x = self.tk.split(
                self.tk.call(_flatten((self._w, cmd, '-'+cnf))))
            return (x[0][1:],) + x[1:]
        # Set mode: apply the given option/value pairs.
        self.tk.call(_flatten((self._w, cmd)) + self._options(cnf))
    # These used to be defined in Widget:
    def configure(self, cnf=None, **kw):
        """Configure resources of a widget.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method keys.
        """
        return self._configure('configure', cnf, kw)
    config = configure
    def cget(self, key):
        """Return the resource value for a KEY given as string."""
        return self.tk.call(self._w, 'cget', '-' + key)
    # widget[key] is equivalent to widget.cget(key).
    __getitem__ = cget
    def __setitem__(self, key, value):
        # widget[key] = value is equivalent to widget.configure(key=value).
        self.configure({key: value})
    def __contains__(self, key):
        # Explicitly unsupported: without this, 'in' would fall back to
        # __getitem__ iteration and give misleading results.
        raise TypeError("Tkinter objects don't support 'in' tests.")
    def keys(self):
        """Return a list of all resource names of this widget."""
        # Each 'configure' entry starts with the option name prefixed by
        # '-'; strip that dash.
        return map(lambda x: x[0][1:],
               self.tk.split(self.tk.call(self._w, 'configure')))
    def __str__(self):
        """Return the window path name of this widget."""
        return self._w
    # Pack methods that apply to the master
    # Unique sentinel: distinguishes "argument omitted" from None/False.
    _noarg_ = ['_noarg_']
    def pack_propagate(self, flag=_noarg_):
        """Set or get the status for propagation of geometry information.

        A boolean argument specifies whether the geometry information
        of the slaves will determine the size of this widget. If no argument
        is given the current setting will be returned.
        """
        if flag is Misc._noarg_:
            return self._getboolean(self.tk.call(
                'pack', 'propagate', self._w))
        else:
            self.tk.call('pack', 'propagate', self._w, flag)
    propagate = pack_propagate
    def pack_slaves(self):
        """Return a list of all slaves of this widget
        in its packing order."""
        return map(self._nametowidget,
               self.tk.splitlist(
                   self.tk.call('pack', 'slaves', self._w)))
    slaves = pack_slaves
    # Place method that applies to the master
    def place_slaves(self):
        """Return a list of all slaves of this widget
        in its packing order."""
        # Same as pack_slaves, but queries the 'place' geometry manager.
        return map(self._nametowidget,
               self.tk.splitlist(
                   self.tk.call(
                       'place', 'slaves', self._w)))
    # Grid methods that apply to the master
    def grid_bbox(self, column=None, row=None, col2=None, row2=None):
        """Return a tuple of integer coordinates for the bounding
        box of this widget controlled by the geometry manager grid.

        If COLUMN, ROW is given the bounding box applies from
        the cell with row and column 0 to the specified
        cell. If COL2 and ROW2 are given the bounding box
        starts at that cell.

        The returned integers specify the offset of the upper left
        corner in the master widget and the width and height.
        """
        args = ('grid', 'bbox', self._w)
        if column is not None and row is not None:
            args = args + (column, row)
        if col2 is not None and row2 is not None:
            args = args + (col2, row2)
        # _getints yields None for an empty reply; 'or None' also maps an
        # empty tuple to None.
        return self._getints(self.tk.call(*args)) or None
    bbox = grid_bbox
def _grid_configure(self, command, index, cnf, kw):
    """Internal function.

    Shared worker for grid_columnconfigure/grid_rowconfigure:
    COMMAND is 'columnconfigure' or 'rowconfigure', INDEX the
    row/column number, CNF/KW the options (or a single option
    name to query).
    """
    # A bare option-name string queries a single option; normalise
    # trailing-underscore and missing-dash forms to '-option'.
    # (StringType is the Python 2 types.StringType alias.)
    if type(cnf) is StringType and not kw:
        if cnf[-1:] == '_':
            cnf = cnf[:-1]
        if cnf[:1] != '-':
            cnf = '-'+cnf
        options = (cnf,)
    else:
        options = self._options(cnf, kw)
    if not options:
        # No options at all: query everything and build a dict of
        # option name -> numeric value (None for empty values).
        res = self.tk.call('grid',
                           command, self._w, index)
        words = self.tk.splitlist(res)
        dict = {}
        for i in range(0, len(words), 2):
            key = words[i][1:]
            value = words[i+1]
            if not value:
                value = None
            elif '.' in value:
                # getdouble/getint are module-level float/int aliases.
                value = getdouble(value)
            else:
                value = getint(value)
            dict[key] = value
        return dict
    res = self.tk.call(
        ('grid', command, self._w, index)
        + options)
    # Exactly one option means a single-value query: convert the result.
    if len(options) == 1:
        if not res: return None
        # In Tk 7.5, -width can be a float
        if '.' in res: return getdouble(res)
        return getint(res)
def grid_columnconfigure(self, index, cnf={}, **kw):
    """Configure column INDEX of a grid.

    Valid resources are minsize (minimum size of the column),
    weight (how much does additional space propagate to this column)
    and pad (how much space to let additionally)."""
    # All real work happens in the shared _grid_configure helper.
    return self._grid_configure('columnconfigure', index, cnf, kw)
columnconfigure = grid_columnconfigure
def grid_location(self, x, y):
    """Return a tuple of column and row which identify the cell
    at which the pixel at position X and Y inside the master
    widget is located."""
    # None when Tk returns nothing useful.
    return self._getints(
        self.tk.call(
            'grid', 'location', self._w, x, y)) or None
def grid_propagate(self, flag=_noarg_):
    """Set or get the status for propagation of geometry information.

    A boolean argument specifies whether the geometry information
    of the slaves will determine the size of this widget. If no argument
    is given, the current setting will be returned.
    """
    # Same sentinel trick as pack_propagate: distinguish "no argument"
    # from a falsy flag value.
    if flag is Misc._noarg_:
        return self._getboolean(self.tk.call(
            'grid', 'propagate', self._w))
    else:
        self.tk.call('grid', 'propagate', self._w, flag)
def grid_rowconfigure(self, index, cnf={}, **kw):
    """Configure row INDEX of a grid.

    Valid resources are minsize (minimum size of the row),
    weight (how much does additional space propagate to this row)
    and pad (how much space to let additionally)."""
    # All real work happens in the shared _grid_configure helper.
    return self._grid_configure('rowconfigure', index, cnf, kw)
rowconfigure = grid_rowconfigure
def grid_size(self):
    """Return a tuple of the number of column and rows in the grid."""
    # None for an empty Tk answer.
    return self._getints(
        self.tk.call('grid', 'size', self._w)) or None
size = grid_size
def grid_slaves(self, row=None, column=None):
    """Return a list of all slaves of this widget
    in its packing order."""
    # Build the optional -row/-column filters for the Tk command.
    filters = ()
    if row is not None:
        filters = filters + ('-row', row)
    if column is not None:
        filters = filters + ('-column', column)
    names = self.tk.splitlist(
        self.tk.call(('grid', 'slaves', self._w) + filters))
    # Translate the returned window path names back into widgets.
    return map(self._nametowidget, names)
# Support for the "event" command, new in Tk 4.2.
# By Case Roole.
def event_add(self, virtual, *sequences):
    """Bind a virtual event VIRTUAL (of the form <<Name>>)
    to an event SEQUENCE such that the virtual event is triggered
    whenever SEQUENCE occurs."""
    # The whole command is handed to Tk as one tuple.
    args = ('event', 'add', virtual) + sequences
    self.tk.call(args)
def event_delete(self, virtual, *sequences):
    """Unbind a virtual event VIRTUAL from SEQUENCE."""
    # Mirror image of event_add.
    args = ('event', 'delete', virtual) + sequences
    self.tk.call(args)
def event_generate(self, sequence, **kw):
    """Generate an event SEQUENCE. Additional
    keyword arguments specify parameter of the event
    (e.g. x, y, rootx, rooty)."""
    cmd = ('event', 'generate', self._w, sequence)
    # Every keyword parameter becomes a '-name value' option pair,
    # with the value converted to its string form for Tcl.
    for name in kw:
        cmd = cmd + ('-%s' % name, str(kw[name]))
    self.tk.call(cmd)
def event_info(self, virtual=None):
    """Return a list of all virtual events or the information
    about the SEQUENCE bound to the virtual event VIRTUAL."""
    # With virtual=None Tk lists every defined virtual event.
    return self.tk.splitlist(
        self.tk.call('event', 'info', virtual))
# Image related commands
def image_names(self):
    """Return a list of all existing image names."""
    # NOTE(review): tk.call's raw result is returned without
    # splitlist(); it may be a Tcl string rather than a Python
    # list — confirm before relying on the docstring's "list".
    return self.tk.call('image', 'names')
def image_types(self):
    """Return a list of all available image types (e.g. photo bitmap)."""
    # Same caveat as image_names: the raw Tcl result is returned.
    return self.tk.call('image', 'types')
class CallWrapper:
    """Internal class. Stores function to call when some user
    defined Tcl function is called e.g. after an event occurred."""
    def __init__(self, func, subst, widget):
        """Store FUNC, SUBST and WIDGET as members."""
        self.func = func
        self.subst = subst
        self.widget = widget
    def __call__(self, *args):
        """Apply first function SUBST to arguments, then FUNC."""
        try:
            # SUBST (if any) converts the raw Tcl event arguments
            # into the values FUNC expects.
            if self.subst:
                args = self.subst(*args)
            return self.func(*args)
        except SystemExit, msg:
            # Let explicit interpreter exits propagate untouched.
            raise SystemExit, msg
        except:
            # Any other error is reported via the widget so a failing
            # callback cannot blow up the Tk mainloop.
            self.widget._report_exception()
class XView:
    """Mix-in class for querying and changing the horizontal position
    of a widget's window."""

    def xview(self, *args):
        """Query and change the horizontal position of the view."""
        answer = self.tk.call(self._w, 'xview', *args)
        if args:
            # A subcommand was executed; nothing to report back.
            return None
        return self._getdoubles(answer)

    def xview_moveto(self, fraction):
        """Adjust the view so that FRACTION of the total width of the
        canvas is off-screen to the left."""
        self.tk.call(self._w, 'xview', 'moveto', fraction)

    def xview_scroll(self, number, what):
        """Shift the x-view by NUMBER, measured in "units" or "pages"
        (WHAT)."""
        self.tk.call(self._w, 'xview', 'scroll', number, what)
class YView:
    """Mix-in class for querying and changing the vertical position
    of a widget's window."""

    def yview(self, *args):
        """Query and change the vertical position of the view."""
        answer = self.tk.call(self._w, 'yview', *args)
        if args:
            # A subcommand was executed; nothing to report back.
            return None
        return self._getdoubles(answer)

    def yview_moveto(self, fraction):
        """Adjust the view so that FRACTION of the total height of the
        canvas is off-screen to the top."""
        self.tk.call(self._w, 'yview', 'moveto', fraction)

    def yview_scroll(self, number, what):
        """Shift the y-view by NUMBER, measured in "units" or "pages"
        (WHAT)."""
        self.tk.call(self._w, 'yview', 'scroll', number, what)
class Wm:
    """Provides functions for the communication with the window manager."""
    # Each wm_* method is a thin wrapper around the Tcl 'wm' command;
    # a short alias without the wm_ prefix follows every method.
    def wm_aspect(self,
                  minNumer=None, minDenom=None,
                  maxNumer=None, maxDenom=None):
        """Instruct the window manager to set the aspect ratio (width/height)
        of this widget to be between MINNUMER/MINDENOM and MAXNUMER/MAXDENOM. Return a tuple
        of the actual values if no argument is given."""
        return self._getints(
            self.tk.call('wm', 'aspect', self._w,
                         minNumer, minDenom,
                         maxNumer, maxDenom))
    aspect = wm_aspect
    def wm_attributes(self, *args):
        """This subcommand returns or sets platform specific attributes

        The first form returns a list of the platform specific flags and
        their values. The second form returns the value for the specific
        option. The third form sets one or more of the values. The values
        are as follows:

        On Windows, -disabled gets or sets whether the window is in a
        disabled state. -toolwindow gets or sets the style of the window
        to toolwindow (as defined in the MSDN). -topmost gets or sets
        whether this is a topmost window (displays above all other
        windows).

        On Macintosh, XXXXX

        On Unix, there are currently no special attribute values.
        """
        args = ('wm', 'attributes', self._w) + args
        return self.tk.call(args)
    attributes=wm_attributes
    def wm_client(self, name=None):
        """Store NAME in WM_CLIENT_MACHINE property of this widget. Return
        current value."""
        return self.tk.call('wm', 'client', self._w, name)
    client = wm_client
    def wm_colormapwindows(self, *wlist):
        """Store list of window names (WLIST) into WM_COLORMAPWINDOWS property
        of this widget. This list contains windows whose colormaps differ from their
        parents. Return current list of widgets if WLIST is empty."""
        if len(wlist) > 1:
            wlist = (wlist,) # Tk needs a list of windows here
        args = ('wm', 'colormapwindows', self._w) + wlist
        return map(self._nametowidget, self.tk.call(args))
    colormapwindows = wm_colormapwindows
    def wm_command(self, value=None):
        """Store VALUE in WM_COMMAND property. It is the command
        which shall be used to invoke the application. Return current
        command if VALUE is None."""
        return self.tk.call('wm', 'command', self._w, value)
    command = wm_command
    def wm_deiconify(self):
        """Deiconify this widget. If it was never mapped it will not be mapped.
        On Windows it will raise this widget and give it the focus."""
        return self.tk.call('wm', 'deiconify', self._w)
    deiconify = wm_deiconify
    def wm_focusmodel(self, model=None):
        """Set focus model to MODEL. "active" means that this widget will claim
        the focus itself, "passive" means that the window manager shall give
        the focus. Return current focus model if MODEL is None."""
        return self.tk.call('wm', 'focusmodel', self._w, model)
    focusmodel = wm_focusmodel
    def wm_frame(self):
        """Return identifier for decorative frame of this widget if present."""
        return self.tk.call('wm', 'frame', self._w)
    frame = wm_frame
    def wm_geometry(self, newGeometry=None):
        """Set geometry to NEWGEOMETRY of the form =widthxheight+x+y. Return
        current value if None is given."""
        return self.tk.call('wm', 'geometry', self._w, newGeometry)
    geometry = wm_geometry
    def wm_grid(self,
                baseWidth=None, baseHeight=None,
                widthInc=None, heightInc=None):
        """Instruct the window manager that this widget shall only be
        resized on grid boundaries. WIDTHINC and HEIGHTINC are the width and
        height of a grid unit in pixels. BASEWIDTH and BASEHEIGHT are the
        number of grid units requested in Tk_GeometryRequest."""
        return self._getints(self.tk.call(
            'wm', 'grid', self._w,
            baseWidth, baseHeight, widthInc, heightInc))
    grid = wm_grid
    def wm_group(self, pathName=None):
        """Set the group leader widgets for related widgets to PATHNAME. Return
        the group leader of this widget if None is given."""
        return self.tk.call('wm', 'group', self._w, pathName)
    group = wm_group
    def wm_iconbitmap(self, bitmap=None, default=None):
        """Set bitmap for the iconified widget to BITMAP. Return
        the bitmap if None is given.

        Under Windows, the DEFAULT parameter can be used to set the icon
        for the widget and any descendents that don't have an icon set
        explicitly.  DEFAULT can be the relative path to a .ico file
        (example: root.iconbitmap(default='myicon.ico') ).  See Tk
        documentation for more information."""
        # DEFAULT takes precedence; BITMAP is ignored when both are given.
        if default:
            return self.tk.call('wm', 'iconbitmap', self._w, '-default', default)
        else:
            return self.tk.call('wm', 'iconbitmap', self._w, bitmap)
    iconbitmap = wm_iconbitmap
    def wm_iconify(self):
        """Display widget as icon."""
        return self.tk.call('wm', 'iconify', self._w)
    iconify = wm_iconify
    def wm_iconmask(self, bitmap=None):
        """Set mask for the icon bitmap of this widget. Return the
        mask if None is given."""
        return self.tk.call('wm', 'iconmask', self._w, bitmap)
    iconmask = wm_iconmask
    def wm_iconname(self, newName=None):
        """Set the name of the icon for this widget. Return the name if
        None is given."""
        return self.tk.call('wm', 'iconname', self._w, newName)
    iconname = wm_iconname
    def wm_iconposition(self, x=None, y=None):
        """Set the position of the icon of this widget to X and Y. Return
        a tuple of the current values of X and Y if None is given."""
        return self._getints(self.tk.call(
            'wm', 'iconposition', self._w, x, y))
    iconposition = wm_iconposition
    def wm_iconwindow(self, pathName=None):
        """Set widget PATHNAME to be displayed instead of icon. Return the current
        value if None is given."""
        return self.tk.call('wm', 'iconwindow', self._w, pathName)
    iconwindow = wm_iconwindow
    def wm_maxsize(self, width=None, height=None):
        """Set max WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'maxsize', self._w, width, height))
    maxsize = wm_maxsize
    def wm_minsize(self, width=None, height=None):
        """Set min WIDTH and HEIGHT for this widget. If the window is gridded
        the values are given in grid units. Return the current values if None
        is given."""
        return self._getints(self.tk.call(
            'wm', 'minsize', self._w, width, height))
    minsize = wm_minsize
    def wm_overrideredirect(self, boolean=None):
        """Instruct the window manager to ignore this widget
        if BOOLEAN is given with 1. Return the current value if None
        is given."""
        return self._getboolean(self.tk.call(
            'wm', 'overrideredirect', self._w, boolean))
    overrideredirect = wm_overrideredirect
    def wm_positionfrom(self, who=None):
        """Instruct the window manager that the position of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'positionfrom', self._w, who)
    positionfrom = wm_positionfrom
    def wm_protocol(self, name=None, func=None):
        """Bind function FUNC to command NAME for this widget.
        Return the function bound to NAME if None is given. NAME could be
        e.g. "WM_SAVE_YOURSELF" or "WM_DELETE_WINDOW"."""
        # Callables must be registered as Tcl commands first; anything
        # else (e.g. an already-registered command name) passes through.
        if hasattr(func, '__call__'):
            command = self._register(func)
        else:
            command = func
        return self.tk.call(
            'wm', 'protocol', self._w, name, command)
    protocol = wm_protocol
    def wm_resizable(self, width=None, height=None):
        """Instruct the window manager whether this widget can be resized
        in WIDTH or HEIGHT. Both values are boolean values."""
        return self.tk.call('wm', 'resizable', self._w, width, height)
    resizable = wm_resizable
    def wm_sizefrom(self, who=None):
        """Instruct the window manager that the size of this widget shall
        be defined by the user if WHO is "user", and by its own policy if WHO is
        "program"."""
        return self.tk.call('wm', 'sizefrom', self._w, who)
    sizefrom = wm_sizefrom
    def wm_state(self, newstate=None):
        """Query or set the state of this widget as one of normal, icon,
        iconic (see wm_iconwindow), withdrawn, or zoomed (Windows only)."""
        return self.tk.call('wm', 'state', self._w, newstate)
    state = wm_state
    def wm_title(self, string=None):
        """Set the title of this widget."""
        return self.tk.call('wm', 'title', self._w, string)
    title = wm_title
    def wm_transient(self, master=None):
        """Instruct the window manager that this widget is transient
        with regard to widget MASTER."""
        return self.tk.call('wm', 'transient', self._w, master)
    transient = wm_transient
    def wm_withdraw(self):
        """Withdraw this widget from the screen such that it is unmapped
        and forgotten by the window manager. Re-draw it with wm_deiconify."""
        return self.tk.call('wm', 'withdraw', self._w)
    withdraw = wm_withdraw
class Tk(Misc, Wm):
    """Toplevel widget of Tk which represents mostly the main window
    of an application. It has an associated Tcl interpreter."""
    # The root window's Tk path name is always '.'.
    _w = '.'
    def __init__(self, screenName=None, baseName=None, className='Tk',
                 useTk=1, sync=0, use=None):
        """Return a new Toplevel widget on screen SCREENNAME. A new Tcl interpreter will
        be created. BASENAME will be used for the identification of the profile file (see
        readprofile).
        It is constructed from sys.argv[0] without extensions if None is given. CLASSNAME
        is the name of the widget class."""
        self.master = None
        self.children = {}
        self._tkloaded = 0
        # to avoid recursions in the getattr code in case of failure, we
        # ensure that self.tk is always _something_.
        self.tk = None
        if baseName is None:
            import sys, os
            baseName = os.path.basename(sys.argv[0])
            baseName, ext = os.path.splitext(baseName)
            # Unusual extensions are kept as part of the base name.
            if ext not in ('.py', '.pyc', '.pyo'):
                baseName = baseName + ext
        interactive = 0
        self.tk = _tkinter.create(screenName, baseName, className, interactive, wantobjects, useTk, sync, use)
        if useTk:
            self._loadtk()
        if not sys.flags.ignore_environment:
            # Issue #16248: Honor the -E flag to avoid code injection.
            self.readprofile(baseName, className)
    def loadtk(self):
        # Public entry point: initialise Tk lazily, at most once.
        if not self._tkloaded:
            self.tk.loadtk()
            self._loadtk()
    def _loadtk(self):
        """Internal function. Finish Tk initialisation: check versions,
        register core Tcl commands and install the default root."""
        self._tkloaded = 1
        global _default_root
        # Version sanity checks
        tk_version = self.tk.getvar('tk_version')
        if tk_version != _tkinter.TK_VERSION:
            raise RuntimeError, \
            "tk.h version (%s) doesn't match libtk.a version (%s)" \
            % (_tkinter.TK_VERSION, tk_version)
        # Under unknown circumstances, tcl_version gets coerced to float
        tcl_version = str(self.tk.getvar('tcl_version'))
        if tcl_version != _tkinter.TCL_VERSION:
            raise RuntimeError, \
            "tcl.h version (%s) doesn't match libtcl.a version (%s)" \
            % (_tkinter.TCL_VERSION, tcl_version)
        if TkVersion < 4.0:
            raise RuntimeError, \
            "Tk 4.0 or higher is required; found Tk %s" \
            % str(TkVersion)
        # Create and register the tkerror and exit commands
        # We need to inline parts of _register here; _register
        # would register differently-named commands.
        if self._tclCommands is None:
            self._tclCommands = []
        self.tk.createcommand('tkerror', _tkerror)
        self.tk.createcommand('exit', _exit)
        self._tclCommands.append('tkerror')
        self._tclCommands.append('exit')
        if _support_default_root and not _default_root:
            _default_root = self
        self.protocol("WM_DELETE_WINDOW", self.destroy)
    def destroy(self):
        """Destroy this and all descendants widgets. This will
        end the application of this Tcl interpreter."""
        for c in self.children.values(): c.destroy()
        self.tk.call('destroy', self._w)
        Misc.destroy(self)
        global _default_root
        # Drop the module-level default root if it was this window.
        if _support_default_root and _default_root is self:
            _default_root = None
    def readprofile(self, baseName, className):
        """Internal function. It reads BASENAME.tcl and CLASSNAME.tcl into
        the Tcl Interpreter and calls execfile on BASENAME.py and CLASSNAME.py if
        such a file exists in the home directory."""
        import os
        if 'HOME' in os.environ: home = os.environ['HOME']
        else: home = os.curdir
        class_tcl = os.path.join(home, '.%s.tcl' % className)
        class_py = os.path.join(home, '.%s.py' % className)
        base_tcl = os.path.join(home, '.%s.tcl' % baseName)
        base_py = os.path.join(home, '.%s.py' % baseName)
        # The profile scripts run with 'self' and the Tkinter namespace
        # available in their globals.
        dir = {'self': self}
        exec 'from Tkinter import *' in dir
        # Class-wide profiles run before per-application ones.
        if os.path.isfile(class_tcl):
            self.tk.call('source', class_tcl)
        if os.path.isfile(class_py):
            execfile(class_py, dir)
        if os.path.isfile(base_tcl):
            self.tk.call('source', base_tcl)
        if os.path.isfile(base_py):
            execfile(base_py, dir)
    def report_callback_exception(self, exc, val, tb):
        """Internal function. It reports exception on sys.stderr."""
        import traceback, sys
        sys.stderr.write("Exception in Tkinter callback\n")
        # Mirror the interpreter's sys.last_* convention for debuggers.
        sys.last_type = exc
        sys.last_value = val
        sys.last_traceback = tb
        traceback.print_exception(exc, val, tb)
    def __getattr__(self, attr):
        "Delegate attribute access to the interpreter object"
        return getattr(self.tk, attr)
# Ideally, the classes Pack, Place and Grid disappear, the
# pack/place/grid methods are defined on the Widget class, and
# everybody uses w.pack_whatever(...) instead of Pack.whatever(w,
# ...), with pack(), place() and grid() being short for
# pack_configure(), place_configure() and grid_configure(), and
# forget() being short for pack_forget(). As a practical matter, I'm
# afraid that there is too much code out there that may be using the
# Pack, Place or Grid class, so I leave them intact -- but only as
# backwards compatibility features. Also note that those methods that
# take a master as argument (e.g. pack_propagate) have been moved to
# the Misc class (which now incorporates all methods common between
# toplevel and interior widgets). Again, for compatibility, these are
# copied into the Pack, Place or Grid class.
def Tcl(screenName=None, baseName=None, className='Tk', useTk=0):
    """Return a Tcl-only interpreter: a Tk instance created with
    useTk defaulting to 0, i.e. without initialising the Tk GUI."""
    return Tk(screenName, baseName, className, useTk)
class Pack:
    """Geometry manager Pack.

    Base class to use the methods pack_* in every widget."""
    def pack_configure(self, cnf={}, **kw):
        """Pack a widget in the parent widget. Use as options:
        after=widget - pack it after you have packed widget
        anchor=NSEW (or subset) - position widget according to
                                  given direction
        before=widget - pack it before you will pack widget
        expand=bool - expand widget if parent size grows
        fill=NONE or X or Y or BOTH - fill widget if widget grows
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        side=TOP or BOTTOM or LEFT or RIGHT - where to add this widget.
        """
        self.tk.call(
            ('pack', 'configure', self._w)
            + self._options(cnf, kw))
    pack = configure = config = pack_configure
    def pack_forget(self):
        """Unmap this widget and do not use it for the packing order."""
        self.tk.call('pack', 'forget', self._w)
    forget = pack_forget
    def pack_info(self):
        """Return information about the packing options
        for this widget."""
        # Tk answers '-option value ...'; build an option->value dict,
        # converting window path names back into widget objects.
        words = self.tk.splitlist(
            self.tk.call('pack', 'info', self._w))
        dict = {}
        for i in range(0, len(words), 2):
            key = words[i][1:]
            value = words[i+1]
            if value[:1] == '.':
                value = self._nametowidget(value)
            dict[key] = value
        return dict
    info = pack_info
    # Master-side methods live on Misc; re-exported here for
    # backwards compatibility.
    propagate = pack_propagate = Misc.pack_propagate
    slaves = pack_slaves = Misc.pack_slaves
class Place:
    """Geometry manager Place.

    Base class to use the methods place_* in every widget."""
    def place_configure(self, cnf={}, **kw):
        """Place a widget in the parent widget. Use as options:
        in=master - master relative to which the widget is placed
        in_=master - see 'in' option description
        x=amount - locate anchor of this widget at position x of master
        y=amount - locate anchor of this widget at position y of master
        relx=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to width of master (1.0 is right edge)
        rely=amount - locate anchor of this widget between 0.0 and 1.0
                      relative to height of master (1.0 is bottom edge)
        anchor=NSEW (or subset) - position anchor according to given direction
        width=amount - width of this widget in pixel
        height=amount - height of this widget in pixel
        relwidth=amount - width of this widget between 0.0 and 1.0
                          relative to width of master (1.0 is the same width
                          as the master)
        relheight=amount - height of this widget between 0.0 and 1.0
                           relative to height of master (1.0 is the same
                           height as the master)
        bordermode="inside" or "outside" - whether to take border width of
                                           master widget into account
        """
        self.tk.call(
            ('place', 'configure', self._w)
            + self._options(cnf, kw))
    place = configure = config = place_configure
    def place_forget(self):
        """Unmap this widget."""
        self.tk.call('place', 'forget', self._w)
    forget = place_forget
    def place_info(self):
        """Return information about the placing options
        for this widget."""
        # Same '-option value' to dict conversion as pack_info.
        words = self.tk.splitlist(
            self.tk.call('place', 'info', self._w))
        dict = {}
        for i in range(0, len(words), 2):
            key = words[i][1:]
            value = words[i+1]
            if value[:1] == '.':
                value = self._nametowidget(value)
            dict[key] = value
        return dict
    info = place_info
    # Master-side method lives on Misc; re-exported for compatibility.
    slaves = place_slaves = Misc.place_slaves
class Grid:
    """Geometry manager Grid.

    Base class to use the methods grid_* in every widget."""
    # Thanks to Masazumi Yoshikawa (yosikawa@isi.edu)
    def grid_configure(self, cnf={}, **kw):
        """Position a widget in the parent widget in a grid. Use as options:
        column=number - use cell identified with given column (starting with 0)
        columnspan=number - this widget will span several columns
        in=master - use master to contain this widget
        in_=master - see 'in' option description
        ipadx=amount - add internal padding in x direction
        ipady=amount - add internal padding in y direction
        padx=amount - add padding in x direction
        pady=amount - add padding in y direction
        row=number - use cell identified with given row (starting with 0)
        rowspan=number - this widget will span several rows
        sticky=NSEW - if cell is larger on which sides will this
                      widget stick to the cell boundary
        """
        self.tk.call(
            ('grid', 'configure', self._w)
            + self._options(cnf, kw))
    grid = configure = config = grid_configure
    # Master-side methods live on Misc; re-exported here for
    # backwards compatibility.
    bbox = grid_bbox = Misc.grid_bbox
    columnconfigure = grid_columnconfigure = Misc.grid_columnconfigure
    def grid_forget(self):
        """Unmap this widget."""
        self.tk.call('grid', 'forget', self._w)
    forget = grid_forget
    def grid_remove(self):
        """Unmap this widget but remember the grid options."""
        self.tk.call('grid', 'remove', self._w)
    def grid_info(self):
        """Return information about the options
        for positioning this widget in a grid."""
        # Same '-option value' to dict conversion as pack_info.
        words = self.tk.splitlist(
            self.tk.call('grid', 'info', self._w))
        dict = {}
        for i in range(0, len(words), 2):
            key = words[i][1:]
            value = words[i+1]
            if value[:1] == '.':
                value = self._nametowidget(value)
            dict[key] = value
        return dict
    info = grid_info
    location = grid_location = Misc.grid_location
    propagate = grid_propagate = Misc.grid_propagate
    rowconfigure = grid_rowconfigure = Misc.grid_rowconfigure
    size = grid_size = Misc.grid_size
    slaves = grid_slaves = Misc.grid_slaves
class BaseWidget(Misc):
    """Internal class."""
    def _setup(self, master, cnf):
        """Internal function. Sets up information about children."""
        # Fall back to (and lazily create) the process-wide default root.
        if _support_default_root:
            global _default_root
            if not master:
                if not _default_root:
                    _default_root = Tk()
                master = _default_root
        self.master = master
        self.tk = master.tk
        name = None
        # An explicit 'name' option chooses the Tk path component;
        # it is consumed here and not passed on to the widget command.
        if 'name' in cnf:
            name = cnf['name']
            del cnf['name']
        if not name:
            # Default name: the object's id, which is unique enough.
            name = repr(id(self))
        self._name = name
        if master._w=='.':
            self._w = '.' + name
        else:
            self._w = master._w + '.' + name
        self.children = {}
        # A name clash destroys the previous widget of that name.
        if self._name in self.master.children:
            self.master.children[self._name].destroy()
        self.master.children[self._name] = self
    def __init__(self, master, widgetName, cnf={}, kw={}, extra=()):
        """Construct a widget with the parent widget MASTER, a name WIDGETNAME
        and appropriate options."""
        # NOTE: cnf={} / kw={} are shared mutable defaults; the code only
        # reads from (and deletes consumed keys of) the merged copy.
        if kw:
            cnf = _cnfmerge((cnf, kw))
        self.widgetName = widgetName
        BaseWidget._setup(self, master, cnf)
        if self._tclCommands is None:
            self._tclCommands = []
        # Options keyed by a class (Py2 ClassType) are applied after
        # creation via that class's configure().
        classes = []
        for k in cnf.keys():
            if type(k) is ClassType:
                classes.append((k, cnf[k]))
                del cnf[k]
        self.tk.call(
            (widgetName, self._w) + extra + self._options(cnf))
        for k, v in classes:
            k.configure(self, v)
    def destroy(self):
        """Destroy this and all descendants widgets."""
        for c in self.children.values(): c.destroy()
        self.tk.call('destroy', self._w)
        # Unregister from the parent before the generic cleanup.
        if self._name in self.master.children:
            del self.master.children[self._name]
        Misc.destroy(self)
    def _do(self, name, args=()):
        # XXX Obsolete -- better use self.tk.call directly!
        return self.tk.call((self._w, name) + args)
class Widget(BaseWidget, Pack, Place, Grid):
    """Internal class.

    Base class for a widget which can be positioned with the geometry managers
    Pack, Place or Grid."""
    # Pure mix-in composition; all behaviour comes from the bases.
    pass
class Toplevel(BaseWidget, Wm):
    """Toplevel widget, e.g. for dialogs."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a toplevel widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, menu, relief, screen, takefocus,
        use, visual, width."""
        if kw:
            cnf = _cnfmerge((cnf, kw))
        extra = ()
        # These options must be passed on the Tcl command line at
        # creation time, not via configure afterwards.
        for wmkey in ['screen', 'class_', 'class', 'visual',
                      'colormap']:
            if wmkey in cnf:
                val = cnf[wmkey]
                # TBD: a hack needed because some keys
                # are not valid as keyword arguments
                if wmkey[-1] == '_': opt = '-'+wmkey[:-1]
                else: opt = '-'+wmkey
                extra = extra + (opt, val)
                del cnf[wmkey]
        BaseWidget.__init__(self, master, 'toplevel', cnf, {}, extra)
        # Inherit icon name and title from the root window.
        root = self._root()
        self.iconname(root.iconname())
        self.title(root.title())
        self.protocol("WM_DELETE_WINDOW", self.destroy)
class Button(Widget):
    """Button widget."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a button widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, repeatdelay,
            repeatinterval, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            command, compound, default, height,
            overrelief, state, width
        """
        Widget.__init__(self, master, 'button', cnf, kw)
    # The tkButton* methods invoke the internal Tk button bindings
    # of the same names.
    def tkButtonEnter(self, *dummy):
        self.tk.call('tkButtonEnter', self._w)
    def tkButtonLeave(self, *dummy):
        self.tk.call('tkButtonLeave', self._w)
    def tkButtonDown(self, *dummy):
        self.tk.call('tkButtonDown', self._w)
    def tkButtonUp(self, *dummy):
        self.tk.call('tkButtonUp', self._w)
    def tkButtonInvoke(self, *dummy):
        self.tk.call('tkButtonInvoke', self._w)
    def flash(self):
        """Flash the button.

        This is accomplished by redisplaying
        the button several times, alternating between active and
        normal colors. At the end of the flash the button is left
        in the same normal/active state as when the command was
        invoked. This command is ignored if the button's state is
        disabled.
        """
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Invoke the command associated with the button.

        The return value is the return value from the command,
        or an empty string if there is no command associated with
        the button. This command is ignored if the button's state
        is disabled.
        """
        return self.tk.call(self._w, 'invoke')
# Indices:
# XXX I don't like these -- take them away
def AtEnd():
    """Return the Tk index string that refers to the end position."""
    return 'end'
def AtInsert(*args):
    """Return the 'insert' Tk index, extended with every truthy
    modifier given in ARGS, separated by single spaces."""
    parts = ['insert']
    for arg in args:
        if arg:
            parts.append(arg)
    return ' '.join(parts)
def AtSelFirst():
    """Return the Tk index string for the start of the selection."""
    return 'sel.first'
def AtSelLast():
    """Return the Tk index string for the end of the selection."""
    return 'sel.last'
def At(x, y=None):
    """Return a Tk coordinate index: '@x' for one value, '@x,y' for two."""
    if y is None:
        return '@%r' % (x,)
    return '@%r,%r' % (x, y)
class Canvas(Widget, XView, YView):
"""Canvas widget to display graphical elements like lines or text."""
def __init__(self, master=None, cnf={}, **kw):
    """Construct a canvas widget with the parent MASTER.

    Valid resource names: background, bd, bg, borderwidth, closeenough,
    confine, cursor, height, highlightbackground, highlightcolor,
    highlightthickness, insertbackground, insertborderwidth,
    insertofftime, insertontime, insertwidth, offset, relief,
    scrollregion, selectbackground, selectborderwidth, selectforeground,
    state, takefocus, width, xscrollcommand, xscrollincrement,
    yscrollcommand, yscrollincrement."""
    Widget.__init__(self, master, 'canvas', cnf, kw)
def addtag(self, *args):
    """Internal function."""
    # Generic dispatcher for the addtag_* convenience methods below.
    self.tk.call((self._w, 'addtag') + args)
def addtag_above(self, newtag, tagOrId):
    """Add tag NEWTAG to all items above TAGORID."""
    # Delegates to the generic addtag() dispatcher.
    self.addtag(newtag, 'above', tagOrId)
def addtag_all(self, newtag):
    """Add tag NEWTAG to all items."""
    # Delegates to the generic addtag() dispatcher.
    self.addtag(newtag, 'all')
def addtag_below(self, newtag, tagOrId):
    """Add tag NEWTAG to all items below TAGORID."""
    # Delegates to the generic addtag() dispatcher.
    self.addtag(newtag, 'below', tagOrId)
def addtag_closest(self, newtag, x, y, halo=None, start=None):
    """Add tag NEWTAG to item which is closest to pixel at X, Y.
    If several match take the top-most.
    All items closer than HALO are considered overlapping (all are
    closest). If START is specified the next below this tag is taken."""
    self.addtag(newtag, 'closest', x, y, halo, start)
def addtag_enclosed(self, newtag, x1, y1, x2, y2):
    """Add tag NEWTAG to all items in the rectangle defined
    by X1,Y1,X2,Y2."""
    # Delegates to the generic addtag() dispatcher.
    self.addtag(newtag, 'enclosed', x1, y1, x2, y2)
def addtag_overlapping(self, newtag, x1, y1, x2, y2):
    """Add tag NEWTAG to all items which overlap the rectangle
    defined by X1,Y1,X2,Y2."""
    # Delegates to the generic addtag() dispatcher.
    self.addtag(newtag, 'overlapping', x1, y1, x2, y2)
def addtag_withtag(self, newtag, tagOrId):
    """Add tag NEWTAG to all items with TAGORID."""
    # Delegates to the generic addtag() dispatcher.
    self.addtag(newtag, 'withtag', tagOrId)
def bbox(self, *args):
    """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
    which encloses all items with tags specified as arguments."""
    # None when Tk returns an empty result (no matching items).
    return self._getints(
        self.tk.call((self._w, 'bbox') + args)) or None
def tag_unbind(self, tagOrId, sequence, funcid=None):
    """Unbind for all items with TAGORID for event SEQUENCE the
    function identified with FUNCID."""
    self.tk.call(self._w, 'bind', tagOrId, sequence, '')
    # Also drop the registered Tcl command so it is not leaked.
    if funcid:
        self.deletecommand(funcid)
def tag_bind(self, tagOrId, sequence=None, func=None, add=None):
    """Bind to all items with TAGORID at event SEQUENCE a call to function FUNC.

    An additional boolean parameter ADD specifies whether FUNC will be
    called additionally to the other bound function or whether it will
    replace the previous function. See bind for the return value."""
    # Misc._bind handles the command registration and substitution.
    return self._bind((self._w, 'bind', tagOrId),
                      sequence, func, add)
    def canvasx(self, screenx, gridspacing=None):
        """Return the canvas x coordinate of pixel position SCREENX rounded
        to nearest multiple of GRIDSPACING units (no rounding if omitted)."""
        return getdouble(self.tk.call(
            self._w, 'canvasx', screenx, gridspacing))
    def canvasy(self, screeny, gridspacing=None):
        """Return the canvas y coordinate of pixel position SCREENY rounded
        to nearest multiple of GRIDSPACING units (no rounding if omitted)."""
        return getdouble(self.tk.call(
            self._w, 'canvasy', screeny, gridspacing))
    def coords(self, *args):
        """Return a list of float coordinates for the item given in ARGS."""
        # XXX Should use _flatten on args
        return map(getdouble,
                   self.tk.splitlist(
                       self.tk.call((self._w, 'coords') + args)))
def _create(self, itemType, args, kw): # Args: (val, val, ..., cnf={})
"""Internal function."""
args = _flatten(args)
cnf = args[-1]
if type(cnf) in (DictionaryType, TupleType):
args = args[:-1]
else:
cnf = {}
return getint(self.tk.call(
self._w, 'create', itemType,
*(args + self._options(cnf, kw))))
    def create_arc(self, *args, **kw):
        """Create arc shaped region with coordinates x1,y1,x2,y2.
        Return the item id."""
        return self._create('arc', args, kw)
    def create_bitmap(self, *args, **kw):
        """Create bitmap at position x1,y1.  Return the item id."""
        return self._create('bitmap', args, kw)
    def create_image(self, *args, **kw):
        """Create image item at position x1,y1.  Return the item id."""
        return self._create('image', args, kw)
    def create_line(self, *args, **kw):
        """Create line with coordinates x1,y1,...,xn,yn.
        Return the item id."""
        return self._create('line', args, kw)
    def create_oval(self, *args, **kw):
        """Create oval with bounding box x1,y1,x2,y2.  Return the item id."""
        return self._create('oval', args, kw)
    def create_polygon(self, *args, **kw):
        """Create polygon with coordinates x1,y1,...,xn,yn.
        Return the item id."""
        return self._create('polygon', args, kw)
    def create_rectangle(self, *args, **kw):
        """Create rectangle with coordinates x1,y1,x2,y2.
        Return the item id."""
        return self._create('rectangle', args, kw)
    def create_text(self, *args, **kw):
        """Create text item at position x1,y1.  Return the item id."""
        return self._create('text', args, kw)
    def create_window(self, *args, **kw):
        """Create window item at position x1,y1 (optionally sized via
        options).  Return the item id."""
        return self._create('window', args, kw)
    def dchars(self, *args):
        """Delete characters of text items identified by tag or id in ARGS
        (possibly several times) from FIRST to LAST character (including)."""
        self.tk.call((self._w, 'dchars') + args)
    def delete(self, *args):
        """Delete all items identified by the tags or ids given in ARGS."""
        self.tk.call((self._w, 'delete') + args)
    def dtag(self, *args):
        """Delete the tag given as last argument in ARGS from the items
        identified by the first argument in ARGS."""
        self.tk.call((self._w, 'dtag') + args)
    def find(self, *args):
        """Internal function.  Return a tuple of the ids of the items
        matching the search spec in ARGS (empty tuple if none match)."""
        return self._getints(
            self.tk.call((self._w, 'find') + args)) or ()
    def find_above(self, tagOrId):
        """Return the items above TAGORID in the display list."""
        return self.find('above', tagOrId)
    def find_all(self):
        """Return a tuple of all item ids on the canvas."""
        return self.find('all')
    def find_below(self, tagOrId):
        """Return all items below TAGORID in the display list."""
        return self.find('below', tagOrId)
    def find_closest(self, x, y, halo=None, start=None):
        """Return the item which is closest to pixel at X, Y.
        If several match take the top-most.
        All items closer than HALO are considered overlapping (all are
        closest). If START is specified the next below this tag is taken."""
        return self.find('closest', x, y, halo, start)
    def find_enclosed(self, x1, y1, x2, y2):
        """Return all items completely enclosed in the rectangle defined
        by X1,Y1,X2,Y2."""
        return self.find('enclosed', x1, y1, x2, y2)
    def find_overlapping(self, x1, y1, x2, y2):
        """Return all items which overlap (even partially) the rectangle
        defined by X1,Y1,X2,Y2."""
        return self.find('overlapping', x1, y1, x2, y2)
    def find_withtag(self, tagOrId):
        """Return all items carrying tag TAGORID."""
        return self.find('withtag', tagOrId)
    def focus(self, *args):
        """Set focus to the first item specified in ARGS.
        With no arguments, return the item that currently has the focus
        (Tk's 'focus' subcommand semantics)."""
        return self.tk.call((self._w, 'focus') + args)
    def gettags(self, *args):
        """Return, as a tuple, the tags associated with the first item
        specified in ARGS."""
        return self.tk.splitlist(
            self.tk.call((self._w, 'gettags') + args))
    def icursor(self, *args):
        """Set cursor at position POS in the item identified by TAGORID.
        In ARGS TAGORID must come first, then POS."""
        self.tk.call((self._w, 'icursor') + args)
    def index(self, *args):
        """Return, as an integer, the position of the cursor in the item
        specified in ARGS."""
        return getint(self.tk.call((self._w, 'index') + args))
    def insert(self, *args):
        """Insert TEXT in item TAGORID at position POS. ARGS must
        be TAGORID POS TEXT."""
        self.tk.call((self._w, 'insert') + args)
    def itemcget(self, tagOrId, option):
        """Return the resource value for OPTION of the item TAGORID.
        A leading '-' is prepended to OPTION automatically."""
        return self.tk.call(
            (self._w, 'itemcget') + (tagOrId, '-'+option))
    def itemconfigure(self, tagOrId, cnf=None, **kw):
        """Configure resources of an item TAGORID.

        The values for resources are specified as keyword
        arguments. To get an overview about
        the allowed keyword arguments call the method without arguments.
        """
        return self._configure(('itemconfigure', tagOrId), cnf, kw)
    # Backward-compatible alias.
    itemconfig = itemconfigure
    # lower, tkraise/lift hide Misc.lower, Misc.tkraise/lift,
    # so the preferred name for them is tag_lower, tag_raise
    # (similar to tag_bind, and similar to the Text widget);
    # unfortunately can't delete the old ones yet (maybe in 1.6)
    def tag_lower(self, *args):
        """Lower an item TAGORID given in ARGS in the display list
        (optionally below another item)."""
        self.tk.call((self._w, 'lower') + args)
    lower = tag_lower
    def move(self, *args):
        """Move an item TAGORID given in ARGS; TAGORID is followed by
        the x and y offsets."""
        self.tk.call((self._w, 'move') + args)
    def postscript(self, cnf={}, **kw):
        """Print the contents of the canvas to a postscript
        file. Valid options: colormap, colormode, file, fontmap,
        height, pageanchor, pageheight, pagewidth, pagex, pagey,
        rotate, width, x, y."""
        return self.tk.call((self._w, 'postscript') +
                            self._options(cnf, kw))
    def tag_raise(self, *args):
        """Raise an item TAGORID given in ARGS in the display list
        (optionally above another item)."""
        self.tk.call((self._w, 'raise') + args)
    # Backward-compatible aliases (see comment before tag_lower).
    lift = tkraise = tag_raise
    def scale(self, *args):
        """Scale item TAGORID with XORIGIN, YORIGIN, XSCALE, YSCALE given
        in ARGS."""
        self.tk.call((self._w, 'scale') + args)
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates as the scan anchor."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y, gain=10):
        """Adjust the view of the canvas to GAIN times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y, gain)
    def select_adjust(self, tagOrId, index):
        """Adjust the end of the selection near the cursor of an item
        TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'adjust', tagOrId, index)
    def select_clear(self):
        """Clear the selection if it is currently in this widget."""
        self.tk.call(self._w, 'select', 'clear')
    def select_from(self, tagOrId, index):
        """Set the fixed end of the selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'from', tagOrId, index)
    def select_item(self):
        """Return the item which has the selection, or None if there
        is none."""
        return self.tk.call(self._w, 'select', 'item') or None
    def select_to(self, tagOrId, index):
        """Set the variable end of the selection in item TAGORID to INDEX."""
        self.tk.call(self._w, 'select', 'to', tagOrId, index)
    def type(self, tagOrId):
        """Return the type of the item TAGORID, or None if the item does
        not exist."""
        return self.tk.call(self._w, 'type', tagOrId) or None
class Checkbutton(Widget):
    """Checkbutton widget which is either in on- or off-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a checkbutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, offvalue, onvalue, padx, pady, relief,
        selectcolor, selectimage, state, takefocus, text, textvariable,
        underline, variable, width, wraplength."""
        Widget.__init__(self, master, 'checkbutton', cnf, kw)
    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and invoke a command if given as resource.
        Return the result of the invoked command."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
    def toggle(self):
        """Toggle the button, without invoking any command."""
        self.tk.call(self._w, 'toggle')
class Entry(Widget, XView):
    """Entry widget which allows displaying a single line of text."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct an entry widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, highlightbackground,
        highlightcolor, highlightthickness, insertbackground,
        insertborderwidth, insertofftime, insertontime, insertwidth,
        invalidcommand, invcmd, justify, relief, selectbackground,
        selectborderwidth, selectforeground, show, state, takefocus,
        textvariable, validate, validatecommand, vcmd, width,
        xscrollcommand."""
        Widget.__init__(self, master, 'entry', cnf, kw)
    def delete(self, first, last=None):
        """Delete text from FIRST to LAST (not included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self):
        """Return the text as a string."""
        return self.tk.call(self._w, 'get')
    def icursor(self, index):
        """Insert cursor at INDEX."""
        self.tk.call(self._w, 'icursor', index)
    def index(self, index):
        """Return the numerical position corresponding to INDEX."""
        return getint(self.tk.call(
            self._w, 'index', index))
    def insert(self, index, string):
        """Insert STRING at INDEX."""
        self.tk.call(self._w, 'insert', index, string)
    def scan_mark(self, x):
        """Remember the current X coordinate as the scan anchor."""
        self.tk.call(self._w, 'scan', 'mark', x)
    def scan_dragto(self, x):
        """Adjust the view of the entry to 10 times the
        difference between X and the coordinate given in scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x)
    def selection_adjust(self, index):
        """Adjust the end of the selection near the cursor to INDEX."""
        self.tk.call(self._w, 'selection', 'adjust', index)
    select_adjust = selection_adjust
    def selection_clear(self):
        """Clear the selection if it is in this widget."""
        self.tk.call(self._w, 'selection', 'clear')
    select_clear = selection_clear
    def selection_from(self, index):
        """Set the fixed end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'from', index)
    select_from = selection_from
    def selection_present(self):
        """Return True if there are characters selected in the entry, False
        otherwise."""
        return self.tk.getboolean(
            self.tk.call(self._w, 'selection', 'present'))
    select_present = selection_present
    def selection_range(self, start, end):
        """Set the selection from START to END (not included)."""
        self.tk.call(self._w, 'selection', 'range', start, end)
    select_range = selection_range
    def selection_to(self, index):
        """Set the variable end of a selection to INDEX."""
        self.tk.call(self._w, 'selection', 'to', index)
    select_to = selection_to
class Frame(Widget):
    """Frame widget which may contain other widgets and can have a 3D border."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a frame widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, class,
        colormap, container, cursor, height, highlightbackground,
        highlightcolor, highlightthickness, relief, takefocus, visual, width."""
        merged = _cnfmerge((cnf, kw))
        # The 'class' option can only be given at creation time, so it is
        # split off and passed separately; the 'class_' spelling (needed
        # because 'class' is a Python keyword) takes precedence.
        extra = ()
        for key in ('class_', 'class'):
            if key in merged:
                extra = ('-class', merged[key])
                del merged[key]
                break
        Widget.__init__(self, master, 'frame', merged, {}, extra)
class Label(Widget):
    """Label widget which can display text and bitmaps."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a label widget with the parent MASTER.

        STANDARD OPTIONS

            activebackground, activeforeground, anchor,
            background, bitmap, borderwidth, cursor,
            disabledforeground, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, image, justify,
            padx, pady, relief, takefocus, text,
            textvariable, underline, wraplength

        WIDGET-SPECIFIC OPTIONS

            height, state, width
        """
        Widget.__init__(self, master, 'label', cnf, kw)
class Listbox(Widget, XView, YView):
    """Listbox widget which can display a list of strings."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a listbox widget with the parent MASTER.

        Valid resource names: background, bd, bg, borderwidth, cursor,
        exportselection, fg, font, foreground, height, highlightbackground,
        highlightcolor, highlightthickness, relief, selectbackground,
        selectborderwidth, selectforeground, selectmode, setgrid, takefocus,
        width, xscrollcommand, yscrollcommand, listvariable."""
        Widget.__init__(self, master, 'listbox', cnf, kw)
    def activate(self, index):
        """Activate item identified by INDEX."""
        self.tk.call(self._w, 'activate', index)
    def bbox(self, *args):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a rectangle
        which encloses the item identified by index in ARGS.
        Return None if the item is not visible."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def curselection(self):
        """Return the indices of the currently selected items."""
        # XXX Ought to apply self._getints()...
        return self.tk.splitlist(self.tk.call(
            self._w, 'curselection'))
    def delete(self, first, last=None):
        """Delete items from FIRST to LAST (not included)."""
        self.tk.call(self._w, 'delete', first, last)
    def get(self, first, last=None):
        """Get list of items from FIRST to LAST (not included).
        If LAST is omitted, return the single item at FIRST."""
        if last:
            return self.tk.splitlist(self.tk.call(
                self._w, 'get', first, last))
        else:
            return self.tk.call(self._w, 'get', first)
    def index(self, index):
        """Return the numerical index of the item identified by INDEX,
        or None."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def insert(self, index, *elements):
        """Insert ELEMENTS at INDEX."""
        self.tk.call((self._w, 'insert', index) + elements)
    def nearest(self, y):
        """Get index of item which is nearest to y coordinate Y."""
        return getint(self.tk.call(
            self._w, 'nearest', y))
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates as the scan anchor."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
    def scan_dragto(self, x, y):
        """Adjust the view of the listbox to 10 times the
        difference between X and Y and the coordinates given in
        scan_mark."""
        self.tk.call(self._w, 'scan', 'dragto', x, y)
    def see(self, index):
        """Scroll such that INDEX is visible."""
        self.tk.call(self._w, 'see', index)
    def selection_anchor(self, index):
        """Set the fixed end of the selection to INDEX."""
        self.tk.call(self._w, 'selection', 'anchor', index)
    select_anchor = selection_anchor
    def selection_clear(self, first, last=None):
        """Clear the selection from FIRST to LAST (not included)."""
        self.tk.call(self._w,
                     'selection', 'clear', first, last)
    select_clear = selection_clear
    def selection_includes(self, index):
        """Return 1 if INDEX is part of the selection."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'selection', 'includes', index))
    select_includes = selection_includes
    def selection_set(self, first, last=None):
        """Set the selection from FIRST to LAST (not included) without
        changing the currently selected elements."""
        self.tk.call(self._w, 'selection', 'set', first, last)
    select_set = selection_set
    def size(self):
        """Return the number of elements in the listbox."""
        return getint(self.tk.call(self._w, 'size'))
    def itemcget(self, index, option):
        """Return the resource value for an ITEM and an OPTION."""
        return self.tk.call(
            (self._w, 'itemcget') + (index, '-'+option))
    def itemconfigure(self, index, cnf=None, **kw):
        """Configure resources of an ITEM.

        The values for resources are specified as keyword arguments.
        To get an overview about the allowed keyword arguments
        call the method without arguments.
        Valid resource names: background, bg, foreground, fg,
        selectbackground, selectforeground."""
        return self._configure(('itemconfigure', index), cnf, kw)
    itemconfig = itemconfigure
class Menu(Widget):
    """Menu widget which allows displaying menu bars, pull-down menus and pop-up menus."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct menu widget with the parent MASTER.

        Valid resource names: activebackground, activeborderwidth,
        activeforeground, background, bd, bg, borderwidth, cursor,
        disabledforeground, fg, font, foreground, postcommand, relief,
        selectcolor, takefocus, tearoff, tearoffcommand, title, type."""
        Widget.__init__(self, master, 'menu', cnf, kw)
    def tk_bindForTraversal(self):
        pass # obsolete since Tk 4.0
    # The tk_* methods below simply invoke the corresponding Tcl library
    # procedures for menubutton posting and keyboard traversal.
    def tk_mbPost(self):
        self.tk.call('tk_mbPost', self._w)
    def tk_mbUnpost(self):
        self.tk.call('tk_mbUnpost')
    def tk_traverseToMenu(self, char):
        self.tk.call('tk_traverseToMenu', self._w, char)
    def tk_traverseWithinMenu(self, char):
        self.tk.call('tk_traverseWithinMenu', self._w, char)
    def tk_getMenuButtons(self):
        return self.tk.call('tk_getMenuButtons', self._w)
    def tk_nextMenu(self, count):
        self.tk.call('tk_nextMenu', count)
    def tk_nextMenuEntry(self, count):
        self.tk.call('tk_nextMenuEntry', count)
    def tk_invokeMenu(self):
        self.tk.call('tk_invokeMenu', self._w)
    def tk_firstMenu(self):
        self.tk.call('tk_firstMenu', self._w)
    def tk_mbButtonDown(self):
        self.tk.call('tk_mbButtonDown', self._w)
    def tk_popup(self, x, y, entry=""):
        """Post the menu at position X,Y with entry ENTRY."""
        self.tk.call('tk_popup', self._w, x, y, entry)
    def activate(self, index):
        """Activate entry at INDEX."""
        self.tk.call(self._w, 'activate', index)
    def add(self, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'add', itemType) +
                 self._options(cnf, kw))
    def add_cascade(self, cnf={}, **kw):
        """Add hierarchical menu item."""
        self.add('cascade', cnf or kw)
    def add_checkbutton(self, cnf={}, **kw):
        """Add checkbutton menu item."""
        self.add('checkbutton', cnf or kw)
    def add_command(self, cnf={}, **kw):
        """Add command menu item."""
        self.add('command', cnf or kw)
    def add_radiobutton(self, cnf={}, **kw):
        """Add radiobutton menu item."""
        self.add('radiobutton', cnf or kw)
    def add_separator(self, cnf={}, **kw):
        """Add separator."""
        self.add('separator', cnf or kw)
    def insert(self, index, itemType, cnf={}, **kw):
        """Internal function."""
        self.tk.call((self._w, 'insert', index, itemType) +
                 self._options(cnf, kw))
    def insert_cascade(self, index, cnf={}, **kw):
        """Add hierarchical menu item at INDEX."""
        self.insert(index, 'cascade', cnf or kw)
    def insert_checkbutton(self, index, cnf={}, **kw):
        """Add checkbutton menu item at INDEX."""
        self.insert(index, 'checkbutton', cnf or kw)
    def insert_command(self, index, cnf={}, **kw):
        """Add command menu item at INDEX."""
        self.insert(index, 'command', cnf or kw)
    def insert_radiobutton(self, index, cnf={}, **kw):
        """Add radiobutton menu item at INDEX."""
        self.insert(index, 'radiobutton', cnf or kw)
    def insert_separator(self, index, cnf={}, **kw):
        """Add separator at INDEX."""
        self.insert(index, 'separator', cnf or kw)
    def delete(self, index1, index2=None):
        """Delete menu items between INDEX1 and INDEX2 (included)."""
        if index2 is None:
            index2 = index1
        num_index1, num_index2 = self.index(index1), self.index(index2)
        if (num_index1 is None) or (num_index2 is None):
            num_index1, num_index2 = 0, -1
        # Delete the Tcl commands registered for the doomed entries first,
        # so they are not leaked in the interpreter.
        for i in range(num_index1, num_index2 + 1):
            if 'command' in self.entryconfig(i):
                c = str(self.entrycget(i, 'command'))
                if c:
                    self.deletecommand(c)
        self.tk.call(self._w, 'delete', index1, index2)
    def entrycget(self, index, option):
        """Return the resource value of a menu item for OPTION at INDEX."""
        return self.tk.call(self._w, 'entrycget', index, '-' + option)
    def entryconfigure(self, index, cnf=None, **kw):
        """Configure a menu item at INDEX."""
        return self._configure(('entryconfigure', index), cnf, kw)
    entryconfig = entryconfigure
    def index(self, index):
        """Return the numerical index of a menu item identified by INDEX,
        or None."""
        i = self.tk.call(self._w, 'index', index)
        if i == 'none': return None
        return getint(i)
    def invoke(self, index):
        """Invoke a menu item identified by INDEX and execute
        the associated command."""
        return self.tk.call(self._w, 'invoke', index)
    def post(self, x, y):
        """Display a menu at position X,Y."""
        self.tk.call(self._w, 'post', x, y)
    def type(self, index):
        """Return the type of the menu item at INDEX."""
        return self.tk.call(self._w, 'type', index)
    def unpost(self):
        """Unmap a menu."""
        self.tk.call(self._w, 'unpost')
    def yposition(self, index):
        """Return the y-position of the topmost pixel of the menu item at INDEX."""
        return getint(self.tk.call(
            self._w, 'yposition', index))
class Menubutton(Widget):
    """Menubutton widget, obsolete since Tk8.0 (use Menu on a menubar
    instead)."""
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'menubutton', cnf, kw)
class Message(Widget):
    """Message widget to display multiline text.
    Obsolete since Label does it too."""
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'message', cnf, kw)
class Radiobutton(Widget):
    """Radiobutton widget which shows only one of several buttons in on-state."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a radiobutton widget with the parent MASTER.

        Valid resource names: activebackground, activeforeground, anchor,
        background, bd, bg, bitmap, borderwidth, command, cursor,
        disabledforeground, fg, font, foreground, height,
        highlightbackground, highlightcolor, highlightthickness, image,
        indicatoron, justify, padx, pady, relief, selectcolor, selectimage,
        state, takefocus, text, textvariable, underline, value, variable,
        width, wraplength."""
        Widget.__init__(self, master, 'radiobutton', cnf, kw)
    def deselect(self):
        """Put the button in off-state."""
        self.tk.call(self._w, 'deselect')
    def flash(self):
        """Flash the button."""
        self.tk.call(self._w, 'flash')
    def invoke(self):
        """Toggle the button and invoke a command if given as resource.
        Return the result of the invoked command."""
        return self.tk.call(self._w, 'invoke')
    def select(self):
        """Put the button in on-state."""
        self.tk.call(self._w, 'select')
class Scale(Widget):
    """Scale widget which can display a numerical scale."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scale widget with the parent MASTER.

        Valid resource names: activebackground, background, bigincrement, bd,
        bg, borderwidth, command, cursor, digits, fg, font, foreground, from,
        highlightbackground, highlightcolor, highlightthickness, label,
        length, orient, relief, repeatdelay, repeatinterval, resolution,
        showvalue, sliderlength, sliderrelief, state, takefocus,
        tickinterval, to, troughcolor, variable, width."""
        Widget.__init__(self, master, 'scale', cnf, kw)
    def get(self):
        """Get the current value as integer or float."""
        value = self.tk.call(self._w, 'get')
        # Prefer an int, fall back to float for fractional resolutions.
        try:
            return getint(value)
        except ValueError:
            return getdouble(value)
    def set(self, value):
        """Set the value to VALUE."""
        self.tk.call(self._w, 'set', value)
    def coords(self, value=None):
        """Return a tuple (X,Y) of the point along the centerline of the
        trough that corresponds to VALUE or the current value if None is
        given."""
        return self._getints(self.tk.call(self._w, 'coords', value))
    def identify(self, x, y):
        """Return where the point X,Y lies. Valid return values are "slider",
        "trough1" and "trough2"."""
        return self.tk.call(self._w, 'identify', x, y)
class Scrollbar(Widget):
    """Scrollbar widget which displays a slider at a certain position."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a scrollbar widget with the parent MASTER.

        Valid resource names: activebackground, activerelief,
        background, bd, bg, borderwidth, command, cursor,
        elementborderwidth, highlightbackground,
        highlightcolor, highlightthickness, jump, orient,
        relief, repeatdelay, repeatinterval, takefocus,
        troughcolor, width."""
        Widget.__init__(self, master, 'scrollbar', cnf, kw)
    def activate(self, index):
        """Display the element at INDEX with activebackground and activerelief.
        INDEX can be "arrow1", "slider" or "arrow2"."""
        self.tk.call(self._w, 'activate', index)
    def delta(self, deltax, deltay):
        """Return the fractional change of the scrollbar setting if it
        would be moved by DELTAX or DELTAY pixels."""
        return getdouble(
            self.tk.call(self._w, 'delta', deltax, deltay))
    def fraction(self, x, y):
        """Return the fractional value which corresponds to a slider
        position of X,Y."""
        return getdouble(self.tk.call(self._w, 'fraction', x, y))
    def identify(self, x, y):
        """Return the element under position X,Y as one of
        "arrow1", "slider", "arrow2" or ""."""
        return self.tk.call(self._w, 'identify', x, y)
    def get(self):
        """Return the current fractional values (upper and lower end)
        of the slider position."""
        return self._getdoubles(self.tk.call(self._w, 'get'))
    def set(self, *args):
        """Set the fractional values of the slider position (upper and
        lower ends as value between 0 and 1)."""
        self.tk.call((self._w, 'set') + args)
class Text(Widget, XView, YView):
"""Text widget which can display text in various forms."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a text widget with the parent MASTER.

        STANDARD OPTIONS

            background, borderwidth, cursor,
            exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, padx, pady,
            relief, selectbackground,
            selectborderwidth, selectforeground,
            setgrid, takefocus,
            xscrollcommand, yscrollcommand

        WIDGET-SPECIFIC OPTIONS

            autoseparators, height, maxundo,
            spacing1, spacing2, spacing3,
            state, tabs, undo, width, wrap
        """
        Widget.__init__(self, master, 'text', cnf, kw)
    def bbox(self, *args):
        """Return a tuple of (x,y,width,height) which gives the bounding
        box of the visible part of the character at the index in ARGS.
        Return None if the character is not visible."""
        return self._getints(
            self.tk.call((self._w, 'bbox') + args)) or None
    def tk_textSelectTo(self, index):
        """Internal: invoke the Tcl library procedure tk_textSelectTo."""
        self.tk.call('tk_textSelectTo', self._w, index)
    def tk_textBackspace(self):
        """Internal: invoke the Tcl library procedure tk_textBackspace."""
        self.tk.call('tk_textBackspace', self._w)
    def tk_textIndexCloser(self, a, b, c):
        """Internal: invoke the Tcl library procedure tk_textIndexCloser."""
        self.tk.call('tk_textIndexCloser', self._w, a, b, c)
    def tk_textResetAnchor(self, index):
        """Internal: invoke the Tcl library procedure tk_textResetAnchor."""
        self.tk.call('tk_textResetAnchor', self._w, index)
    def compare(self, index1, op, index2):
        """Return, as a boolean, whether between index INDEX1 and index
        INDEX2 the relation OP is satisfied. OP is one of <, <=, ==, >=,
        >, or !=."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'compare', index1, op, index2))
    def debug(self, boolean=None):
        """Turn on the internal consistency checks of the B-Tree inside the
        text widget according to BOOLEAN.  Return the resulting setting as
        a boolean."""
        return self.tk.getboolean(self.tk.call(
            self._w, 'debug', boolean))
    def delete(self, index1, index2=None):
        """Delete the characters between INDEX1 and INDEX2 (not included).
        If INDEX2 is omitted, delete the single character at INDEX1."""
        self.tk.call(self._w, 'delete', index1, index2)
    def dlineinfo(self, index):
        """Return tuple (x,y,width,height,baseline) giving the bounding box
        and baseline position of the visible part of the line containing
        the character at INDEX."""
        return self._getints(self.tk.call(self._w, 'dlineinfo', index))
    def dump(self, index1, index2=None, command=None, **kw):
        """Return the contents of the widget between index1 and index2.

        The type of contents returned is filtered based on the keyword
        parameters; if 'all', 'image', 'mark', 'tag', 'text', or 'window' are
        given and true, then the corresponding items are returned. The result
        is a list of triples of the form (key, value, index). If none of the
        keywords are true then 'all' is used by default.

        If the 'command' argument is given, it is called once for each element
        of the list of triples, with the values of each triple serving as the
        arguments to the function. In this case the list is not returned."""
        args = []
        func_name = None
        result = None
        if not command:
            # Never call the dump command without the -command flag, since the
            # output could involve Tcl quoting and would be a pain to parse
            # right. Instead just set the command to build a list of triples
            # as if we had done the parsing.
            result = []
            def append_triple(key, value, index, result=result):
                result.append((key, value, index))
            command = append_triple
        try:
            if not isinstance(command, str):
                func_name = command = self._register(command)
            args += ["-command", command]
            for key in kw:
                if kw[key]: args.append("-" + key)
            args.append(index1)
            if index2:
                args.append(index2)
            self.tk.call(self._w, "dump", *args)
            return result
        finally:
            # Always clean up the temporarily registered Tcl command.
            if func_name:
                self.deletecommand(func_name)
    ## new in tk8.4
    def edit(self, *args):
        """Internal method

        This method controls the undo mechanism and
        the modified flag. The exact behavior of the
        command depends on the option argument that
        follows the edit argument. The following forms
        of the command are currently supported:

        edit_modified, edit_redo, edit_reset, edit_separator
        and edit_undo

        """
        return self.tk.call(self._w, 'edit', *args)
    def edit_modified(self, arg=None):
        """Get or set the modified flag.

        If ARG is not specified, returns the modified
        flag of the widget. The insert, delete, edit undo and
        edit redo commands or the user can set or clear the
        modified flag. If a boolean is specified, sets the
        modified flag of the widget to ARG.
        """
        return self.edit("modified", arg)
    def edit_redo(self):
        """Redo the last undone edit.

        When the undo option is true, reapplies the last
        undone edits provided no other edits were done since
        then. Generates an error when the redo stack is empty.
        Does nothing when the undo option is false.
        """
        return self.edit("redo")
    def edit_reset(self):
        """Clear the undo and redo stacks.
        """
        return self.edit("reset")
    def edit_separator(self):
        """Insert a separator (boundary) on the undo stack.
        Does nothing when the undo option is false.
        """
        return self.edit("separator")
    def edit_undo(self):
        """Undo the last edit action if the undo option is true.

        An edit action is defined
        as all the insert and delete commands that are recorded
        on the undo stack in between two separators. Generates
        an error when the undo stack is empty. Does nothing
        when the undo option is false.
        """
        return self.edit("undo")
    def get(self, index1, index2=None):
        """Return the text from INDEX1 to INDEX2 (not included).
        If INDEX2 is omitted, return the character at INDEX1."""
        return self.tk.call(self._w, 'get', index1, index2)
    # (Image commands are new in 8.0)
    def image_cget(self, index, option):
        """Return the value of OPTION of an embedded image at INDEX."""
        # Accept the option with or without a leading '-' and with a
        # trailing '_' (keyword-safe spelling).
        if option[:1] != "-":
            option = "-" + option
        if option[-1:] == "_":
            option = option[:-1]
        return self.tk.call(self._w, "image", "cget", index, option)
    def image_configure(self, index, cnf=None, **kw):
        """Configure an embedded image at INDEX."""
        return self._configure(('image', 'configure', index), cnf, kw)
    def image_create(self, index, cnf={}, **kw):
        """Create an embedded image at INDEX and return its name."""
        return self.tk.call(
                 self._w, "image", "create", index,
                 *self._options(cnf, kw))
    def image_names(self):
        """Return the names of all embedded images in this widget."""
        return self.tk.call(self._w, "image", "names")
    def index(self, index):
        """Return, as a string, the index in the form line.char for INDEX."""
        return str(self.tk.call(self._w, 'index', index))
    def insert(self, index, chars, *args):
        """Insert CHARS before the characters at INDEX. An additional
        tag can be given in ARGS. Additional CHARS and tags can follow in ARGS."""
        self.tk.call((self._w, 'insert', index, chars) + args)
    def mark_gravity(self, markName, direction=None):
        """Change the gravity of a mark MARKNAME to DIRECTION (LEFT or RIGHT).
        Return the current value if None is given for DIRECTION."""
        return self.tk.call(
            (self._w, 'mark', 'gravity', markName, direction))
    def mark_names(self):
        """Return a tuple of all mark names."""
        return self.tk.splitlist(self.tk.call(
            self._w, 'mark', 'names'))
    def mark_set(self, markName, index):
        """Set mark MARKNAME before the character at INDEX."""
        self.tk.call(self._w, 'mark', 'set', markName, index)
    def mark_unset(self, *markNames):
        """Delete all marks in MARKNAMES."""
        self.tk.call((self._w, 'mark', 'unset') + markNames)
    def mark_next(self, index):
        """Return the name of the next mark after INDEX, or None if there
        is none."""
        return self.tk.call(self._w, 'mark', 'next', index) or None
    def mark_previous(self, index):
        """Return the name of the previous mark before INDEX, or None if
        there is none."""
        return self.tk.call(self._w, 'mark', 'previous', index) or None
    def scan_mark(self, x, y):
        """Remember the current X, Y coordinates as the scan anchor."""
        self.tk.call(self._w, 'scan', 'mark', x, y)
def scan_dragto(self, x, y):
"""Adjust the view of the text to 10 times the
difference between X and Y and the coordinates given in
scan_mark."""
self.tk.call(self._w, 'scan', 'dragto', x, y)
    def search(self, pattern, index, stopindex=None,
               forwards=None, backwards=None, exact=None,
               regexp=None, nocase=None, count=None, elide=None):
        """Search PATTERN beginning from INDEX until STOPINDEX.
        Return the index of the first character of a match or an
        empty string.

        COUNT, if given, names a Tcl variable that receives the length
        of the match.  The remaining keyword flags select the matching
        mode of the underlying Tk 'search' subcommand.
        """
        # Tk requires all switches to precede the pattern, so they are
        # collected first, in a fixed order.
        args = [self._w, 'search']
        if forwards: args.append('-forwards')
        if backwards: args.append('-backwards')
        if exact: args.append('-exact')
        if regexp: args.append('-regexp')
        if nocase: args.append('-nocase')
        if elide: args.append('-elide')
        if count: args.append('-count'); args.append(count)
        # A pattern starting with '-' would otherwise be parsed as a
        # switch; '--' terminates switch processing.
        if pattern and pattern[0] == '-': args.append('--')
        args.append(pattern)
        args.append(index)
        if stopindex: args.append(stopindex)
        return str(self.tk.call(tuple(args)))
def see(self, index):
"""Scroll such that the character at INDEX is visible."""
self.tk.call(self._w, 'see', index)
def tag_add(self, tagName, index1, *args):
"""Add tag TAGNAME to all characters between INDEX1 and index2 in ARGS.
Additional pairs of indices may follow in ARGS."""
self.tk.call(
(self._w, 'tag', 'add', tagName, index1) + args)
    def tag_unbind(self, tagName, sequence, funcid=None):
        """Unbind for all characters with TAGNAME for event SEQUENCE the
        function identified with FUNCID."""
        # NOTE(review): binding SEQUENCE to '' clears *every* handler bound
        # to that sequence on the tag, not only FUNCID's -- confirm before
        # relying on other bindings surviving this call.
        self.tk.call(self._w, 'tag', 'bind', tagName, sequence, '')
        if funcid:
            # Also delete the Tcl command object created for the callback
            # so it is not leaked.
            self.deletecommand(funcid)
def tag_bind(self, tagName, sequence, func, add=None):
"""Bind to all characters with TAGNAME at event SEQUENCE a call to function FUNC.
An additional boolean parameter ADD specifies whether FUNC will be
called additionally to the other bound function or whether it will
replace the previous function. See bind for the return value."""
return self._bind((self._w, 'tag', 'bind', tagName),
sequence, func, add)
def tag_cget(self, tagName, option):
"""Return the value of OPTION for tag TAGNAME."""
if option[:1] != '-':
option = '-' + option
if option[-1:] == '_':
option = option[:-1]
return self.tk.call(self._w, 'tag', 'cget', tagName, option)
def tag_configure(self, tagName, cnf=None, **kw):
"""Configure a tag TAGNAME."""
return self._configure(('tag', 'configure', tagName), cnf, kw)
tag_config = tag_configure
def tag_delete(self, *tagNames):
"""Delete all tags in TAGNAMES."""
self.tk.call((self._w, 'tag', 'delete') + tagNames)
def tag_lower(self, tagName, belowThis=None):
"""Change the priority of tag TAGNAME such that it is lower
than the priority of BELOWTHIS."""
self.tk.call(self._w, 'tag', 'lower', tagName, belowThis)
def tag_names(self, index=None):
"""Return a list of all tag names."""
return self.tk.splitlist(
self.tk.call(self._w, 'tag', 'names', index))
def tag_nextrange(self, tagName, index1, index2=None):
"""Return a list of start and end index for the first sequence of
characters between INDEX1 and INDEX2 which all have tag TAGNAME.
The text is searched forward from INDEX1."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'nextrange', tagName, index1, index2))
def tag_prevrange(self, tagName, index1, index2=None):
"""Return a list of start and end index for the first sequence of
characters between INDEX1 and INDEX2 which all have tag TAGNAME.
The text is searched backwards from INDEX1."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'prevrange', tagName, index1, index2))
def tag_raise(self, tagName, aboveThis=None):
"""Change the priority of tag TAGNAME such that it is higher
than the priority of ABOVETHIS."""
self.tk.call(
self._w, 'tag', 'raise', tagName, aboveThis)
def tag_ranges(self, tagName):
"""Return a list of ranges of text which have tag TAGNAME."""
return self.tk.splitlist(self.tk.call(
self._w, 'tag', 'ranges', tagName))
def tag_remove(self, tagName, index1, index2=None):
"""Remove tag TAGNAME from all characters between INDEX1 and INDEX2."""
self.tk.call(
self._w, 'tag', 'remove', tagName, index1, index2)
def window_cget(self, index, option):
"""Return the value of OPTION of an embedded window at INDEX."""
if option[:1] != '-':
option = '-' + option
if option[-1:] == '_':
option = option[:-1]
return self.tk.call(self._w, 'window', 'cget', index, option)
def window_configure(self, index, cnf=None, **kw):
"""Configure an embedded window at INDEX."""
return self._configure(('window', 'configure', index), cnf, kw)
window_config = window_configure
def window_create(self, index, cnf={}, **kw):
"""Create a window at INDEX."""
self.tk.call(
(self._w, 'window', 'create', index)
+ self._options(cnf, kw))
def window_names(self):
"""Return all names of embedded windows in this widget."""
return self.tk.splitlist(
self.tk.call(self._w, 'window', 'names'))
def yview_pickplace(self, *what):
"""Obsolete function, use see."""
self.tk.call((self._w, 'yview', '-pickplace') + what)
class _setit:
    """Internal class.  Wraps the per-entry command used by OptionMenu:
    invoking it stores the entry's value in the variable and forwards
    to the optional user callback."""
    def __init__(self, var, value, callback=None):
        self.__var = var
        self.__value = value
        self.__callback = callback
    def __call__(self, *args):
        # First update the variable, then notify the callback with the
        # selected value followed by any extra event arguments.
        self.__var.set(self.__value)
        if self.__callback:
            self.__callback(self.__value, *args)
class OptionMenu(Menubutton):
    """OptionMenu which allows the user to select a value from a menu."""
    def __init__(self, master, variable, value, *values, **kwargs):
        """Construct an optionmenu widget with the parent MASTER, with
        the resource textvariable set to VARIABLE, the initially selected
        value VALUE, the other menu values VALUES and an additional
        keyword argument command.

        Raises TclError if any keyword other than 'command' is given.
        """
        kw = {"borderwidth": 2, "textvariable": variable,
              "indicatoron": 1, "relief": RAISED, "anchor": "c",
              "highlightthickness": 2}
        Widget.__init__(self, master, "menubutton", kw)
        self.widgetName = 'tk_optionMenu'
        menu = self.__menu = Menu(self, name="menu", tearoff=0)
        self.menuname = menu._w
        # 'command' is the only supported keyword
        callback = kwargs.pop('command', None)
        if kwargs:
            # next(iter(...)) instead of keys()[0]: works both on Python 2
            # lists and on Python 3 dict views.
            raise TclError('unknown option -' + next(iter(kwargs)))
        # One entry per value; _setit stores the choice in VARIABLE and
        # forwards to CALLBACK.
        menu.add_command(label=value,
                         command=_setit(variable, value, callback))
        for v in values:
            menu.add_command(label=v,
                             command=_setit(variable, v, callback))
        self["menu"] = menu
    def __getitem__(self, name):
        # Expose the internal menu under the 'menu' key; defer everything
        # else to the normal widget option lookup.
        if name == 'menu':
            return self.__menu
        return Widget.__getitem__(self, name)
    def destroy(self):
        """Destroy this widget and the associated menu."""
        Menubutton.destroy(self)
        self.__menu = None
class Image:
    """Base class for images (see PhotoImage and BitmapImage)."""
    _last_id = 0          # class-wide counter used to generate unique names
    def __init__(self, imgtype, name=None, cnf={}, master=None, **kw):
        self.name = None
        if not master:
            master = _default_root
        if not master:
            # raise-as-call syntax: valid on Python 2 and 3 alike,
            # unlike the old 'raise RuntimeError, msg' statement form.
            raise RuntimeError('Too early to create image')
        self.tk = master.tk
        if not name:
            Image._last_id += 1
            name = "pyimage%r" % (Image._last_id,) # tk itself would use image<x>
            # The following is needed for systems where id(x)
            # can return a negative number, such as Linux/m68k:
            if name[0] == '-': name = '_' + name[1:]
        if kw and cnf: cnf = _cnfmerge((cnf, kw))
        elif kw: cnf = kw
        options = ()
        for k, v in cnf.items():
            if hasattr(v, '__call__'):
                # Wrap Python callables as Tcl command names.
                v = self._register(v)
            options = options + ('-'+k, v)
        self.tk.call(('image', 'create', imgtype, name,) + options)
        self.name = name
    def __str__(self): return self.name
    def __del__(self):
        if self.name:
            try:
                self.tk.call('image', 'delete', self.name)
            except TclError:
                # May happen if the root was destroyed
                pass
    def __setitem__(self, key, value):
        self.tk.call(self.name, 'configure', '-'+key, value)
    def __getitem__(self, key):
        return self.tk.call(self.name, 'configure', '-'+key)
    def configure(self, **kw):
        """Configure the image."""
        res = ()
        for k, v in _cnfmerge(kw).items():
            if v is not None:
                if k[-1] == '_': k = k[:-1]  # strip keyword-dodging underscore
                if hasattr(v, '__call__'):
                    v = self._register(v)
                res = res + ('-'+k, v)
        self.tk.call((self.name, 'config') + res)
    config = configure
    def height(self):
        """Return the height of the image."""
        return getint(
            self.tk.call('image', 'height', self.name))
    def type(self):
        """Return the type of the image, e.g. "photo" or "bitmap"."""
        return self.tk.call('image', 'type', self.name)
    def width(self):
        """Return the width of the image."""
        return getint(
            self.tk.call('image', 'width', self.name))
class PhotoImage(Image):
    """Widget which can display colored images in GIF, PPM/PGM format."""
    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create an image with NAME.

        Valid resource names: data, format, file, gamma, height, palette,
        width."""
        Image.__init__(self, 'photo', name, cnf, master, **kw)
    def blank(self):
        """Display a transparent image."""
        self.tk.call(self.name, 'blank')
    def cget(self, option):
        """Return the value of OPTION."""
        return self.tk.call(self.name, 'cget', '-' + option)
    # XXX config
    def __getitem__(self, key):
        return self.tk.call(self.name, 'cget', '-' + key)
    # XXX copy -from, -to, ...?
    def copy(self):
        """Return a new PhotoImage with the same image as this widget."""
        duplicate = PhotoImage()
        self.tk.call(duplicate, 'copy', self.name)
        return duplicate
    def zoom(self, x, y=''):
        """Return a new PhotoImage with the same image as this widget
        but magnified X times horizontally and Y times vertically
        (Y defaults to X)."""
        duplicate = PhotoImage()
        if y == '':
            y = x
        self.tk.call(duplicate, 'copy', self.name, '-zoom', x, y)
        return duplicate
    def subsample(self, x, y=''):
        """Return a new PhotoImage based on the same image as this widget
        but keeping only every Xth/Yth pixel (Y defaults to X)."""
        duplicate = PhotoImage()
        if y == '':
            y = x
        self.tk.call(duplicate, 'copy', self.name, '-subsample', x, y)
        return duplicate
    def get(self, x, y):
        """Return the color (red, green, blue) of the pixel at X,Y."""
        return self.tk.call(self.name, 'get', x, y)
    def put(self, data, to=None):
        """Put row formatted colors to image starting from
        position TO, e.g. image.put("{red green} {blue yellow}", to=(4,6))"""
        args = (self.name, 'put', data)
        if to:
            if to[0] == '-to':
                # Tolerate callers that already included the '-to' marker.
                to = to[1:]
            args = args + ('-to',) + tuple(to)
        self.tk.call(args)
    # XXX read
    def write(self, filename, format=None, from_coords=None):
        """Write image to file FILENAME in FORMAT starting from
        position FROM_COORDS."""
        args = (self.name, 'write', filename)
        if format:
            args = args + ('-format', format)
        if from_coords:
            args = args + ('-from',) + tuple(from_coords)
        self.tk.call(args)
class BitmapImage(Image):
    """Widget which can display a bitmap (two-color) image."""
    def __init__(self, name=None, cnf={}, master=None, **kw):
        """Create a bitmap with NAME.

        Valid resource names: background, data, file, foreground,
        maskdata, maskfile."""
        Image.__init__(self, 'bitmap', name, cnf, master, **kw)
def image_names():
    """Return the names of all images known to the default root."""
    return _default_root.tk.call('image', 'names')
def image_types():
    """Return the image types supported by the default root (e.g. photo)."""
    return _default_root.tk.call('image', 'types')
class Spinbox(Widget, XView):
    """spinbox widget.

    Thin wrapper around the Tk 'spinbox' command: each method forwards
    its arguments to the corresponding Tcl subcommand unchanged.
    """
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a spinbox widget with the parent MASTER.
        STANDARD OPTIONS
            activebackground, background, borderwidth,
            cursor, exportselection, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, insertbackground,
            insertborderwidth, insertofftime,
            insertontime, insertwidth, justify, relief,
            repeatdelay, repeatinterval,
            selectbackground, selectborderwidth
            selectforeground, takefocus, textvariable
            xscrollcommand.
        WIDGET-SPECIFIC OPTIONS
            buttonbackground, buttoncursor,
            buttondownrelief, buttonuprelief,
            command, disabledbackground,
            disabledforeground, format, from,
            invalidcommand, increment,
            readonlybackground, state, to,
            validate, validatecommand values,
            width, wrap,
        """
        Widget.__init__(self, master, 'spinbox', cnf, kw)
    def bbox(self, index):
        """Return a tuple of X1,Y1,X2,Y2 coordinates for a
        rectangle which encloses the character given by index.
        The first two elements of the list give the x and y
        coordinates of the upper-left corner of the screen
        area covered by the character (in pixels relative
        to the widget) and the last two elements give the
        width and height of the character, in pixels. The
        bounding box may refer to a region outside the
        visible area of the window.
        """
        return self.tk.call(self._w, 'bbox', index)
    def delete(self, first, last=None):
        """Delete one or more elements of the spinbox.
        First is the index of the first character to delete,
        and last is the index of the character just after
        the last one to delete. If last isn't specified it
        defaults to first+1, i.e. a single character is
        deleted. This command returns an empty string.
        """
        return self.tk.call(self._w, 'delete', first, last)
    def get(self):
        """Returns the spinbox's string"""
        return self.tk.call(self._w, 'get')
    def icursor(self, index):
        """Alter the position of the insertion cursor.
        The insertion cursor will be displayed just before
        the character given by index. Returns an empty string
        """
        return self.tk.call(self._w, 'icursor', index)
    def identify(self, x, y):
        """Returns the name of the widget at position x, y
        Return value is one of: none, buttondown, buttonup, entry
        """
        return self.tk.call(self._w, 'identify', x, y)
    def index(self, index):
        """Returns the numerical index corresponding to index
        """
        return self.tk.call(self._w, 'index', index)
    def insert(self, index, s):
        """Insert string s at index
        Returns an empty string.
        """
        return self.tk.call(self._w, 'insert', index, s)
    def invoke(self, element):
        """Causes the specified element to be invoked
        The element could be buttondown or buttonup
        triggering the action associated with it.
        """
        return self.tk.call(self._w, 'invoke', element)
    def scan(self, *args):
        """Internal function.  Forwards to the Tcl 'scan' subcommand;
        used by scan_mark and scan_dragto below."""
        return self._getints(
            self.tk.call((self._w, 'scan') + args)) or ()
    def scan_mark(self, x):
        """Records x and the current view in the spinbox window;
        used in conjunction with later scan dragto commands.
        Typically this command is associated with a mouse button
        press in the widget. It returns an empty string.
        """
        return self.scan("mark", x)
    def scan_dragto(self, x):
        """Compute the difference between the given x argument
        and the x argument to the last scan mark command
        It then adjusts the view left or right by 10 times the
        difference in x-coordinates. This command is typically
        associated with mouse motion events in the widget, to
        produce the effect of dragging the spinbox at high speed
        through the window. The return value is an empty string.
        """
        return self.scan("dragto", x)
    def selection(self, *args):
        """Internal function.  Forwards to the Tcl 'selection'
        subcommand; used by the selection_* methods below."""
        return self._getints(
            self.tk.call((self._w, 'selection') + args)) or ()
    def selection_adjust(self, index):
        """Locate the end of the selection nearest to the character
        given by index,
        Then adjust that end of the selection to be at index
        (i.e including but not going beyond index). The other
        end of the selection is made the anchor point for future
        select to commands. If the selection isn't currently in
        the spinbox, then a new selection is created to include
        the characters between index and the most recent selection
        anchor point, inclusive. Returns an empty string.
        """
        return self.selection("adjust", index)
    def selection_clear(self):
        """Clear the selection
        If the selection isn't in this widget then the
        command has no effect. Returns an empty string.
        """
        return self.selection("clear")
    def selection_element(self, element=None):
        """Sets or gets the currently selected element.
        If a spinbutton element is specified, it will be
        displayed depressed
        """
        return self.selection("element", element)
###########################################################################
class LabelFrame(Widget):
    """labelframe widget: a frame with an optional label around it."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a labelframe widget with the parent MASTER.

        STANDARD OPTIONS
            borderwidth, cursor, font, foreground,
            highlightbackground, highlightcolor,
            highlightthickness, padx, pady, relief,
            takefocus, text

        WIDGET-SPECIFIC OPTIONS
            background, class, colormap, container,
            height, labelanchor, labelwidget,
            visual, width
        """
        Widget.__init__(self, master, 'labelframe', cnf, kw)
########################################################################
class PanedWindow(Widget):
    """panedwindow widget: stacks child panes separated by movable sashes."""
    def __init__(self, master=None, cnf={}, **kw):
        """Construct a panedwindow widget with the parent MASTER.

        STANDARD OPTIONS
            background, borderwidth, cursor, height,
            orient, relief, width

        WIDGET-SPECIFIC OPTIONS
            handlepad, handlesize, opaqueresize,
            sashcursor, sashpad, sashrelief,
            sashwidth, showhandle,
        """
        Widget.__init__(self, master, 'panedwindow', cnf, kw)
    def add(self, child, **kw):
        """Add a child widget to the panedwindow in a new pane.

        The child argument is the name of the child widget
        followed by pairs of arguments that specify how to
        manage the windows. The possible options and values
        are the ones accepted by the paneconfigure method.
        """
        self.tk.call((self._w, 'add', child) + self._options(kw))
    def remove(self, child):
        """Remove the pane containing child from the panedwindow.

        All geometry management options for child will be forgotten.
        """
        self.tk.call(self._w, 'forget', child)
    forget = remove
    def identify(self, x, y):
        """Identify the panedwindow component at point x, y.

        If the point is over a sash or a sash handle, the result
        is a two element list containing the index of the sash or
        handle, and a word indicating whether it is over a sash
        or a handle, such as {0 sash} or {2 handle}. If the point
        is over any other part of the panedwindow, the result is
        an empty list.
        """
        return self.tk.call(self._w, 'identify', x, y)
    def proxy(self, *args):
        """Internal function; forwards to the Tcl 'proxy' subcommand."""
        return self._getints(
            self.tk.call((self._w, 'proxy') + args)) or ()
    def proxy_coord(self):
        """Return the x and y pair of the most recent proxy location."""
        return self.proxy("coord")
    def proxy_forget(self):
        """Remove the proxy from the display."""
        return self.proxy("forget")
    def proxy_place(self, x, y):
        """Place the proxy at the given x and y coordinates."""
        return self.proxy("place", x, y)
    def sash(self, *args):
        """Internal function; forwards to the Tcl 'sash' subcommand."""
        return self._getints(
            self.tk.call((self._w, 'sash') + args)) or ()
    def sash_coord(self, index):
        """Return the current x and y pair for the sash given by index.

        Index must be an integer between 0 and 1 less than the
        number of panes in the panedwindow. The coordinates given are
        those of the top left corner of the region containing the sash.
        """
        return self.sash("coord", index)
    def sash_mark(self, index):
        """Records x and y for the sash given by index;
        used in conjunction with later dragto commands to move the sash.
        """
        return self.sash("mark", index)
    def sash_place(self, index, x, y):
        """Place the sash given by index at the given coordinates."""
        return self.sash("place", index, x, y)
    def panecget(self, child, option):
        """Query a management option for window.

        Option may be any value allowed by the paneconfigure subcommand.
        """
        return self.tk.call(
            (self._w, 'panecget') + (child, '-'+option))
    def paneconfigure(self, tagOrId, cnf=None, **kw):
        """Query or modify the management options for window.

        Called with no option arguments, return a dict mapping each
        option name to its full specification tuple.  Called with a
        single option name, return that option's specification tuple.
        Called with option=value pairs, apply them and return None.

        Supported options:

        after window -- insert the window after the named managed window.
        before window -- insert the window before the named managed window.
        height size -- outer height (including border); an empty string
            lets the window's requested height be used; may later be
            changed by sash movement. Accepts any Tk_GetPixels value.
        minsize n -- minimum size in the paned dimension (x for
            horizontal, y for vertical panedwindows).
        padx n / pady n -- non-negative extra space left on each side of
            the window in the x / y direction.
        sticky style -- a string of 'n', 's', 'e', 'w' characters
            positioning (or, when opposite sides are given, stretching)
            the window within its pane; spaces/commas are ignored.
        width size -- outer width (including border); empty string as for
            height.
        """
        if cnf is None and not kw:
            # Build {option-name: option-spec-tuple} from Tk's reply.
            cnf = {}
            for x in self.tk.split(
                    self.tk.call(self._w, 'paneconfigure', tagOrId)):
                cnf[x[0][1:]] = (x[0][1:],) + x[1:]
            return cnf
        # isinstance(cnf, str) instead of 'type(cnf) == StringType':
        # equivalent on Python 2 (StringType is str) and does not depend
        # on the deprecated types-module global, so it also works on 3.
        if isinstance(cnf, str) and not kw:
            x = self.tk.split(self.tk.call(
                self._w, 'paneconfigure', tagOrId, '-'+cnf))
            return (x[0][1:],) + x[1:]
        self.tk.call((self._w, 'paneconfigure', tagOrId) +
                     self._options(cnf, kw))
    paneconfig = paneconfigure
    def panes(self):
        """Returns an ordered list of the child panes."""
        return self.tk.call(self._w, 'panes')
######################################################################
# Extensions:
class Studbutton(Button):
    """Button variant backed by the non-standard 'studbutton' widget."""
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'studbutton', cnf, kw)
        # Recreate the standard button event behavior for this widget.
        for sequence, handler in (('<Any-Enter>', self.tkButtonEnter),
                                  ('<Any-Leave>', self.tkButtonLeave),
                                  ('<1>', self.tkButtonDown),
                                  ('<ButtonRelease-1>', self.tkButtonUp)):
            self.bind(sequence, handler)
class Tributton(Button):
    """Button variant backed by the non-standard 'tributton' widget."""
    def __init__(self, master=None, cnf={}, **kw):
        Widget.__init__(self, master, 'tributton', cnf, kw)
        # Recreate the standard button event behavior for this widget.
        for sequence, handler in (('<Any-Enter>', self.tkButtonEnter),
                                  ('<Any-Leave>', self.tkButtonLeave),
                                  ('<1>', self.tkButtonDown),
                                  ('<ButtonRelease-1>', self.tkButtonUp)):
            self.bind(sequence, handler)
        # Hide the foreground and the depressed highlight by matching
        # them to the background color.
        self['fg'] = self['bg']
        self['activebackground'] = self['bg']
######################################################################
# Test:
def _test():
    """Small interactive smoke test run when the module is executed
    as a script: a version label, a self-updating button and QUIT."""
    root = Tk()
    text = "This is Tcl/Tk version %s" % TclVersion
    if TclVersion >= 8.1:
        try:
            # Exercise non-ASCII text where unicode() exists (Python 2).
            text += unicode("\nThis should be a cedilla: \347",
                            "iso-8859-1")
        except NameError:
            pass  # no unicode support
    label = Label(root, text=text)
    label.pack()
    test = Button(root, text="Click me!",
                  command=lambda root=root: root.test.configure(
                      text="[%s]" % root.test['text']))
    test.pack()
    root.test = test
    quit = Button(root, text="QUIT", command=root.destroy)
    quit.pack()
    # The following three commands are needed so the window pops
    # up on top on Windows...
    root.iconify()
    root.update()
    root.deiconify()
    root.mainloop()
if __name__ == '__main__':
    _test()
| gpl-2.0 |
B-MOOC/edx-platform | lms/djangoapps/mobile_api/social_facebook/groups/tests.py | 47 | 8153 | """
Tests for groups
"""
import httpretty
from ddt import ddt, data
from django.conf import settings
from django.core.urlresolvers import reverse
from courseware.tests.factories import UserFactory
from ..test_utils import SocialFacebookTestCase
@ddt
class TestGroups(SocialFacebookTestCase):
    """
    Tests for /api/mobile/v0.5/social/facebook/groups/...
    """
    def setUp(self):
        super(TestGroups, self).setUp()
        self.user = UserFactory.create()
        self.client.login(username=self.user.username, password='test')

    # Group Creation and Deletion Tests
    @httpretty.activate
    def test_create_new_open_group(self):
        """Creating an 'open' group succeeds and returns the new group id."""
        group_id = '12345678'
        status_code = 200
        self.set_facebook_interceptor_for_access_token()
        self.set_facebook_interceptor_for_groups({'id': group_id}, status_code)
        url = reverse('create-delete-group', kwargs={'group_id': ''})
        response = self.client.post(
            url,
            {
                'name': 'TheBestGroup',
                'description': 'The group for the best people',
                'privacy': 'open'
            }
        )
        self.assertEqual(response.status_code, status_code)
        self.assertIn('id', response.data)  # pylint: disable=E1103
        self.assertEqual(response.data['id'], group_id)  # pylint: disable=E1103

    @httpretty.activate
    def test_create_new_closed_group(self):
        """Creating a 'closed' group succeeds and returns the new group id."""
        group_id = '12345678'
        status_code = 200
        self.set_facebook_interceptor_for_access_token()
        self.set_facebook_interceptor_for_groups({'id': group_id}, status_code)
        # Create new group
        url = reverse('create-delete-group', kwargs={'group_id': ''})
        response = self.client.post(
            url,
            {
                'name': 'TheBestGroup',
                'description': 'The group for the best people',
                'privacy': 'closed'
            }
        )
        self.assertEqual(response.status_code, status_code)
        self.assertIn('id', response.data)  # pylint: disable=E1103
        self.assertEqual(response.data['id'], group_id)  # pylint: disable=E1103

    def test_create_new_group_no_name(self):
        """Omitting the required 'name' field is a 400."""
        url = reverse('create-delete-group', kwargs={'group_id': ''})
        response = self.client.post(url, {})
        self.assertEqual(response.status_code, 400)

    def test_create_new_group_with_invalid_name(self):
        """Sending the name under the wrong key is a 400."""
        url = reverse('create-delete-group', kwargs={'group_id': ''})
        response = self.client.post(url, {'invalid_name': 'TheBestGroup'})
        self.assertEqual(response.status_code, 400)

    def test_create_new_group_with_invalid_privacy(self):
        """An unrecognized 'privacy' value is a 400."""
        url = reverse('create-delete-group', kwargs={'group_id': ''})
        response = self.client.post(
            url,
            {'name': 'TheBestGroup', 'privacy': 'half_open_half_closed'}
        )
        self.assertEqual(response.status_code, 400)

    @httpretty.activate
    def test_delete_group_that_exists(self):
        """Deleting a freshly created group succeeds."""
        # Create new group
        group_id = '12345678'
        status_code = 200
        self.set_facebook_interceptor_for_access_token()
        self.set_facebook_interceptor_for_groups({'id': group_id}, status_code)
        url = reverse('create-delete-group', kwargs={'group_id': ''})
        response = self.client.post(
            url,
            {
                'name': 'TheBestGroup',
                'description': 'The group for the best people',
                'privacy': 'open'
            }
        )
        self.assertEqual(response.status_code, status_code)
        self.assertIn('id', response.data)  # pylint: disable=E1103
        # delete group
        httpretty.register_uri(
            httpretty.POST,
            'https://graph.facebook.com/{}/{}/groups/{}?access_token=FakeToken&method=delete'.format(
                settings.FACEBOOK_API_VERSION,
                settings.FACEBOOK_APP_ID,
                group_id
            ),
            body='{"success": "true"}',
            status=status_code
        )
        response = self.delete_group(response.data['id'])  # pylint: disable=E1101
        # Was assertTrue(status_code, expected), which is vacuous -- the
        # expected value became the failure message. assertEqual is intended.
        self.assertEqual(response.status_code, status_code)  # pylint: disable=E1101

    @httpretty.activate
    def test_delete(self):
        """Deleting a group for which Facebook reports an error."""
        group_id = '12345678'
        status_code = 400
        httpretty.register_uri(
            httpretty.GET,
            'https://graph.facebook.com/oauth/access_token?client_secret={}&grant_type=client_credentials&client_id={}'
            .format(
                settings.FACEBOOK_APP_SECRET,
                settings.FACEBOOK_APP_ID
            ),
            body='FakeToken=FakeToken',
            status=200
        )
        httpretty.register_uri(
            httpretty.POST,
            'https://graph.facebook.com/{}/{}/groups/{}?access_token=FakeToken&method=delete'.format(
                settings.FACEBOOK_API_VERSION,
                settings.FACEBOOK_APP_ID,
                group_id
            ),
            body='{"error": {"message": "error message"}}',
            status=status_code
        )
        response = self.delete_group(group_id)
        # Was assertTrue(status_code, expected) -- vacuous; assertEqual is
        # the intended check (previously never actually verified).
        self.assertEqual(response.status_code, status_code)

    # Member addition and Removal tests
    @data('1234,,,,5678,,', 'this00is00not00a00valid00id', '1234,abc,5678', '')
    def test_invite_single_member_malformed_member_id(self, member_id):
        """Malformed member-id strings are rejected with a 400."""
        group_id = '111111111111111'
        response = self.invite_to_group(group_id, member_id)
        self.assertEqual(response.status_code, 400)

    @httpretty.activate
    def test_invite_single_member(self):
        """Inviting one valid member reports per-member success."""
        group_id = '111111111111111'
        member_id = '44444444444444444'
        status_code = 200
        self.set_facebook_interceptor_for_access_token()
        self.set_facebook_interceptor_for_members({'success': 'True'}, status_code, group_id, member_id)
        response = self.invite_to_group(group_id, member_id)
        self.assertEqual(response.status_code, status_code)
        self.assertIn('success', response.data[member_id])  # pylint: disable=E1103

    @httpretty.activate
    def test_invite_multiple_members_successfully(self):
        """Inviting several members reports success for each of them."""
        member_ids = '222222222222222,333333333333333,44444444444444444'
        group_id = '111111111111111'
        status_code = 200
        self.set_facebook_interceptor_for_access_token()
        for member_id in member_ids.split(','):
            self.set_facebook_interceptor_for_members({'success': 'True'}, status_code, group_id, member_id)
        response = self.invite_to_group(group_id, member_ids)
        self.assertEqual(response.status_code, status_code)
        for member_id in member_ids.split(','):
            self.assertIn('success', response.data[member_id])  # pylint: disable=E1103

    @httpretty.activate
    def test_invite_single_member_unsuccessfully(self):
        """A Facebook-side failure is reported per member, overall 200."""
        group_id = '111111111111111'
        member_id = '44444444444444444'
        status_code = 400
        self.set_facebook_interceptor_for_access_token()
        self.set_facebook_interceptor_for_members(
            {'error': {'message': 'error message'}},
            status_code, group_id, member_id
        )
        response = self.invite_to_group(group_id, member_id)
        self.assertEqual(response.status_code, 200)
        self.assertIn('error message', response.data[member_id])  # pylint: disable=E1103

    @httpretty.activate
    def test_invite_multiple_members_unsuccessfully(self):
        """Facebook-side failures are reported for every member, overall 200."""
        member_ids = '222222222222222,333333333333333,44444444444444444'
        group_id = '111111111111111'
        status_code = 400
        self.set_facebook_interceptor_for_access_token()
        for member_id in member_ids.split(','):
            self.set_facebook_interceptor_for_members(
                {'error': {'message': 'error message'}},
                status_code, group_id, member_id
            )
        response = self.invite_to_group(group_id, member_ids)
        self.assertEqual(response.status_code, 200)
        for member_id in member_ids.split(','):
            self.assertIn('error message', response.data[member_id])  # pylint: disable=E1103
| agpl-3.0 |
Godiyos/python-for-android | python3-alpha/python3-src/Lib/tkinter/constants.py | 375 | 1493 | # Symbolic constants for Tk
# Boolean values accepted by Tk.
NO = FALSE = OFF = 0
YES = TRUE = ON = 1

# Compass points for the -anchor and -sticky options.
N = 'n'
S = 's'
W = 'w'
E = 'e'
NW = 'nw'
SW = 'sw'
NE = 'ne'
SE = 'se'
NS = 'ns'
EW = 'ew'
NSEW = 'nsew'
CENTER = 'center'

# Values for the -fill option.
NONE = 'none'
X = 'x'
Y = 'y'
BOTH = 'both'

# Values for the -side option.
LEFT = 'left'
TOP = 'top'
RIGHT = 'right'
BOTTOM = 'bottom'

# Values for the -relief option.
RAISED = 'raised'
SUNKEN = 'sunken'
FLAT = 'flat'
RIDGE = 'ridge'
GROOVE = 'groove'
SOLID = 'solid'

# Values for the -orient option.
HORIZONTAL = 'horizontal'
VERTICAL = 'vertical'

# Values for the -tabs option.
NUMERIC = 'numeric'

# Values for the -wrap option.
CHAR = 'char'
WORD = 'word'

# Values for the -align option.
BASELINE = 'baseline'

# Values for the -bordermode option.
INSIDE = 'inside'
OUTSIDE = 'outside'

# Special tags, marks and insert positions.
SEL = 'sel'
SEL_FIRST = 'sel.first'
SEL_LAST = 'sel.last'
END = 'end'
INSERT = 'insert'
CURRENT = 'current'
ANCHOR = 'anchor'
ALL = 'all'  # e.g. Canvas.delete(ALL)

# Text widget and button states.
NORMAL = 'normal'
DISABLED = 'disabled'
ACTIVE = 'active'
# Canvas item state.
HIDDEN = 'hidden'

# Menu item types.
CASCADE = 'cascade'
CHECKBUTTON = 'checkbutton'
COMMAND = 'command'
RADIOBUTTON = 'radiobutton'
SEPARATOR = 'separator'

# Selection modes for list boxes.
SINGLE = 'single'
BROWSE = 'browse'
MULTIPLE = 'multiple'
EXTENDED = 'extended'

# Activestyle values for list boxes (NONE = 'none' is also valid).
DOTBOX = 'dotbox'
UNDERLINE = 'underline'

# Various canvas item styles.
PIESLICE = 'pieslice'
CHORD = 'chord'
ARC = 'arc'
FIRST = 'first'
LAST = 'last'
BUTT = 'butt'
PROJECTING = 'projecting'
ROUND = 'round'
BEVEL = 'bevel'
MITER = 'miter'

# Arguments to xview/yview.
MOVETO = 'moveto'
SCROLL = 'scroll'
UNITS = 'units'
PAGES = 'pages'
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.