repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
umitproject/network-admin
netadmin/events/models.py
1
8198
#!/usr/bin/env python # -*- coding: utf-8 -*- # Copyright (C) 2011 Adriano Monteiro Marques # # Authors: Amit pal<amix.pal@gmail.com> # Piotrek Wasilewski <wasilewski.piotrek@gmail.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. try: import simplejson as json except ImportError: import json from django.db import models from django.utils.translation import ugettext as _ from django.contrib.auth.models import User from django.template.defaultfilters import slugify import pytz import time from netadmin.networks.models import Host from netadmin.users.models import UserProfile ALERT_LEVELS = ( (0, _('No alert')), (1, _('Low')), (2, _('Medium')), (3, _('High')) ) class EventFieldNotFound(Exception): pass class EventFieldsNotValid(Exception): pass class EventTypeCategory(models.Model): """Represents category to which an event type may be assigned """ name = models.CharField(max_length=50, verbose_name=_("Name")) slug = models.SlugField(blank=True, verbose_name=_("Message")) user = models.ForeignKey(User, verbose_name=_("User")) parent = models.OneToOneField('self', verbose_name=_("Parent category"), null=True, blank=True, unique=True) def __unicode__(self): return self.name def save(self, *args, **kwargs): if not self.pk: self.slug = slugify(self.name) super(EventTypeCategory, self).save(*args, **kwargs) class EventType(models.Model): """ Describes type of an event, e.g. 
INFO, CRITICAL etc. Note that every event type is linked with user - its owner. That is because events types are created automatically, when events are reported so every user may have different set of types. Alert level has no effect on reporting events or managing them. This field only indicates importance of events and is used to distinguish those of them which should be treated differently. """ name = models.CharField(max_length=50) name_slug = models.SlugField(blank=True) user = models.ForeignKey(User) alert_level= models.SmallIntegerField(choices=ALERT_LEVELS, default=0) notify = models.BooleanField(default=False) category = models.OneToOneField(EventTypeCategory, unique=True, null=True) def __unicode__(self): return self.name def save(self, *args, **kwargs): if not self.pk: self.name_slug = slugify(self.name) super(EventType, self).save(*args, **kwargs) def delete(self, *args, **kwargs): # delete relations between event type and reports related = self.reportmetaeventtype_set.all() related.delete() super(EventType, self).delete(*args, **kwargs) def events(self): return self.event_set.all() def pending_events(self): return self.events().filter(checked=False) class Event(models.Model): """ Event model class represents single notification reported to the Network Administrator. 
The following fields are defined: * message - description of an event * short_message - shorter description (could be used as a title) * message_slug - slug made of short_message * timestamp - moment, when event occured on host * protocol - network protocol * event_type - foreign key to the EventType object which simply stores short and readable event name like **INFO** or **WARNING** * source_host - foreign key to the Host object; this is the host from where the event came * fields_class - identifier of the class of additional fields * fields_data - serialized fields that contain more specific data about the event * checked - True means that event has been marked by user as known (actually this field is important only for alerts, where information about event status is really important) Note: Although event hasn't *user* field specified, we can say that event belongs to the user who ownes the source host. """ message = models.TextField() short_message = models.CharField(max_length=200) message_slug = models.SlugField() timestamp = models.DateTimeField() protocol = models.CharField(max_length=30) event_type = models.ForeignKey(EventType) source_host = models.ForeignKey(Host, blank=True) fields_class = models.CharField(max_length=50, null=True, blank=True) fields_data = models.TextField(null=True, blank=True) checked = models.BooleanField(default=False) def __unicode__(self): return "'%s' at %s" % (self.message, self.timestamp) def save(self, *args, **kwargs): if not self.pk: self.message_slug = slugify(self.short_message) super(Event, self).save(*args, **kwargs) def get_details(self): """Returns event details extracted from monitoring module fields""" try: fields = json.loads(self.fields_data) except ValueError: raise EventFieldsNotValid(_("Cannot decode fields data.")) return fields fields = property(get_details) def get_localized_timestamp(self): host_timezone = pytz.timezone(self.source_host.timezone) user = User.objects.get(username = self.source_host.user) 
user_obj = UserProfile.objects.get(id = user.id) if user_obj.timezone == u'': user_obj.timezone = self.source_host.timezone user_timezone = pytz.timezone(user_obj.timezone) localized_datetime_host = host_timezone.localize(self.timestamp) localized_datetime_user = user_timezone.localize(self.timestamp) differ_datetime_event = localized_datetime_host - localized_datetime_user event_time = self.timestamp - differ_datetime_event return event_time def get_field(self, field_name, default=None): try: fields = self.get_details() except EventFieldsNotValid: return default if field_name not in fields: return default #raise EventFieldNotFound(_("The field '%s' is not defined for this event.") % field_name) return fields[field_name] def _html_message(self): return self.message.replace('\n', '<br />') html_message = property(_html_message) def user(self): return self.source_host.user def get_html(self): """Notifier support: returns event data in HTML""" title = '%s %s' % (str(self.timestamp), self.event_type.name) return '<h2>%s</h2><p>%s</p>' % (title, self.html_message) def api_detail(self): return { 'event_id': self.pk, 'description': self.message, 'short_description': self.short_message, 'event_type': self.event_type.name, 'timestamp': str(self.timestamp), 'protocol': self.protocol, 'source_host_id': self.source_host.pk, 'fields_class': self.fields_class, 'fields_data': self.fields_data } def api_list(self): return { 'id': self.pk, 'short_description': self.short_message } class EventComment(models.Model): comment = models.TextField() user = models.CharField(max_length=30, null = False, blank=True) timestamp = models.DateTimeField(null=False, blank=True) event = models.ForeignKey(Event, blank=False, null=False) def __unicode__(self): return "'%s' at %s" % (self.comment)
agpl-3.0
sushantgoel/Flask
Work/Trivia - Module 5/env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/dom.py
1229
1457
from __future__ import absolute_import, division, unicode_literals from xml.dom import Node import gettext _ = gettext.gettext from . import _base class TreeWalker(_base.NonRecursiveTreeWalker): def getNodeDetails(self, node): if node.nodeType == Node.DOCUMENT_TYPE_NODE: return _base.DOCTYPE, node.name, node.publicId, node.systemId elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE): return _base.TEXT, node.nodeValue elif node.nodeType == Node.ELEMENT_NODE: attrs = {} for attr in list(node.attributes.keys()): attr = node.getAttributeNode(attr) if attr.namespaceURI: attrs[(attr.namespaceURI, attr.localName)] = attr.value else: attrs[(None, attr.name)] = attr.value return (_base.ELEMENT, node.namespaceURI, node.nodeName, attrs, node.hasChildNodes()) elif node.nodeType == Node.COMMENT_NODE: return _base.COMMENT, node.nodeValue elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE): return (_base.DOCUMENT,) else: return _base.UNKNOWN, node.nodeType def getFirstChild(self, node): return node.firstChild def getNextSibling(self, node): return node.nextSibling def getParentNode(self, node): return node.parentNode
apache-2.0
Azure/WALinuxAgent
azurelinuxagent/common/version.py
1
10864
# Copyright 2019 Microsoft Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Requires Python 2.6+ and Openssl 1.0+ # import os import re import platform import sys import azurelinuxagent.common.conf as conf import azurelinuxagent.common.utils.shellutil as shellutil from azurelinuxagent.common.utils.flexible_version import FlexibleVersion from azurelinuxagent.common.future import ustr, get_linux_distribution __DAEMON_VERSION_ENV_VARIABLE = '_AZURE_GUEST_AGENT_DAEMON_VERSION_' """ The daemon process sets this variable's value to the daemon's version number. The variable is set only on versions >= 2.2.53 """ def set_daemon_version(version): """ Sets the value of the _AZURE_GUEST_AGENT_DAEMON_VERSION_ environment variable. The given 'version' can be a FlexibleVersion or a string that can be parsed into a FlexibleVersion """ flexible_version = version if isinstance(version, FlexibleVersion) else FlexibleVersion(version) os.environ[__DAEMON_VERSION_ENV_VARIABLE] = ustr(flexible_version) def get_daemon_version(): """ Retrieves the value of the _AZURE_GUEST_AGENT_DAEMON_VERSION_ environment variable. The value indicates the version of the daemon that started the current agent process or, if the current process is the daemon, the version of the current process. 
If the variable is not set (because the agent is < 2.2.53, or the process was not started by the daemon and the process is not the daemon itself) the function returns "0.0.0.0" """ if __DAEMON_VERSION_ENV_VARIABLE in os.environ: return FlexibleVersion(os.environ[__DAEMON_VERSION_ENV_VARIABLE]) return FlexibleVersion("0.0.0.0") def get_f5_platform(): """ Add this workaround for detecting F5 products because BIG-IP/IQ/etc do not show their version info in the /etc/product-version location. Instead, the version and product information is contained in the /VERSION file. """ result = [None, None, None, None] f5_version = re.compile("^Version: (\d+\.\d+\.\d+)") # pylint: disable=W1401 f5_product = re.compile("^Product: ([\w-]+)") # pylint: disable=W1401 with open('/VERSION', 'r') as fh: content = fh.readlines() for line in content: version_matches = f5_version.match(line) product_matches = f5_product.match(line) if version_matches: result[1] = version_matches.group(1) elif product_matches: result[3] = product_matches.group(1) if result[3] == "BIG-IP": result[0] = "bigip" result[2] = "bigip" elif result[3] == "BIG-IQ": result[0] = "bigiq" result[2] = "bigiq" elif result[3] == "iWorkflow": result[0] = "iworkflow" result[2] = "iworkflow" return result def get_checkpoint_platform(): take = build = release = "" full_name = open("/etc/cp-release").read().strip() with open("/etc/cloud-version") as f: for line in f: k, _, v = line.partition(": ") v = v.strip() if k == "release": release = v elif k == "take": take = v elif k == "build": build = v return ["gaia", take + "." 
+ build, release, full_name] def get_distro(): if 'FreeBSD' in platform.system(): release = re.sub('\-.*\Z', '', ustr(platform.release())) # pylint: disable=W1401 osinfo = ['freebsd', release, '', 'freebsd'] elif 'OpenBSD' in platform.system(): release = re.sub('\-.*\Z', '', ustr(platform.release())) # pylint: disable=W1401 osinfo = ['openbsd', release, '', 'openbsd'] elif 'Linux' in platform.system(): osinfo = get_linux_distribution(0, 'alpine') elif 'NS-BSD' in platform.system(): release = re.sub('\-.*\Z', '', ustr(platform.release())) # pylint: disable=W1401 osinfo = ['nsbsd', release, '', 'nsbsd'] else: try: # dist() removed in Python 3.8 osinfo = list(platform.dist()) + [''] # pylint: disable=W1505,E1101 except Exception: osinfo = ['UNKNOWN', 'FFFF', '', ''] # The platform.py lib has issue with detecting oracle linux distribution. # Merge the following patch provided by oracle as a temporary fix. if os.path.exists("/etc/oracle-release"): osinfo[2] = "oracle" osinfo[3] = "Oracle Linux" if os.path.exists("/etc/euleros-release"): osinfo[0] = "euleros" if os.path.exists("/etc/mariner-release"): osinfo[0] = "mariner" # The platform.py lib has issue with detecting BIG-IP linux distribution. # Merge the following patch provided by F5. if os.path.exists("/shared/vadc"): osinfo = get_f5_platform() if os.path.exists("/etc/cp-release"): osinfo = get_checkpoint_platform() if os.path.exists("/home/guestshell/azure"): osinfo = ['iosxe', 'csr1000v', '', 'Cisco IOSXE Linux'] # Remove trailing whitespace and quote in distro name osinfo[0] = osinfo[0].strip('"').strip(' ').lower() return osinfo COMMAND_ABSENT = ustr("Absent") COMMAND_FAILED = ustr("Failed") def get_lis_version(): """ This uses the Linux kernel's 'modinfo' command to retrieve the "version" field for the "hv_vmbus" kernel module (the LIS drivers). This is the documented method to retrieve the LIS module version. 
Every Linux guest on Hyper-V will have this driver, but it may not be installed as a module (it could instead be built into the kernel). In that case, this will return "Absent" instead of the version, indicating the driver version can be deduced from the kernel version. It will only return "Failed" in the presence of an exception. This function is used to generate telemetry for the version of the LIS drivers installed on the VM. The function and associated telemetry can be removed after a few releases. """ try: modinfo_output = shellutil.run_command(["modinfo", "-F", "version", "hv_vmbus"]) if modinfo_output: return modinfo_output # If the system doesn't have LIS drivers, 'modinfo' will # return nothing on stdout, which will cause 'run_command' # to return an empty string. return COMMAND_ABSENT except Exception: # Ignore almost every possible exception because this is in a # critical code path. Unfortunately the logger isn't already # imported in this module or we'd log this too. return COMMAND_FAILED def has_logrotate(): try: logrotate_version = shellutil.run_command(["logrotate", "--version"]).split("\n")[0] return logrotate_version except shellutil.CommandError: # A non-zero return code means that logrotate isn't present on # the system; --version shouldn't fail otherwise. return COMMAND_ABSENT except Exception: return COMMAND_FAILED AGENT_NAME = "WALinuxAgent" AGENT_LONG_NAME = "Azure Linux Agent" # # IMPORTANT: Please be sure that the version is always 9.9.9.9 on the develop branch. Automation requires this, otherwise # DCR may test the wrong agent version. # # When doing a release, be sure to use the actual agent version. Current agent version: 2.3.0.2 # AGENT_VERSION = '9.9.9.9' AGENT_LONG_VERSION = "{0}-{1}".format(AGENT_NAME, AGENT_VERSION) AGENT_DESCRIPTION = """ The Azure Linux Agent supports the provisioning and running of Linux VMs in the Azure cloud. This package should be installed on Linux disk images that are built to run in the Azure environment. 
""" AGENT_DIR_GLOB = "{0}-*".format(AGENT_NAME) AGENT_PKG_GLOB = "{0}-*.zip".format(AGENT_NAME) AGENT_PATTERN = "{0}-(.*)".format(AGENT_NAME) AGENT_NAME_PATTERN = re.compile(AGENT_PATTERN) AGENT_PKG_PATTERN = re.compile(AGENT_PATTERN+"\.zip") # pylint: disable=W1401 AGENT_DIR_PATTERN = re.compile(".*/{0}".format(AGENT_PATTERN)) # The execution mode of the VM - IAAS or PAAS. Linux VMs are only executed in IAAS mode. AGENT_EXECUTION_MODE = "IAAS" EXT_HANDLER_PATTERN = b".*/WALinuxAgent-(\d+.\d+.\d+[.\d+]*).*-run-exthandlers" # pylint: disable=W1401 EXT_HANDLER_REGEX = re.compile(EXT_HANDLER_PATTERN) __distro__ = get_distro() DISTRO_NAME = __distro__[0] DISTRO_VERSION = __distro__[1] DISTRO_CODE_NAME = __distro__[2] DISTRO_FULL_NAME = __distro__[3] PY_VERSION = sys.version_info PY_VERSION_MAJOR = sys.version_info[0] PY_VERSION_MINOR = sys.version_info[1] PY_VERSION_MICRO = sys.version_info[2] # Set the CURRENT_AGENT and CURRENT_VERSION to match the agent directory name # - This ensures the agent will "see itself" using the same name and version # as the code that downloads agents. 
def set_current_agent(): path = os.getcwd() lib_dir = conf.get_lib_dir() if lib_dir[-1] != os.path.sep: lib_dir += os.path.sep agent = path[len(lib_dir):].split(os.path.sep)[0] match = AGENT_NAME_PATTERN.match(agent) if match: version = match.group(1) else: agent = AGENT_LONG_VERSION version = AGENT_VERSION return agent, FlexibleVersion(version) def is_agent_package(path): path = os.path.basename(path) return not re.match(AGENT_PKG_PATTERN, path) is None def is_agent_path(path): path = os.path.basename(path) return not re.match(AGENT_NAME_PATTERN, path) is None CURRENT_AGENT, CURRENT_VERSION = set_current_agent() def set_goal_state_agent(): agent = None if os.path.isdir("/proc"): pids = [pid for pid in os.listdir('/proc') if pid.isdigit()] else: pids = [] for pid in pids: try: pname = open(os.path.join('/proc', pid, 'cmdline'), 'rb').read() match = EXT_HANDLER_REGEX.match(pname) if match: agent = match.group(1) if PY_VERSION_MAJOR > 2: agent = agent.decode('UTF-8') break except IOError: continue if agent is None: agent = CURRENT_VERSION return agent GOAL_STATE_AGENT_VERSION = set_goal_state_agent() def is_current_agent_installed(): return CURRENT_AGENT == AGENT_LONG_VERSION
apache-2.0
taedla01/MissionPlanner
Lib/distutils/command/build_clib.py
50
8340
"""distutils.command.build_clib Implements the Distutils 'build_clib' command, to build a C/C++ library that is included in the module distribution and needed by an extension module.""" __revision__ = "$Id$" # XXX this module has *lots* of code ripped-off quite transparently from # build_ext.py -- not surprisingly really, as the work required to build # a static library from a collection of C source files is not really all # that different from what's required to build a shared object file from # a collection of C source files. Nevertheless, I haven't done the # necessary refactoring to account for the overlap in code between the # two modules, mainly because a number of subtle details changed in the # cut 'n paste. Sigh. import os from distutils.core import Command from distutils.errors import DistutilsSetupError from distutils.ccompiler import customize_compiler from distutils import log def show_compilers(): from distutils.ccompiler import show_compilers show_compilers() class build_clib(Command): description = "build C/C++ libraries used by Python extensions" user_options = [ ('build-clib=', 'b', "directory to build C/C++ libraries to"), ('build-temp=', 't', "directory to put temporary build by-products"), ('debug', 'g', "compile with debugging information"), ('force', 'f', "forcibly build everything (ignore file timestamps)"), ('compiler=', 'c', "specify the compiler type"), ] boolean_options = ['debug', 'force'] help_options = [ ('help-compiler', None, "list available compilers", show_compilers), ] def initialize_options(self): self.build_clib = None self.build_temp = None # List of libraries to build self.libraries = None # Compilation options for all libraries self.include_dirs = None self.define = None self.undef = None self.debug = None self.force = 0 self.compiler = None def finalize_options(self): # This might be confusing: both build-clib and build-temp default # to build-temp as defined by the "build" command. 
This is because # I think that C libraries are really just temporary build # by-products, at least from the point of view of building Python # extensions -- but I want to keep my options open. self.set_undefined_options('build', ('build_temp', 'build_clib'), ('build_temp', 'build_temp'), ('compiler', 'compiler'), ('debug', 'debug'), ('force', 'force')) self.libraries = self.distribution.libraries if self.libraries: self.check_library_list(self.libraries) if self.include_dirs is None: self.include_dirs = self.distribution.include_dirs or [] if isinstance(self.include_dirs, str): self.include_dirs = self.include_dirs.split(os.pathsep) # XXX same as for build_ext -- what about 'self.define' and # 'self.undef' ? def run(self): if not self.libraries: return # Yech -- this is cut 'n pasted from build_ext.py! from distutils.ccompiler import new_compiler self.compiler = new_compiler(compiler=self.compiler, dry_run=self.dry_run, force=self.force) customize_compiler(self.compiler) if self.include_dirs is not None: self.compiler.set_include_dirs(self.include_dirs) if self.define is not None: # 'define' option is a list of (name,value) tuples for (name,value) in self.define: self.compiler.define_macro(name, value) if self.undef is not None: for macro in self.undef: self.compiler.undefine_macro(macro) self.build_libraries(self.libraries) def check_library_list(self, libraries): """Ensure that the list of libraries is valid. `library` is presumably provided as a command option 'libraries'. This method checks that it is a list of 2-tuples, where the tuples are (library_name, build_info_dict). Raise DistutilsSetupError if the structure is invalid anywhere; just returns otherwise. 
""" if not isinstance(libraries, list): raise DistutilsSetupError, \ "'libraries' option must be a list of tuples" for lib in libraries: if not isinstance(lib, tuple) and len(lib) != 2: raise DistutilsSetupError, \ "each element of 'libraries' must a 2-tuple" name, build_info = lib if not isinstance(name, str): raise DistutilsSetupError, \ "first element of each tuple in 'libraries' " + \ "must be a string (the library name)" if '/' in name or (os.sep != '/' and os.sep in name): raise DistutilsSetupError, \ ("bad library name '%s': " + "may not contain directory separators") % \ lib[0] if not isinstance(build_info, dict): raise DistutilsSetupError, \ "second element of each tuple in 'libraries' " + \ "must be a dictionary (build info)" def get_library_names(self): # Assume the library list is valid -- 'check_library_list()' is # called from 'finalize_options()', so it should be! if not self.libraries: return None lib_names = [] for (lib_name, build_info) in self.libraries: lib_names.append(lib_name) return lib_names def get_source_files(self): self.check_library_list(self.libraries) filenames = [] for (lib_name, build_info) in self.libraries: sources = build_info.get('sources') if sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError, \ ("in 'libraries' option (library '%s'), " "'sources' must be present and must be " "a list of source filenames") % lib_name filenames.extend(sources) return filenames def build_libraries(self, libraries): for (lib_name, build_info) in libraries: sources = build_info.get('sources') if sources is None or not isinstance(sources, (list, tuple)): raise DistutilsSetupError, \ ("in 'libraries' option (library '%s'), " + "'sources' must be present and must be " + "a list of source filenames") % lib_name sources = list(sources) log.info("building '%s' library", lib_name) # First, compile the source code to object files in the library # directory. 
(This should probably change to putting object # files in a temporary build directory.) macros = build_info.get('macros') include_dirs = build_info.get('include_dirs') objects = self.compiler.compile(sources, output_dir=self.build_temp, macros=macros, include_dirs=include_dirs, debug=self.debug) # Now "link" the object files together into a static library. # (On Unix at least, this isn't really linking -- it just # builds an archive. Whatever.) self.compiler.create_static_lib(objects, lib_name, output_dir=self.build_clib, debug=self.debug)
gpl-3.0
AlmostBetterNetwork/pinecast
assets/management/commands/cleanup_dangling_superseded.py
2
1125
from django.core.management.base import BaseCommand from assets.models import Asset from podcasts.models import PodcastEpisode class Command(BaseCommand): help = 'Cleans up dangling references to superseded files on eps awaiting import' def handle(self, *args, **options): eps = PodcastEpisode.objects.filter(awaiting_import=True) for ep in eps: helped = False if ep.audio.is_superseded(): new_audio = Asset.objects.filter(supersedes=ep.audio)[0] ep.audio = new_audio helped = True if ep.artwork.is_superseded(): artworks = Asset.objects.filter(supersedes=ep.artwork) if artworks.count() == 1: new_artwork = artworks[0] elif ep.podcast.cover_art.supersedes == ep.artwork: new_artwork = ep.podcast.cover_art else: new_artwork = None ep.artwork = None helped = True if helped: ep.awaiting_import = False ep.save()
apache-2.0
pcm17/tensorflow
tensorflow/python/layers/utils.py
33
5515
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= # pylint: disable=unused-import,g-bad-import-order """Contains layer utilies for input validation and format conversion. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import six from six.moves import xrange # pylint: disable=redefined-builtin import numpy as np from tensorflow.python.ops import variables from tensorflow.python.ops import control_flow_ops from tensorflow.python.framework import ops from tensorflow.python.framework import tensor_util def convert_data_format(data_format, ndim): if data_format == 'channels_last': if ndim == 3: return 'NWC' elif ndim == 4: return 'NHWC' elif ndim == 5: return 'NDHWC' else: raise ValueError('Input rank not supported:', ndim) elif data_format == 'channels_first': if ndim == 3: return 'NCW' elif ndim == 4: return 'NCHW' elif ndim == 5: raise ValueError('Data format "channels_first" not supported for ' 'inputs with rank 5.') else: raise ValueError('Input rank not supported:', ndim) else: raise ValueError('Invalid data_format:', data_format) def normalize_tuple(value, n, name): """Transforms a single integer or iterable of integers into an integer tuple. Arguments: value: The value to validate and convert. Could an int, or any iterable of ints. n: The size of the tuple to be returned. 
name: The name of the argument being validated, e.g. "strides" or "kernel_size". This is only used to format error messages. Returns: A tuple of n integers. Raises: ValueError: If something else than an int/long or iterable thereof was passed. """ if isinstance(value, int): return (value,) * n else: try: value_tuple = tuple(value) except TypeError: raise ValueError('The `' + name + '` argument must be a tuple of ' + str(n) + ' integers. Received: ' + str(value)) if len(value_tuple) != n: raise ValueError('The `' + name + '` argument must be a tuple of ' + str(n) + ' integers. Received: ' + str(value)) for single_value in value_tuple: try: int(single_value) except ValueError: raise ValueError('The `' + name + '` argument must be a tuple of ' + str(n) + ' integers. Received: ' + str(value) + ' ' 'including element ' + str(single_value) + ' of type' + ' ' + str(type(single_value))) return value_tuple def normalize_data_format(value): data_format = value.lower() if data_format not in {'channels_first', 'channels_last'}: raise ValueError('The `data_format` argument must be one of ' '"channels_first", "channels_last". Received: ' + str(value)) return data_format def normalize_padding(value): padding = value.lower() if padding not in {'valid', 'same'}: raise ValueError('The `padding` argument must be one of "valid", "same". ' 'Received: ' + str(padding)) return padding def smart_cond(pred, fn1, fn2, name=None): """Return either `fn1()` or `fn2()` based on the boolean predicate `pred`. If `pred` is a bool or has a constant value, we return either `fn1()` or `fn2()`, otherwise we use `tf.cond` to dynamically route to both. Arguments: pred: A scalar determining whether to return the result of `fn1` or `fn2`. fn1: The callable to be performed if pred is true. fn2: The callable to be performed if pred is false. name: Optional name prefix when using `tf.cond`. Returns: Tensors returned by the call to either `fn1` or `fn2`. Raises: TypeError is fn1 or fn2 is not callable. 
""" if not callable(fn1): raise TypeError('`fn1` must be callable.') if not callable(fn2): raise TypeError('`fn2` must be callable.') pred_value = constant_value(pred) if pred_value is not None: if pred_value: return fn1() else: return fn2() else: return control_flow_ops.cond(pred, fn1, fn2, name) def constant_value(pred): """Return the bool value for `pred`, or None if `pred` had a dynamic value. Arguments: pred: A scalar, either a Python bool or a TensorFlow boolean variable or tensor. Returns: True or False if `pred` has a constant boolean value, None otherwise. Raises: TypeError is pred is not a Variable, Tensor or bool. """ if isinstance(pred, bool): pred_value = pred elif isinstance(pred, variables.Variable): pred_value = None elif isinstance(pred, ops.Tensor): pred_value = tensor_util.constant_value(pred) else: raise TypeError('`pred` must be a Tensor, a Variable, or a Python bool.') return pred_value
apache-2.0
camptocamp/QGIS
python/plugins/processing/grass/ext/r_describe.py
1
1238
# -*- coding: utf-8 -*- """ *************************************************************************** r_describe.py --------------------- Date : December 2012 Copyright : (C) 2012 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'December 2012' __copyright__ = '(C) 2012, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' from processing.grass.ext import HtmlReportPostProcessor def postProcessResults(alg): HtmlReportPostProcessor.postProcessResults(alg)
gpl-2.0
highweb-project/highweb-webcl-html5spec
third_party/PRESUBMIT.py
21
5851
# Copyright (c) 2011 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. ANDROID_WHITELISTED_LICENSES = [ 'A(pple )?PSL 2(\.0)?', 'Apache( Version)? 2(\.0)?', '(New )?([23]-Clause )?BSD( [23]-Clause)?( with advertising clause)?', 'L?GPL ?v?2(\.[01])?( or later)?', 'MIT(/X11)?(-like)?', 'MPL 1\.1 ?/ ?GPL 2(\.0)? ?/ ?LGPL 2\.1', 'MPL 2(\.0)?', 'Microsoft Limited Public License', 'Microsoft Permissive License', 'Public Domain', 'Python', 'SGI Free Software License B', 'University of Illinois\/NCSA Open Source', 'X11', ] def LicenseIsCompatibleWithAndroid(input_api, license): regex = '^(%s)$' % '|'.join(ANDROID_WHITELISTED_LICENSES) tokens = \ [x.strip() for x in input_api.re.split(' and |,', license) if len(x) > 0] has_compatible_license = False for token in tokens: if input_api.re.match(regex, token, input_api.re.IGNORECASE): has_compatible_license = True break return has_compatible_license def _CheckThirdPartyReadmesUpdated(input_api, output_api): """ Checks to make sure that README.chromium files are properly updated when dependencies in third_party are modified. 
""" readmes = [] files = [] errors = [] for f in input_api.AffectedFiles(): local_path = f.LocalPath() if input_api.os_path.dirname(local_path) == 'third_party': continue if (local_path.startswith('third_party' + input_api.os_path.sep) and not local_path.startswith('third_party' + input_api.os_path.sep + 'WebKit' + input_api.os_path.sep) and not local_path.startswith('third_party' + input_api.os_path.sep + 'mojo' + input_api.os_path.sep) and not local_path.startswith('third_party' + input_api.os_path.sep + 'boringssl' + input_api.os_path.sep)): files.append(f) if local_path.endswith("README.chromium"): readmes.append(f) if files and not readmes: errors.append(output_api.PresubmitPromptWarning( 'When updating or adding third party code the appropriate\n' '\'README.chromium\' file should also be updated with the correct\n' 'version and package information.', files)) if not readmes: return errors name_pattern = input_api.re.compile( r'^Name: [a-zA-Z0-9_\-\. \(\)]+\r?$', input_api.re.IGNORECASE | input_api.re.MULTILINE) shortname_pattern = input_api.re.compile( r'^Short Name: [a-zA-Z0-9_\-\.]+\r?$', input_api.re.IGNORECASE | input_api.re.MULTILINE) version_pattern = input_api.re.compile( r'^Version: [a-zA-Z0-9_\-\.:]+\r?$', input_api.re.IGNORECASE | input_api.re.MULTILINE) release_pattern = input_api.re.compile( r'^Security Critical: (yes|no)\r?$', input_api.re.IGNORECASE | input_api.re.MULTILINE) license_pattern = input_api.re.compile( r'^License: (.+)\r?$', input_api.re.IGNORECASE | input_api.re.MULTILINE) license_android_compatible_pattern = input_api.re.compile( r'^License Android Compatible: (yes|no)\r?$', input_api.re.IGNORECASE | input_api.re.MULTILINE) for f in readmes: if 'D' in f.Action(): _IgnoreIfDeleting(input_api, output_api, f, errors) continue contents = input_api.ReadFile(f) if (not shortname_pattern.search(contents) and not name_pattern.search(contents)): errors.append(output_api.PresubmitError( 'Third party README files should contain either a 
\'Short Name\' or\n' 'a \'Name\' which is the name under which the package is\n' 'distributed. Check README.chromium.template for details.', [f])) if not version_pattern.search(contents): errors.append(output_api.PresubmitError( 'Third party README files should contain a \'Version\' field.\n' 'If the package is not versioned or the version is not known\n' 'list the version as \'unknown\'.\n' 'Check README.chromium.template for details.', [f])) if not release_pattern.search(contents): errors.append(output_api.PresubmitError( 'Third party README files should contain a \'Security Critical\'\n' 'field. This field specifies whether the package is built with\n' 'Chromium. Check README.chromium.template for details.', [f])) license_match = license_pattern.search(contents) if not license_match: errors.append(output_api.PresubmitError( 'Third party README files should contain a \'License\' field.\n' 'This field specifies the license used by the package. Check\n' 'README.chromium.template for details.', [f])) elif not LicenseIsCompatibleWithAndroid(input_api, license_match.group(1)) \ and not license_android_compatible_pattern.search(contents): errors.append(output_api.PresubmitPromptWarning( 'Cannot determine whether specified license is compatible with\n' + 'the Android licensing requirements. Please check that the license\n' + 'name is spelled according to third_party/PRESUBMIT.py. 
Please see\n' + 'README.chromium.template for details.', [f])) return errors def _IgnoreIfDeleting(input_api, output_api, affected_file, errors): third_party_dir = input_api.os_path.dirname(affected_file.LocalPath()) for f in input_api.AffectedFiles(): if f.LocalPath().startswith(third_party_dir): if 'D' not in f.Action(): errors.append(output_api.PresubmitError( 'Third party README should only be removed when the whole\n' 'directory is being removed.\n', [f, affected_file])) def CheckChangeOnUpload(input_api, output_api): results = [] results.extend(_CheckThirdPartyReadmesUpdated(input_api, output_api)) return results
bsd-3-clause
CloudVLab/professional-services
examples/qaoa/qubo_test.py
2
4821
# Copyright 2018 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Unit tests for qubo.py.""" import unittest import numpy as np from parse_raw_sat import Clause from parse_raw_sat import Sat from qubo import open_brackets from qubo import Penalty from qubo import Qclause from qubo import QuboProblem class _TestOpenBrackets(unittest.TestCase): def test_open_brackets(self): self.assertCountEqual( open_brackets(Clause([-1, -2, -3])), [Qclause([0, 1, 2], 1)]) def test_open_brackets2(self): self.assertCountEqual( open_brackets(Clause([1, 2, 3])), [Qclause([], 1), Qclause([0], -1), Qclause([1], -1), Qclause([0, 1], 1), Qclause([2], -1), Qclause([0, 2], 1), Qclause([1, 2], 1), Qclause([0, 1, 2], -1)]) class _TestQuboProblem(unittest.TestCase): def _qlauses_equals(self, q1, q2): self.assertCountEqual(q1.vars_ind, q2.vars_ind) def test_simple(self): # (X0|X1|X2) & (X2|X4|X5) clauses = [Clause([1, 2, 3]), Clause([3, 5, 6])] sat = Sat(clauses, 2, 'test') q = QuboProblem(sat) self.assertCountEqual( q.var_names, ['X0', 'X1', 'X2', 'X3', 'X4', 'X5', 'X_0_1', 'X_2_4']) self.assertCountEqual( q.qclauses, [Qclause([], 1), Qclause([0], -1), Qclause([1], -1), Qclause([0, 1], 1), Qclause([2], -1), Qclause([0, 2], 1), Qclause([1, 2], 1), Qclause([2, 6], -1), Qclause([], 1), Qclause([2], -1), Qclause([4], -1), Qclause([2, 4], 1), Qclause([5], -1), Qclause([2, 5], 1), Qclause([4, 5], 1), Qclause([5, 7], -1)]) self.assertCountEqual( q.penalties, [Penalty(0, 1, 6), 
Penalty(2, 4, 7)]) def test_replace_var(self): qaoa = QuboProblem(Sat([Clause([1])], 1, 'test')) qclauses = [Qclause([0, 1, 2], 1), Qclause([0, 2, 3], 1), Qclause([0, 2], 1), Qclause([0, 1, 4], 1)] qaoa.qclauses = qclauses qaoa._replace_var(0, 1, 5) self.assertCountEqual( qaoa.qclauses, [Qclause([2, 5], 1), Qclause([0, 2, 3], 1), Qclause([0, 2], 1), Qclause([4, 5], 1)]) def test_get_qaoa(self): # (X0|X1|X2) & (X2|X4|X5) clauses = [Clause([1, 2, 3]), Clause([3, 5, 6])] sat = Sat(clauses, 2, 'test') q = QuboProblem(sat) o, i = q.get_qaoa() expected_states = np.array( [-0.5, -0.5, -1., -0., -0.5, -0.5, 1.5, 1.5]) np.testing.assert_allclose(np.array(o), expected_states) expected_interactions = np.array( [[0., 1., .5, 0., 0., 0., -1., 0.], [1., 0., .5, 0., 0., 0., -1., 0.], [.5, .5, 0., 0., 1., .5, -.5, -1.], [0., 0., 0., 0., 0., 0., 0., 0.], [0., 0., 1., 0., 0., .5, 0., -1.], [0., 0., .5, 0., .5, 0., 0., -.5], [-1., -1., -.5, 0., 0., 0., 0., 0.], [0., 0., -1., 0., -1., -.5, 0., 0.]]) np.testing.assert_allclose(np.array(i), expected_interactions) def test_to_qwave(self): # (X0|X1|X2) & (X2|X4|X5) clauses = [Clause([1, 2, 3]), Clause([3, 5, 6])] sat = Sat(clauses, 2, 'test') q = QuboProblem(sat) lines = q.to_qwave_format() output = """c This is a sample .qubo file p qubo 0 8 7 24 c ------------------ 0 0 -0.5 1 1 -0.5 2 2 -1.0 4 4 -0.5 5 5 -0.5 6 6 1.5 7 7 1.5 c ------------------ 0 1 1.0 0 2 0.5 0 6 -1.0 1 0 1.0 1 2 0.5 1 6 -1.0 2 0 0.5 2 1 0.5 2 4 1.0 2 5 0.5 2 6 -0.5 2 7 -1.0 4 2 1.0 4 5 0.5 4 7 -1.0 5 2 0.5 5 4 0.5 5 7 -0.5 6 0 -1.0 6 1 -1.0 6 2 -0.5 7 2 -1.0 7 4 -1.0 7 5 -0.5 5 5 -0.5""".split('\n') for l0, l1 in zip(lines, output): self.assertEqual(l0.strip(), l1.strip()) if __name__ == '__main__': unittest.main()
apache-2.0
AOSPU/external_chromium_org
chrome/common/extensions/docs/server2/content_provider_test.py
10
7168
#!/usr/bin/env python # Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from cStringIO import StringIO import json import unittest from zipfile import ZipFile from compiled_file_system import CompiledFileSystem from content_provider import ContentProvider from file_system import FileNotFoundError from object_store_creator import ObjectStoreCreator from path_canonicalizer import PathCanonicalizer from test_file_system import TestFileSystem from third_party.handlebar import Handlebar _REDIRECTS_JSON = json.dumps({ 'oldfile.html': 'storage.html', 'index.html': 'https://developers.google.com/chrome', }) _MARKDOWN_CONTENT = ( ('# Header 1 #', u'<h1 id="header-1">Header 1</h1>'), ('1. Foo\n', u'<ol>\n<li>Foo</li>\n</ol>'), ('![alt text](/path/img.jpg "Title")\n', '<p><img alt="alt text" src="/path/img.jpg" title="Title" /></p>'), ('* Unordered item 1', u'<ul>\n<li>Unordered item 1</li>\n</ul>') ) # Test file system data which exercises many different mimetypes. 
_TEST_DATA = { 'dir': { 'a.txt': 'a.txt content', 'b.txt': 'b.txt content', 'c': { 'd.txt': 'd.txt content', }, }, 'dir2': { 'dir3': { 'a.txt': 'a.txt content', 'b.txt': 'b.txt content', 'c': { 'd.txt': 'd.txt content', }, }, }, 'dir4': { 'index.html': 'index.html content 1' }, 'dir5': { 'index.html': 'index.html content 2' }, 'dir6': { 'notindex.html': 'notindex.html content' }, 'dir7': { 'index.md': '\n'.join(text[0] for text in _MARKDOWN_CONTENT) }, 'dir.txt': 'dir.txt content', 'dir5.html': 'dir5.html content', 'img.png': 'img.png content', 'index.html': 'index.html content', 'read.txt': 'read.txt content', 'redirects.json': _REDIRECTS_JSON, 'noextension': 'noextension content', 'run.js': 'run.js content', 'site.css': 'site.css content', 'storage.html': 'storage.html content', 'markdown.md': '\n'.join(text[0] for text in _MARKDOWN_CONTENT) } class ContentProviderUnittest(unittest.TestCase): def setUp(self): self._content_provider = self._CreateContentProvider() def _CreateContentProvider(self, supports_zip=False): object_store_creator = ObjectStoreCreator.ForTest() test_file_system = TestFileSystem(_TEST_DATA) return ContentProvider( 'foo', CompiledFileSystem.Factory(object_store_creator), test_file_system, object_store_creator, default_extensions=('.html', '.md'), # TODO(kalman): Test supports_templates=False. supports_templates=True, supports_zip=supports_zip) def _assertContent(self, content, content_type, content_and_type): # Assert type so that str is differentiated from unicode. 
self.assertEqual(type(content), type(content_and_type.content)) self.assertEqual(content, content_and_type.content) self.assertEqual(content_type, content_and_type.content_type) def _assertTemplateContent(self, content, path): content_and_type = self._content_provider.GetContentAndType(path).Get() self.assertEqual(Handlebar, type(content_and_type.content)) content_and_type.content = content_and_type.content.source self._assertContent(content, 'text/html', content_and_type) def _assertMarkdownContent(self, content, path): content_and_type = self._content_provider.GetContentAndType(path).Get() content_and_type.content = content_and_type.content.source self._assertContent(content, 'text/html', content_and_type) def testPlainText(self): self._assertContent( u'a.txt content', 'text/plain', self._content_provider.GetContentAndType('dir/a.txt').Get()) self._assertContent( u'd.txt content', 'text/plain', self._content_provider.GetContentAndType('dir/c/d.txt').Get()) self._assertContent( u'read.txt content', 'text/plain', self._content_provider.GetContentAndType('read.txt').Get()) self._assertContent( unicode(_REDIRECTS_JSON, 'utf-8'), 'application/json', self._content_provider.GetContentAndType('redirects.json').Get()) self._assertContent( u'run.js content', 'application/javascript', self._content_provider.GetContentAndType('run.js').Get()) self._assertContent( u'site.css content', 'text/css', self._content_provider.GetContentAndType('site.css').Get()) def testTemplate(self): self._assertTemplateContent(u'storage.html content', 'storage.html') def testImage(self): self._assertContent( 'img.png content', 'image/png', self._content_provider.GetContentAndType('img.png').Get()) def testZipTopLevel(self): zip_content_provider = self._CreateContentProvider(supports_zip=True) content_and_type = zip_content_provider.GetContentAndType('dir.zip').Get() zipfile = ZipFile(StringIO(content_and_type.content)) content_and_type.content = zipfile.namelist() self._assertContent( 
['dir/a.txt', 'dir/b.txt', 'dir/c/d.txt'], 'application/zip', content_and_type) def testZip2ndLevel(self): zip_content_provider = self._CreateContentProvider(supports_zip=True) content_and_type = zip_content_provider.GetContentAndType( 'dir2/dir3.zip').Get() zipfile = ZipFile(StringIO(content_and_type.content)) content_and_type.content = zipfile.namelist() self._assertContent( ['dir3/a.txt', 'dir3/b.txt', 'dir3/c/d.txt'], 'application/zip', content_and_type) def testCanonicalZipPaths(self): # Without supports_zip the path is canonicalized as a file. self.assertEqual( 'dir.txt', self._content_provider.GetCanonicalPath('dir.zip')) self.assertEqual( 'dir.txt', self._content_provider.GetCanonicalPath('diR.zip')) # With supports_zip the path is canonicalized as the zip file which # corresponds to the canonical directory. zip_content_provider = self._CreateContentProvider(supports_zip=True) self.assertEqual( 'dir.zip', zip_content_provider.GetCanonicalPath('dir.zip')) self.assertEqual( 'dir.zip', zip_content_provider.GetCanonicalPath('diR.zip')) def testMarkdown(self): self._assertMarkdownContent( '\n'.join(text[1] for text in _MARKDOWN_CONTENT), 'markdown') def testNotFound(self): self.assertRaises( FileNotFoundError, self._content_provider.GetContentAndType('oops').Get) def testIndexRedirect(self): self._assertTemplateContent(u'index.html content', '') self._assertTemplateContent(u'index.html content 1', 'dir4') self._assertTemplateContent(u'dir5.html content', 'dir5') self._assertMarkdownContent( '\n'.join(text[1] for text in _MARKDOWN_CONTENT), 'dir7') self._assertContent( 'noextension content', 'text/plain', self._content_provider.GetContentAndType('noextension').Get()) self.assertRaises( FileNotFoundError, self._content_provider.GetContentAndType('dir6').Get) if __name__ == '__main__': unittest.main()
bsd-3-clause
tectronics/election-gadgets.egypt-staging
voter-info/shapes/makepolys.py
12
6672
#!/usr/bin/env python # makepolys.py import codecs import json import math import os import random import re import shutil import stat import sys import time from geo import Geo import shpUtils import states #states = json.load( open('states.json') ) jsonpath = 'json' shapespath = 'shapefiles' geo = Geo() keysep = '|' states.byNumber = {} useOther = { 'CT': ( 'towns', 'cs09_d00' ), 'MA': ( 'towns', 'cs25_d00' ), 'NH': ( 'towns', 'cs33_d00' ), 'VT': ( 'towns', 'cs50_d00' ), 'KS': ( 'congressional', 'cd20_110' ), 'NE': ( 'congressional', 'cd31_110' ), 'NM': ( 'congressional', 'cd35_110' ), } districtNames = { 'CD1': 'First Congressional District', 'CD2': 'Second Congressional District', 'CD3': 'Third Congressional District', 'CD4': 'Fourth Congressional District', } def loadshapefile( filename ): print 'Loading shapefile %s' % filename t1 = time.time() shapefile = shpUtils.loadShapefile( '%s/%s' %( shapespath, filename ) ) t2 = time.time() print '%0.3f seconds load time' %( t2 - t1 ) return shapefile #def randomColor(): # def hh(): return '%02X' %( random.random() *128 + 96 ) # return hh() + hh() + hh() featuresByName = {} def featureByName( feature ): info = feature['info'] name = info['NAME'] if name not in featuresByName: featuresByName[name] = { 'feature': feature #, #'color': randomColor() } return featuresByName[name] #def filterCONUS( features ): # result = [] # for feature in features: # shape = feature['shape'] # if shape['type'] != 5: continue # info = feature['info'] # state = int(info['STATE']) # if state == 2: continue # Alaska # if state == 15: continue # Hawaii # if state == 72: continue # Puerto Rico # result.append( feature ) # return result def featuresBounds( features ): bounds = [ [ None, None ], [ None, None ] ] for feature in features: shape = feature['shape'] if shape['type'] == 5: for part in shape['parts']: bounds = geo.extendBounds( bounds, part['bounds'] ) return bounds def writeFile( filename, data ): f = open( filename, 'wb' ) f.write( 
data ) f.close() def readShapefile( filename ): print '----------------------------------------' print 'Loading %s' % filename shapefile = loadshapefile( filename ) features = shapefile['features'] print '%d features' % len(features) #conus = filterCONUS( features ) #conusBounds = featuresBounds( conus ) #stateFeatures = filterCONUS( stateFeatures ) #print '%d features in CONUS states' % len(stateFeatures) #writeFile( 'features.csv', shpUtils.dumpFeatureInfo(features) ) nPoints = nPolys = 0 places = {} for feature in features: shape = feature['shape'] if shape['type'] != 5: continue info = feature['info'] name = info['NAME'].decode( 'cp850' ).encode( 'utf-8' ) name = re.sub( '^(\d+)\x00.*$', 'CD\\1', name ) # congressional district name = districtNames.get( name, name ) state = info['STATE'] key = name + keysep + state if key not in places: places[key] = { 'name': name, 'state': state, 'maxarea': 0.0, 'bounds': [ [ None, None ], [ None, None ] ], 'shapes': [] } place = places[key] shapes = place['shapes'] for part in shape['parts']: nPolys += 1 points = part['points'] n = len(points) - 1 nPoints += n pts = [] area = part['area'] if area == 0: continue bounds = part['bounds'] place['bounds'] = geo.extendBounds( place['bounds'], bounds ) centroid = part['centroid'] if area > place['maxarea']: place['centroid'] = centroid place['maxarea'] = area points = part['points'] for j in xrange(n): point = points[j] #pts.append( '[%.4f,%.4f]' %( float(point[0]), float(point[1]) ) ) pts.append( '{x:%.4f,y:%.4f}' %( float(point[0]), float(point[1]) ) ) #shapes.append( '{area:%.4f,bounds:[[%.4f,%.4f],[%.4f,%.4f]],centroid:[%.4f,%.4f],points:[%s]}' %( shapes.append( '{points:[%s]}' %( #area, #bounds[0][0], bounds[0][1], #bounds[1][0], bounds[1][1], #centroid[0], centroid[1], ','.join(pts) ) ) print '%d points in %d places' %( nPoints, len(places) ) return shapefile, places def writeUS( places, path ): json = [] keys = places.keys() keys.sort() for key in keys: abbr = 
states.byNumber[ places[key]['state'] ]['abbr'].lower() writeJSON( '%s.js' % abbr, getPlaceJSON( places, key, abbr, 'state' ) ) #def writeStates( places, path ): # p = {} # for k in places: # if places[k] != None: # p[k] = places[k] # places = p # keys = places.keys() # keys.sort() # for key in keys: # name, number = key.split(keysep) # state = states.byNumber[number] # state['json'].append( getPlaceJSON( places, key, state['abbr'].lower(), 'county' ) ) # for state in states.array: # writeJSON( path, state['abbr'].lower(), state['json'] ) def writeJSON( path, json ): file = '%s/%s' %( jsonpath, path ) print 'Writing %s' % file writeFile( file, 'GoogleElectionMap.shapeReady(%s)' %( json ) ) def getPlaceJSON( places, key, state, type ): place = places[key] if not place: return '' bounds = place['bounds'] centroid = place['centroid'] return '{name:"%s", type:"%s",state:"%s",bounds:[[%.4f,%.4f],[%.4f,%.4f]],centroid:[%.4f,%.4f],shapes:[%s]}' %( key.split(keysep)[0], type, state, bounds[0][0], bounds[0][1], bounds[1][0], bounds[1][1], centroid[0], centroid[1], ','.join(place['shapes']) ) def generateUS( detail, path='' ): shapefile, places = readShapefile( 'states/st99_d00_shp-%s/st99_d00.shp' % detail ) for key in places: name, number = key.split(keysep) state = states.byName[name] state['json'] = [] state['counties'] = [] state['number'] = number states.byNumber[number] = state writeUS( places, path ) #def generateStates( detail, path ): # shapefile, places = readShapefile( 'counties/co99_d00_shp-%s/co99_d00.shp' % detail ) # for key, place in places.iteritems(): # name, number = key.split(keysep) # state = states.byNumber[number] # abbr = state['abbr'] # if abbr not in useOther: # state['counties'].append( place ) # else: # places[key] = None # for abbr, file in useOther.iteritems(): # state = states.byAbbr[abbr] # number = state['number'] # othershapefile, otherplaces = readShapefile( # '%(base)s/%(name)s_shp-%(detail)s/%(name)s.shp' %{ # 'base': file[0], # 'name': 
file[1], # 'detail': detail # } ) # for key, place in otherplaces.iteritems(): # name, number = key.split(keysep) # state = states.byNumber[number] # state['counties'].append( place ) # places[key] = place # writeStates( places, path ) #generateUS( 0, 'full' ) #generateUS( 25, '25' ) generateUS( '00' ) #generateStates( 80, 'detailed' ) print 'Done!'
unlicense
sorenk/ansible
test/units/modules/network/nxos/test_nxos_bgp_neighbor_af.py
39
4916
# (c) 2016 Red Hat Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.compat.tests.mock import patch from ansible.modules.network.nxos import nxos_bgp_neighbor_af from .nxos_module import TestNxosModule, load_fixture, set_module_args class TestNxosBgpNeighborAfModule(TestNxosModule): module = nxos_bgp_neighbor_af def setUp(self): super(TestNxosBgpNeighborAfModule, self).setUp() self.mock_load_config = patch('ansible.modules.network.nxos.nxos_bgp_neighbor_af.load_config') self.load_config = self.mock_load_config.start() self.mock_get_config = patch('ansible.modules.network.nxos.nxos_bgp_neighbor_af.get_config') self.get_config = self.mock_get_config.start() def tearDown(self): super(TestNxosBgpNeighborAfModule, self).tearDown() self.mock_load_config.stop() self.mock_get_config.stop() def load_fixtures(self, commands=None, device=''): self.get_config.return_value = load_fixture('nxos_bgp', 'config.cfg') self.load_config.return_value = [] def test_nxos_bgp_neighbor_af(self): set_module_args(dict(asn=65535, neighbor='3.3.3.3', afi='ipv4', safi='unicast', route_reflector_client=True)) result = self.execute_module(changed=True) self.assertEqual(result['commands'], [ 'router bgp 65535', 'neighbor 3.3.3.3', 'address-family ipv4 unicast', 
'route-reflector-client' ]) def test_nxos_bgp_neighbor_af_exists(self): set_module_args(dict(asn=65535, neighbor='3.3.3.5', afi='ipv4', safi='unicast')) self.execute_module(changed=False, commands=[]) def test_nxos_bgp_neighbor_af_absent(self): set_module_args(dict(asn=65535, neighbor='3.3.3.5', afi='ipv4', safi='unicast', state='absent')) self.execute_module( changed=True, sort=False, commands=['router bgp 65535', 'neighbor 3.3.3.5', 'no address-family ipv4 unicast'] ) def test_nxos_bgp_neighbor_af_advertise_map(self): set_module_args(dict(asn=65535, neighbor='3.3.3.5', afi='ipv4', safi='unicast', advertise_map_exist=['my_advertise_map', 'my_exist_map'])) self.execute_module( changed=True, sort=False, commands=['router bgp 65535', 'neighbor 3.3.3.5', 'address-family ipv4 unicast', 'advertise-map my_advertise_map exist-map my_exist_map'] ) def test_nxos_bgp_neighbor_af_advertise_map_non_exist(self): set_module_args(dict(asn=65535, neighbor='3.3.3.5', afi='ipv4', safi='unicast', advertise_map_non_exist=['my_advertise_map', 'my_non_exist_map'])) self.execute_module( changed=True, sort=False, commands=['router bgp 65535', 'neighbor 3.3.3.5', 'address-family ipv4 unicast', 'advertise-map my_advertise_map non-exist-map my_non_exist_map'] ) def test_nxos_bgp_neighbor_af_max_prefix_limit_default(self): set_module_args(dict(asn=65535, neighbor='3.3.3.5', afi='ipv4', safi='unicast', max_prefix_limit='default')) self.execute_module( changed=True, sort=False, commands=['router bgp 65535', 'neighbor 3.3.3.5', 'address-family ipv4 unicast', 'no maximum-prefix'] ) def test_nxos_bgp_neighbor_af_max_prefix(self): set_module_args(dict(asn=65535, neighbor='3.3.3.5', afi='ipv4', safi='unicast', max_prefix_threshold=20, max_prefix_limit=20)) self.execute_module( changed=True, sort=False, commands=['router bgp 65535', 'neighbor 3.3.3.5', 'address-family ipv4 unicast', 'maximum-prefix 20 20'] ) def test_nxos_bgp_neighbor_af_disable_peer_as_check(self): set_module_args(dict(asn=65535, 
neighbor='3.3.3.5', afi='ipv4', safi='unicast', disable_peer_as_check=True)) self.execute_module( changed=True, commands=['router bgp 65535', 'neighbor 3.3.3.5', 'address-family ipv4 unicast', 'disable-peer-as-check'] )
gpl-3.0
danieljaouen/ansible
test/units/modules/network/f5/test_bigip_gtm_virtual_server.py
22
5568
# -*- coding: utf-8 -*- # # Copyright: (c) 2017, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest import sys from nose.plugins.skip import SkipTest if sys.version_info < (2, 7): raise SkipTest("F5 Ansible modules require Python >= 2.7") from ansible.compat.tests import unittest from ansible.compat.tests.mock import Mock from ansible.compat.tests.mock import patch from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_gtm_virtual_server import ApiParameters from library.modules.bigip_gtm_virtual_server import ModuleParameters from library.modules.bigip_gtm_virtual_server import ModuleManager from library.modules.bigip_gtm_virtual_server import ArgumentSpec from library.module_utils.network.f5.common import F5ModuleError from library.module_utils.network.f5.common import iControlUnexpectedHTTPError from test.unit.modules.utils import set_module_args except ImportError: try: from ansible.modules.network.f5.bigip_gtm_virtual_server import ApiParameters from ansible.modules.network.f5.bigip_gtm_virtual_server import ModuleParameters from ansible.modules.network.f5.bigip_gtm_virtual_server import ModuleManager from ansible.modules.network.f5.bigip_gtm_virtual_server import ArgumentSpec from ansible.module_utils.network.f5.common import F5ModuleError from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError from units.modules.utils import set_module_args except ImportError: raise SkipTest("F5 Ansible modules require the f5-sdk Python library") fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass 
fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( name='foo', server_name='server1', address='1.1.1.1', port=22, translation_address='2.2.2.2', translation_port=443, availability_requirements=dict( type='at_least', at_least=2, ), monitors=['http', 'tcp', 'gtp'], virtual_server_dependencies=[ dict( server='server2', virtual_server='vs2' ) ], link='link1', limits=dict( bits_enabled=True, packets_enabled=True, connections_enabled=True, bits_limit=100, packets_limit=200, connections_limit=300 ), state='present' ) p = ModuleParameters(params=args) assert p.name == 'foo' assert p.server_name == 'server1' assert p.address == '1.1.1.1' assert p.port == 22 assert p.translation_address == '2.2.2.2' assert p.translation_port == 443 assert p.availability_requirement_type == 'at_least' assert p.at_least == 2 assert p.monitors == 'min 2 of { /Common/http /Common/tcp /Common/gtp }' assert len(p.virtual_server_dependencies) == 1 assert p.link == '/Common/link1' assert p.bits_enabled == 'enabled' assert p.bits_limit == 100 assert p.packets_enabled == 'enabled' assert p.packets_limit == 200 assert p.connections_enabled == 'enabled' assert p.connections_limit == 300 def test_api_parameters(self): args = load_fixture('load_gtm_server_virtual_2.json') p = ApiParameters(params=args) assert p.name == 'vs2' assert p.address == '6.6.6.6' assert p.port == 8080 assert p.translation_address == 'none' assert p.translation_port == 0 assert p.availability_requirement_type == 'all' assert p.monitors == '/Common/gtp' assert p.bits_enabled == 'enabled' assert p.bits_limit == 100 assert p.packets_enabled == 'enabled' assert p.packets_limit == 200 assert p.connections_enabled == 'enabled' assert p.connections_limit == 300 @patch('ansible.module_utils.f5_utils.AnsibleF5Client._get_mgmt_root', return_value=True) class TestManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() def test_create_datacenter(self, 
*args): set_module_args(dict( server_name='foo', name='vs1', address='1.1.1.1', state='present', password='admin', server='localhost', user='admin' )) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) mm = ModuleManager(module=module) # Override methods to force specific logic in the module to happen mm.exists = Mock(side_effect=[False, True]) mm.create_on_device = Mock(return_value=True) results = mm.exec_module() assert results['changed'] is True
gpl-3.0
helldorado/ansible
lib/ansible/modules/crypto/openssl_csr_info.py
11
20851
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2016-2017, Yanis Guenane <yanis+ansible@guenane.org> # Copyright: (c) 2017, Markus Teufelberger <mteufelberger+ansible@mgit.at> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = r''' --- module: openssl_csr_info version_added: '2.8' short_description: Provide information of OpenSSL Certificate Signing Requests (CSR) description: - This module allows one to query information on OpenSSL Certificate Signing Requests (CSR). - In case the CSR signature cannot be validated, the module will fail. In this case, all return variables are still returned. - It uses the pyOpenSSL or cryptography python library to interact with OpenSSL. If both the cryptography and PyOpenSSL libraries are available (and meet the minimum version requirements) cryptography will be preferred as a backend over PyOpenSSL (unless the backend is forced with C(select_crypto_backend)) requirements: - PyOpenSSL >= 0.15 or cryptography >= 1.3 author: - Felix Fontein (@felixfontein) - Yanis Guenane (@Spredzy) options: path: description: - Remote absolute path where the CSR file is loaded from. type: path required: true select_crypto_backend: description: - Determines which crypto backend to use. - The default choice is C(auto), which tries to use C(cryptography) if available, and falls back to C(pyopenssl). - If set to C(pyopenssl), will try to use the L(pyOpenSSL,https://pypi.org/project/pyOpenSSL/) library. - If set to C(cryptography), will try to use the L(cryptography,https://cryptography.io/) library. 
type: str default: auto choices: [ auto, cryptography, pyopenssl ] seealso: - module: openssl_csr ''' EXAMPLES = r''' - name: Generate an OpenSSL Certificate Signing Request openssl_csr: path: /etc/ssl/csr/www.ansible.com.csr privatekey_path: /etc/ssl/private/ansible.com.pem common_name: www.ansible.com - name: Get information on the CSR openssl_csr_info: path: /etc/ssl/csr/www.ansible.com.csr register: result - name: Dump information debug: var: result ''' RETURN = r''' signature_valid: description: - Whether the CSR's signature is valid. - In case the check returns C(no), the module will fail. returned: success type: bool basic_constraints: description: Entries in the C(basic_constraints) extension, or C(none) if extension is not present. returned: success type: list sample: "[CA:TRUE, pathlen:1]" basic_constraints_critical: description: Whether the C(basic_constraints) extension is critical. returned: success type: bool extended_key_usage: description: Entries in the C(extended_key_usage) extension, or C(none) if extension is not present. returned: success type: list sample: "[Biometric Info, DVCS, Time Stamping]" extended_key_usage_critical: description: Whether the C(extended_key_usage) extension is critical. returned: success type: bool extensions_by_oid: description: Returns a dictionary for every extension OID returned: success type: complex contains: critical: description: Whether the extension is critical. returned: success type: bool value: description: The Base64 encoded value (in DER format) of the extension returned: success type: str sample: "MAMCAQU=" sample: '{"1.3.6.1.5.5.7.1.24": { "critical": false, "value": "MAMCAQU="}}' key_usage: description: Entries in the C(key_usage) extension, or C(none) if extension is not present. returned: success type: str sample: "[Key Agreement, Data Encipherment]" key_usage_critical: description: Whether the C(key_usage) extension is critical. 
returned: success type: bool subject_alt_name: description: Entries in the C(subject_alt_name) extension, or C(none) if extension is not present. returned: success type: list sample: "[DNS:www.ansible.com, IP:1.2.3.4]" subject_alt_name_critical: description: Whether the C(subject_alt_name) extension is critical. returned: success type: bool ocsp_must_staple: description: C(yes) if the OCSP Must Staple extension is present, C(none) otherwise. returned: success type: bool ocsp_must_staple_critical: description: Whether the C(ocsp_must_staple) extension is critical. returned: success type: bool subject: description: The CSR's subject. returned: success type: dict sample: '{"commonName": "www.example.com", "emailAddress": "test@example.com"}' public_key: description: CSR's public key in PEM format returned: success type: str sample: "-----BEGIN PUBLIC KEY-----\nMIICIjANBgkqhkiG9w0BAQEFAAOCAg8A..." public_key_fingerprints: description: - Fingerprints of CSR's public key. - For every hash algorithm available, the fingerprint is computed. returned: success type: dict sample: "{'sha256': 'd4:b3:aa:6d:c8:04:ce:4e:ba:f6:29:4d:92:a3:94:b0:c2:ff:bd:bf:33:63:11:43:34:0f:51:b0:95:09:2f:63', 'sha512': 'f7:07:4a:f0:b0:f0:e6:8b:95:5f:f9:e6:61:0a:32:68:f1..." 
''' import abc import datetime import os import traceback from distutils.version import LooseVersion from ansible.module_utils import crypto as crypto_utils from ansible.module_utils.basic import AnsibleModule, missing_required_lib from ansible.module_utils.six import string_types from ansible.module_utils._text import to_native, to_text, to_bytes from ansible.module_utils.compat import ipaddress as compat_ipaddress MINIMAL_CRYPTOGRAPHY_VERSION = '1.3' MINIMAL_PYOPENSSL_VERSION = '0.15' PYOPENSSL_IMP_ERR = None try: import OpenSSL from OpenSSL import crypto PYOPENSSL_VERSION = LooseVersion(OpenSSL.__version__) if OpenSSL.SSL.OPENSSL_VERSION_NUMBER >= 0x10100000: # OpenSSL 1.1.0 or newer OPENSSL_MUST_STAPLE_NAME = b"tlsfeature" OPENSSL_MUST_STAPLE_VALUE = b"status_request" else: # OpenSSL 1.0.x or older OPENSSL_MUST_STAPLE_NAME = b"1.3.6.1.5.5.7.1.24" OPENSSL_MUST_STAPLE_VALUE = b"DER:30:03:02:01:05" except ImportError: PYOPENSSL_IMP_ERR = traceback.format_exc() PYOPENSSL_FOUND = False else: PYOPENSSL_FOUND = True CRYPTOGRAPHY_IMP_ERR = None try: import cryptography from cryptography import x509 from cryptography.hazmat.primitives import serialization CRYPTOGRAPHY_VERSION = LooseVersion(cryptography.__version__) except ImportError: CRYPTOGRAPHY_IMP_ERR = traceback.format_exc() CRYPTOGRAPHY_FOUND = False else: CRYPTOGRAPHY_FOUND = True TIMESTAMP_FORMAT = "%Y%m%d%H%M%SZ" class CertificateSigningRequestInfo(crypto_utils.OpenSSLObject): def __init__(self, module, backend): super(CertificateSigningRequestInfo, self).__init__( module.params['path'], 'present', False, module.check_mode, ) self.backend = backend self.module = module def generate(self): # Empty method because crypto_utils.OpenSSLObject wants this pass def dump(self): # Empty method because crypto_utils.OpenSSLObject wants this pass @abc.abstractmethod def _get_subject(self): pass @abc.abstractmethod def _get_key_usage(self): pass @abc.abstractmethod def _get_extended_key_usage(self): pass @abc.abstractmethod 
def _get_basic_constraints(self): pass @abc.abstractmethod def _get_ocsp_must_staple(self): pass @abc.abstractmethod def _get_subject_alt_name(self): pass @abc.abstractmethod def _get_public_key(self, binary): pass @abc.abstractmethod def _get_all_extensions(self): pass @abc.abstractmethod def _is_signature_valid(self): pass def get_info(self): result = dict() self.csr = crypto_utils.load_certificate_request(self.path, backend=self.backend) result['subject'] = self._get_subject() result['key_usage'], result['key_usage_critical'] = self._get_key_usage() result['extended_key_usage'], result['extended_key_usage_critical'] = self._get_extended_key_usage() result['basic_constraints'], result['basic_constraints_critical'] = self._get_basic_constraints() result['ocsp_must_staple'], result['ocsp_must_staple_critical'] = self._get_ocsp_must_staple() result['subject_alt_name'], result['subject_alt_name_critical'] = self._get_subject_alt_name() result['public_key'] = self._get_public_key(binary=False) pk = self._get_public_key(binary=True) result['public_key_fingerprints'] = crypto_utils.get_fingerprint_of_bytes(pk) if pk is not None else dict() result['extensions_by_oid'] = self._get_all_extensions() result['signature_valid'] = self._is_signature_valid() if not result['signature_valid']: self.module.fail_json( msg='CSR signature is invalid!', **result ) return result class CertificateSigningRequestInfoCryptography(CertificateSigningRequestInfo): """Validate the supplied CSR, using the cryptography backend""" def __init__(self, module): super(CertificateSigningRequestInfoCryptography, self).__init__(module, 'cryptography') def _get_subject(self): result = dict() for attribute in self.csr.subject: result[crypto_utils.cryptography_oid_to_name(attribute.oid)] = attribute.value return result def _get_key_usage(self): try: current_key_ext = self.csr.extensions.get_extension_for_class(x509.KeyUsage) current_key_usage = current_key_ext.value key_usage = dict( 
digital_signature=current_key_usage.digital_signature, content_commitment=current_key_usage.content_commitment, key_encipherment=current_key_usage.key_encipherment, data_encipherment=current_key_usage.data_encipherment, key_agreement=current_key_usage.key_agreement, key_cert_sign=current_key_usage.key_cert_sign, crl_sign=current_key_usage.crl_sign, encipher_only=False, decipher_only=False, ) if key_usage['key_agreement']: key_usage.update(dict( encipher_only=current_key_usage.encipher_only, decipher_only=current_key_usage.decipher_only )) key_usage_names = dict( digital_signature='Digital Signature', content_commitment='Non Repudiation', key_encipherment='Key Encipherment', data_encipherment='Data Encipherment', key_agreement='Key Agreement', key_cert_sign='Certificate Sign', crl_sign='CRL Sign', encipher_only='Encipher Only', decipher_only='Decipher Only', ) return sorted([ key_usage_names[name] for name, value in key_usage.items() if value ]), current_key_ext.critical except cryptography.x509.ExtensionNotFound: return None, False def _get_extended_key_usage(self): try: ext_keyusage_ext = self.csr.extensions.get_extension_for_class(x509.ExtendedKeyUsage) return sorted([ crypto_utils.cryptography_oid_to_name(eku) for eku in ext_keyusage_ext.value ]), ext_keyusage_ext.critical except cryptography.x509.ExtensionNotFound: return None, False def _get_basic_constraints(self): try: ext_keyusage_ext = self.csr.extensions.get_extension_for_class(x509.BasicConstraints) result = [] result.append('CA:{0}'.format('TRUE' if ext_keyusage_ext.value.ca else 'FALSE')) if ext_keyusage_ext.value.path_length is not None: result.append('pathlen:{0}'.format(ext_keyusage_ext.value.path_length)) return sorted(result), ext_keyusage_ext.critical except cryptography.x509.ExtensionNotFound: return None, False def _get_ocsp_must_staple(self): try: try: # This only works with cryptography >= 2.1 tlsfeature_ext = self.csr.extensions.get_extension_for_class(x509.TLSFeature) value = 
cryptography.x509.TLSFeatureType.status_request in tlsfeature_ext.value except AttributeError as dummy: # Fallback for cryptography < 2.1 oid = x509.oid.ObjectIdentifier("1.3.6.1.5.5.7.1.24") tlsfeature_ext = self.csr.extensions.get_extension_for_oid(oid) value = tlsfeature_ext.value.value == b"\x30\x03\x02\x01\x05" return value, tlsfeature_ext.critical except cryptography.x509.ExtensionNotFound: return None, False def _get_subject_alt_name(self): try: san_ext = self.csr.extensions.get_extension_for_class(x509.SubjectAlternativeName) result = [crypto_utils.cryptography_decode_name(san) for san in san_ext.value] return result, san_ext.critical except cryptography.x509.ExtensionNotFound: return None, False def _get_public_key(self, binary): return self.csr.public_key().public_bytes( serialization.Encoding.DER if binary else serialization.Encoding.PEM, serialization.PublicFormat.SubjectPublicKeyInfo ) def _get_all_extensions(self): return crypto_utils.cryptography_get_extensions_from_csr(self.csr) def _is_signature_valid(self): return self.csr.is_signature_valid class CertificateSigningRequestInfoPyOpenSSL(CertificateSigningRequestInfo): """validate the supplied CSR.""" def __init__(self, module): super(CertificateSigningRequestInfoPyOpenSSL, self).__init__(module, 'pyopenssl') def __get_name(self, name): result = dict() for sub in name.get_components(): result[crypto_utils.pyopenssl_normalize_name(sub[0])] = to_text(sub[1]) return result def _get_subject(self): return self.__get_name(self.csr.get_subject()) def _get_extension(self, short_name): for extension in self.csr.get_extensions(): if extension.get_short_name() == short_name: result = [ crypto_utils.pyopenssl_normalize_name(usage.strip()) for usage in to_text(extension, errors='surrogate_or_strict').split(',') ] return sorted(result), bool(extension.get_critical()) return None, False def _get_key_usage(self): return self._get_extension(b'keyUsage') def _get_extended_key_usage(self): return 
self._get_extension(b'extendedKeyUsage') def _get_basic_constraints(self): return self._get_extension(b'basicConstraints') def _get_ocsp_must_staple(self): extensions = self.csr.get_extensions() oms_ext = [ ext for ext in extensions if to_bytes(ext.get_short_name()) == OPENSSL_MUST_STAPLE_NAME and to_bytes(ext) == OPENSSL_MUST_STAPLE_VALUE ] if OpenSSL.SSL.OPENSSL_VERSION_NUMBER < 0x10100000: # Older versions of libssl don't know about OCSP Must Staple oms_ext.extend([ext for ext in extensions if ext.get_short_name() == b'UNDEF' and ext.get_data() == b'\x30\x03\x02\x01\x05']) if oms_ext: return True, bool(oms_ext[0].get_critical()) else: return None, False def _normalize_san(self, san): # apperently openssl returns 'IP address' not 'IP' as specifier when converting the subjectAltName to string # although it won't accept this specifier when generating the CSR. (https://github.com/openssl/openssl/issues/4004) if san.startswith('IP Address:'): san = 'IP:' + san[len('IP Address:'):] if san.startswith('IP:'): ip = compat_ipaddress.ip_address(san[3:]) san = 'IP:{0}'.format(ip.compressed) return san def _get_subject_alt_name(self): for extension in self.csr.get_extensions(): if extension.get_short_name() == b'subjectAltName': result = [self._normalize_san(altname.strip()) for altname in to_text(extension, errors='surrogate_or_strict').split(', ')] return result, bool(extension.get_critical()) return None, False def _get_public_key(self, binary): try: return crypto.dump_publickey( crypto.FILETYPE_ASN1 if binary else crypto.FILETYPE_PEM, self.csr.get_pubkey() ) except AttributeError: try: bio = crypto._new_mem_buf() if binary: rc = crypto._lib.i2d_PUBKEY_bio(bio, self.csr.get_pubkey()._pkey) else: rc = crypto._lib.PEM_write_bio_PUBKEY(bio, self.csr.get_pubkey()._pkey) if rc != 1: crypto._raise_current_error() return crypto._bio_to_string(bio) except AttributeError: self.module.warn('Your pyOpenSSL version does not support dumping public keys. 
' 'Please upgrade to version 16.0 or newer, or use the cryptography backend.')

    def _get_all_extensions(self):
        # Delegate to the shared helper that dumps every extension as
        # {oid: {critical, value}} for the extensions_by_oid return value.
        return crypto_utils.pyopenssl_get_extensions_from_csr(self.csr)

    def _is_signature_valid(self):
        # verify() raises crypto.Error when the public key does not match the
        # signature; report that as "invalid" instead of crashing the module.
        try:
            return bool(self.csr.verify(self.csr.get_pubkey()))
        except crypto.Error:
            # OpenSSL error means that key is not consistent
            return False


def main():
    """Module entry point: validate the path, select a crypto backend
    (cryptography preferred over pyOpenSSL when 'auto'), and exit with the
    CSR's info as module facts."""
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True),
            select_crypto_backend=dict(type='str', default='auto', choices=['auto', 'cryptography', 'pyopenssl']),
        ),
        supports_check_mode=True,  # read-only module: check mode is always safe
    )

    try:
        base_dir = os.path.dirname(module.params['path']) or '.'
        if not os.path.isdir(base_dir):
            module.fail_json(
                name=base_dir,
                msg='The directory %s does not exist or the file is not a directory' % base_dir
            )

        backend = module.params['select_crypto_backend']
        if backend == 'auto':
            # Detect what backend we can use
            can_use_cryptography = CRYPTOGRAPHY_FOUND and CRYPTOGRAPHY_VERSION >= LooseVersion(MINIMAL_CRYPTOGRAPHY_VERSION)
            can_use_pyopenssl = PYOPENSSL_FOUND and PYOPENSSL_VERSION >= LooseVersion(MINIMAL_PYOPENSSL_VERSION)

            # If cryptography is available we'll use it
            if can_use_cryptography:
                backend = 'cryptography'
            elif can_use_pyopenssl:
                backend = 'pyopenssl'

            # Fail if no backend has been found
            if backend == 'auto':
                module.fail_json(msg=("Can't detect any of the required Python libraries "
                                      "cryptography (>= {0}) or PyOpenSSL (>= {1})").format(
                                          MINIMAL_CRYPTOGRAPHY_VERSION,
                                          MINIMAL_PYOPENSSL_VERSION))

        if backend == 'pyopenssl':
            if not PYOPENSSL_FOUND:
                module.fail_json(msg=missing_required_lib('pyOpenSSL'),
                                 exception=PYOPENSSL_IMP_ERR)
            try:
                # X509Req.get_extensions() only exists in pyOpenSSL >= 0.15,
                # hence the explicit probe rather than a version compare.
                getattr(crypto.X509Req, 'get_extensions')
            except AttributeError:
                module.fail_json(msg='You need to have PyOpenSSL>=0.15')
            certificate = CertificateSigningRequestInfoPyOpenSSL(module)
        elif backend == 'cryptography':
            if not CRYPTOGRAPHY_FOUND:
                module.fail_json(msg=missing_required_lib('cryptography'),
                                 exception=CRYPTOGRAPHY_IMP_ERR)
            certificate = CertificateSigningRequestInfoCryptography(module)

        result = certificate.get_info()
        module.exit_json(**result)
    except crypto_utils.OpenSSLObjectError as exc:
        module.fail_json(msg=to_native(exc))


if __name__ == "__main__":
    main()
gpl-3.0
cython-testbed/pandas
pandas/io/clipboard/windows.py
23
5441
""" This module implements clipboard handling on Windows using ctypes. """ import time import contextlib import ctypes from ctypes import c_size_t, sizeof, c_wchar_p, get_errno, c_wchar from .exceptions import PyperclipWindowsException class CheckedCall(object): def __init__(self, f): super(CheckedCall, self).__setattr__("f", f) def __call__(self, *args): ret = self.f(*args) if not ret and get_errno(): raise PyperclipWindowsException("Error calling " + self.f.__name__) return ret def __setattr__(self, key, value): setattr(self.f, key, value) def init_windows_clipboard(): from ctypes.wintypes import (HGLOBAL, LPVOID, DWORD, LPCSTR, INT, HWND, HINSTANCE, HMENU, BOOL, UINT, HANDLE) windll = ctypes.windll safeCreateWindowExA = CheckedCall(windll.user32.CreateWindowExA) safeCreateWindowExA.argtypes = [DWORD, LPCSTR, LPCSTR, DWORD, INT, INT, INT, INT, HWND, HMENU, HINSTANCE, LPVOID] safeCreateWindowExA.restype = HWND safeDestroyWindow = CheckedCall(windll.user32.DestroyWindow) safeDestroyWindow.argtypes = [HWND] safeDestroyWindow.restype = BOOL OpenClipboard = windll.user32.OpenClipboard OpenClipboard.argtypes = [HWND] OpenClipboard.restype = BOOL safeCloseClipboard = CheckedCall(windll.user32.CloseClipboard) safeCloseClipboard.argtypes = [] safeCloseClipboard.restype = BOOL safeEmptyClipboard = CheckedCall(windll.user32.EmptyClipboard) safeEmptyClipboard.argtypes = [] safeEmptyClipboard.restype = BOOL safeGetClipboardData = CheckedCall(windll.user32.GetClipboardData) safeGetClipboardData.argtypes = [UINT] safeGetClipboardData.restype = HANDLE safeSetClipboardData = CheckedCall(windll.user32.SetClipboardData) safeSetClipboardData.argtypes = [UINT, HANDLE] safeSetClipboardData.restype = HANDLE safeGlobalAlloc = CheckedCall(windll.kernel32.GlobalAlloc) safeGlobalAlloc.argtypes = [UINT, c_size_t] safeGlobalAlloc.restype = HGLOBAL safeGlobalLock = CheckedCall(windll.kernel32.GlobalLock) safeGlobalLock.argtypes = [HGLOBAL] safeGlobalLock.restype = LPVOID safeGlobalUnlock = 
CheckedCall(windll.kernel32.GlobalUnlock) safeGlobalUnlock.argtypes = [HGLOBAL] safeGlobalUnlock.restype = BOOL GMEM_MOVEABLE = 0x0002 CF_UNICODETEXT = 13 @contextlib.contextmanager def window(): """ Context that provides a valid Windows hwnd. """ # we really just need the hwnd, so setting "STATIC" # as predefined lpClass is just fine. hwnd = safeCreateWindowExA(0, b"STATIC", None, 0, 0, 0, 0, 0, None, None, None, None) try: yield hwnd finally: safeDestroyWindow(hwnd) @contextlib.contextmanager def clipboard(hwnd): """ Context manager that opens the clipboard and prevents other applications from modifying the clipboard content. """ # We may not get the clipboard handle immediately because # some other application is accessing it (?) # We try for at least 500ms to get the clipboard. t = time.time() + 0.5 success = False while time.time() < t: success = OpenClipboard(hwnd) if success: break time.sleep(0.01) if not success: raise PyperclipWindowsException("Error calling OpenClipboard") try: yield finally: safeCloseClipboard() def copy_windows(text): # This function is heavily based on # http://msdn.com/ms649016#_win32_Copying_Information_to_the_Clipboard with window() as hwnd: # http://msdn.com/ms649048 # If an application calls OpenClipboard with hwnd set to NULL, # EmptyClipboard sets the clipboard owner to NULL; # this causes SetClipboardData to fail. # => We need a valid hwnd to copy something. with clipboard(hwnd): safeEmptyClipboard() if text: # http://msdn.com/ms649051 # If the hMem parameter identifies a memory object, # the object must have been allocated using the # function with the GMEM_MOVEABLE flag. 
count = len(text) + 1 handle = safeGlobalAlloc(GMEM_MOVEABLE, count * sizeof(c_wchar)) locked_handle = safeGlobalLock(handle) ctypes.memmove(c_wchar_p(locked_handle), c_wchar_p(text), count * sizeof(c_wchar)) safeGlobalUnlock(handle) safeSetClipboardData(CF_UNICODETEXT, handle) def paste_windows(): with clipboard(None): handle = safeGetClipboardData(CF_UNICODETEXT) if not handle: # GetClipboardData may return NULL with errno == NO_ERROR # if the clipboard is empty. # (Also, it may return a handle to an empty buffer, # but technically that's not empty) return "" return c_wchar_p(handle).value return copy_windows, paste_windows
bsd-3-clause
ahnjungho/fabtools
fabtools/utils.py
9
1274
""" Utilities ========= """ from pipes import quote import os import posixpath from fabric.api import env, hide, run, sudo def run_as_root(command, *args, **kwargs): """ Run a remote command as the root user. When connecting as root to the remote system, this will use Fabric's ``run`` function. In other cases, it will use ``sudo``. """ if env.user == 'root': func = run else: func = sudo return func(command, *args, **kwargs) def get_cwd(local=False): from fabric.api import local as local_run with hide('running', 'stdout'): if local: return local_run('pwd', capture=True) else: return run('pwd') def abspath(path, local=False): path_mod = os.path if local else posixpath if not path_mod.isabs(path): cwd = get_cwd(local=local) path = path_mod.join(cwd, path) return path_mod.normpath(path) def download(url, retry=10): from fabtools.require.curl import command as require_curl require_curl() run('curl --silent --retry %s -O %s' % (retry, url)) def read_file(path): with hide('running', 'stdout'): return run('cat {0}'.format(quote(path))) def read_lines(path): return read_file(path).splitlines()
bsd-2-clause
defstryker/Hex-Omega
users/views.py
1
14055
import mimetypes from django.contrib import messages from django.contrib.auth.mixins import LoginRequiredMixin from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage from django.shortcuts import render, redirect from django.http import HttpResponse from django.contrib.auth import login, logout, update_session_auth_hash from django.contrib.auth.decorators import login_required from django.views.generic.edit import UpdateView, CreateView from django.views.generic.detail import DetailView from django.views.generic.list import ListView from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger from django.contrib.auth.models import Group from HexOmega.settings import BASE_DIR from .utils import get_default_password, mail_kickoff, uploaded_file_handler from .models import Project, AdminUser, MemberUser, LeaderUser, Task from .backends import CustomUserAuth from .forms.login_form import LoginForm from .forms.project_forms import CreateProjectForm from .forms.task_forms import CreateTaskForm, LeaderUpdateTaskForm from .forms.member_form import MemberUpdate, MemberCreate from .Xav.user_context import url_context from log.Log import log import os def index(request): return render(request, 'users/index.html') def login_auth_2(request): """ Login page authentication using django forms. If easier and simpler, implement this else the stuff I threw together up there. :param request: :return: """ if request.user.is_authenticated(): return redirect('user_logged_in', request.user.username) if request.method == 'POST': form = LoginForm(request.POST) if form.is_valid(): username = request.POST.get('username') password = request.POST.get('password') rem = request.POST.get('rem') user = CustomUserAuth().authenticate(username=username, password=password) if user is False: form.errors['password'] = 'The username or password is incorrect.' 
return render(request, 'users/login.html', { 'form': form, 'errors': form.errors }) if user is not None: print('User [{}] is logging in.'.format(user.username)) login(request, user, backend='django.contrib.auth.backends.ModelBackend') if rem is not None: request.session.set_expiry(7200) else: request.session.get_expire_at_browser_close() return redirect('user_logged_in', username=username) else: form = LoginForm() return render(request, 'users/login.html', {'form': form}) @login_required def logged_in(request, username): if AdminUser.objects.filter(username__exact=username).count() == 1: return redirect('open_project', username) elif LeaderUser.objects.filter(username__exact=username).count() == 1: return redirect('leader_home', username) else: user = MemberUser.objects.get(username__exact=username) return redirect('task_list', username) @login_required def jump_ship(request): print('jumping ship....') logout(request) return redirect('login_page') @login_required def delete_admin(request, username, d): """ Using random, crappy, no good, templates. good enough for testing. Will add appropriate ones soon. 
""" a = AdminUser.objects.get(username__exact=d) a.delete() print('deleted') return redirect('list_users', username) @login_required def member_upload(request, username, task): t = Task.objects.get(title=task) if 'up_file' in request.FILES: t.deliverable = request.FILES['up_file'] t.save() mail_kickoff(MemberUser.objects.get(username__exact=username), t, var=3) log('SUCCESS', MemberUser.objects.get(username__exact=username), '{} uploaded a deliverable for {}'.format(username, t.title)) print(t.deliverable.url) else: print('No file!!') return redirect('task_list', username) @login_required def list_users(request, username): return render(request, 'list.html', {'admins': AdminUser.objects.all()}) @login_required @url_context def get_list_of_users(request): """ Display a list of admin users /list/ :param request: :return: :author Caroline """ admin_user_list = AdminUser.objects.order_by('pk') paginator = Paginator(admin_user_list, 1) # Show 3 admin per page page = request.GET.get('page') try: admin_list = paginator.page(page) except PageNotAnInteger: # If page is not an integer, deliver first page. admin_list = paginator.page(1) except EmptyPage: # If page is out of range (e.g. 9999), deliver last page of results. admin_list = paginator.page(paginator.num_pages) context = {'admin_list': admin_list, 'page': page} return render(request, 'users/list_of_users.html', context) # ============================================================================ # Release Me! 
@login_required def leader_home(request, username): user = LeaderUser.objects.get(username__exact=username) try: tasks = user.project.actionlist.task_set.all() for task in Task.objects.filter(action_list__project__leader__username=username): print(task.title, task.action_list.project.name) # print(task.deliverable.url) except Exception as e: print('Ahhhhhh') tasks = None return render(request, 'leader_home.html', {'user': user, 'tasks': tasks}) class CreateMember(CreateView, LoginRequiredMixin): # fields = ['username', 'first_name', 'last_name', 'role', 'email', 'phone'] form_class = MemberCreate username = '' model = MemberUser l = None template_name = 'create_member.html' def form_valid(self, form): form.instance.project = self.l.project password = get_default_password() form.instance.set_password(password) mail_kickoff(form.instance, password) messages.add_message(self.request, messages.INFO, 'User [{}] created.'.format(form.instance.username)) update_session_auth_hash(self.request, self.request.user) return super(CreateMember, self).form_valid(form) def get_form_kwargs(self): self.l = LeaderUser.objects.get(username__exact=self.request.user.username) p = self.request.get_full_path() print(p) self.success_url = '/'.join(p.split('/')[:-1]) + '/' kwargs = super(CreateMember, self).get_form_kwargs() # kwargs['pn'] = l.project.name return kwargs class MemberHome(DetailView, LoginRequiredMixin): model = MemberUser username = '' template_name = 'member_home.html' def get_object(self, queryset=None): return MemberUser.objects.get(username=self.kwargs.get('username')) def get_context_data(self, **kwargs): context = super(MemberHome, self).get_context_data(**kwargs) return context @login_required def show_tasks(request, username): ts = Task.objects.filter(members__username=username) print(ts) return render(request, 'list.html', {'tasks': ts}) # ============================================================================ # My project and tasks modules # 2017-03-22 def 
get_project_path(p): return os.path.join(BASE_DIR, os.path.join('projects', p.name + '/')) @login_required def create_project(request, username): if request.method == 'POST': form = CreateProjectForm(request.POST) if form.is_valid(): p = form.save(commit=False) p.leader = LeaderUser.objects.get(username__exact=username) p.save() for a in request.POST.get('admins'): p.admins.add(a) path = get_project_path(p) # os.makedirs(path, 0o755) if not os.path.exists(path): os.mkdir(path, 0o755) if not os.path.exists(os.path.join(path, 'activity.log')): f = open(os.path.join(path, 'activity.log'), 'w+') f.close() return redirect('display_leader', username) else: form = CreateProjectForm() return render(request, 'crproj.html', {'form': form}) @login_required def create_task(request, username): l = LeaderUser.objects.get(username__exact=username) if request.method == 'POST': form = CreateTaskForm(request.POST) if form.is_valid(): mem_dat = form.cleaned_data.get('members') title = form.cleaned_data.get('title') est_end = form.cleaned_data.get('est_end') status = form.cleaned_data.get('status') lt = form.cleaned_data.get('to_leader') if lt is None: lt = False t = Task.objects.create(title=title, est_end=est_end, status=status, to_leader=lt, action_list=l.project.actionlist) t.save() for m in mem_dat: t.members.add(m) t.save() log('INFO', l, '{} added a new Task [{}]'.format(l.username, t.title)) return redirect('leader_home', username) else: print(form.errors) else: form = CreateTaskForm({'pn': l.project.name}) return render(request, 'crtask.html', {'form': form}) class TaskUpdate(UpdateView, LoginRequiredMixin): username = '' model = Task template_name = 'crtask.html' content_type = 'multipart-form-data' form_class = LeaderUpdateTaskForm def get_form_kwargs(self): l = LeaderUser.objects.get(username__exact=self.request.user.username) t = Task.objects.get(pk=self.kwargs['pk']) up_flag = False up_name = '' if bool(t.deliverable): up_flag = True up_name = 
t.deliverable.name.split('/')[-1] t.status = 'Completed' t.save() log('SUCCESS', l, '{} uploaded a deliverable to Task [{}]'.format(l.username, t.title)) mail_kickoff(l, t, var=3) p = self.request.get_full_path() self.success_url = '/'.join(p.split('/')[:-3]) + '/' kwargs = super(TaskUpdate, self).get_form_kwargs() kwargs['pn'] = l.project.name kwargs['up_flag'] = up_flag kwargs['up_name'] = up_name log('INFO', l, '{} made changes to Task [{}]'.format(l.username, t.title)) return kwargs @login_required def update_member(request, username): mem = MemberUser.objects.get(username__exact=username) form = MemberUpdate(request.POST, initial={ 'first_name': mem.first_name, 'last_name': mem.last_name, 'email': mem.email, 'phone': mem.phone }) if request.method == 'POST': if form.is_valid(): fn = request.POST.get('first_name') ln = request.POST.get('last_name') email = request.POST.get('email') p = request.POST.get('password') ph = request.POST.get('phone') if fn is not '': mem.first_name = fn if ln is not '': mem.last_name = ln if email is not '': mem.email = email if (p is not None and p is not '') and len(p.strip()) >= 8: mem.set_password(p) if ph is not '': mem.phone = ph if mem.has_usable_password(): update_session_auth_hash(request, mem) mem.save() logout(request) return redirect('login_page') else: print(form.errors) else: form = MemberUpdate() return render(request, 'update_member.html', { 'form': form, 'user': mem, 'title': 'Update' }) def get_list_of_members(request, username): member_user_list = MemberUser.objects.order_by('pk').filter(project__leader__username=username) user = LeaderUser.objects.get(username__iexact=username) paginator = Paginator(member_user_list, 5) # Show 3 admin per page page = request.GET.get('page') try: member_list = paginator.page(page) except PageNotAnInteger: # If page is not an integer, deliver first page. member_list = paginator.page(1) except EmptyPage: # If page is out of range (e.g. 9999), deliver last page of results. 
member_list = paginator.page(paginator.num_pages) context = {'member_list': member_list, 'page': page, 'user': user} return render(request, 'users/list_of_members.html', context) def delete_a_member(request, username, d): if MemberUser.objects.get(username__iexact=d): person = MemberUser.objects.get(username__iexact=d) person.delete() return redirect('members_list', username) @login_required def project_information(request, username, p): print('Yoohoo!') project = Project.objects.get(name__exact=p) for p in project.actionlist.task_set.all(): print(p.deliverable.name.split('/')[-1]) return render(request, 'users/project_information.html', {'project': project}) @login_required def send_file(request, username, p, task): task = Task.objects.get(title__exact=task) file_path = '/' + task.deliverable.url if '%20' in file_path: file_path = file_path.replace('%20', ' ') file_mimetype = mimetypes.guess_type(file_path) if os.path.exists(file_path): with open(file_path, 'rb') as fh: response = HttpResponse(fh.read(), content_type=file_mimetype) response['Content-Disposition'] = 'attachment; filename=' + os.path.basename(file_path) return response else: return HttpResponse('File retrieval error.')
mit
deepsrijit1105/edx-platform
lms/djangoapps/grades/config/models.py
2
2642
""" Models for configuration of the feature flags controlling persistent grades. """ from config_models.models import ConfigurationModel from django.conf import settings from django.db.models import BooleanField from xmodule_django.models import CourseKeyField class PersistentGradesEnabledFlag(ConfigurationModel): """ Enables persistent grades across the platform. When this feature flag is set to true, individual courses must also have persistent grades enabled for the feature to take effect. """ # this field overrides course-specific settings to enable the feature for all courses enabled_for_all_courses = BooleanField(default=False) @classmethod def feature_enabled(cls, course_id=None): """ Looks at the currently active configuration model to determine whether the persistent grades feature is available. If the flag is not enabled, the feature is not available. If the flag is enabled and the provided course_id is for an course with persistent grades enabled, the feature is available. If the flag is enabled and no course ID is given, we return True since the global setting is enabled. """ if settings.FEATURES.get('PERSISTENT_GRADES_ENABLED_FOR_ALL_TESTS'): return True if not PersistentGradesEnabledFlag.is_enabled(): return False elif not PersistentGradesEnabledFlag.current().enabled_for_all_courses and course_id: try: return CoursePersistentGradesFlag.objects.get(course_id=course_id).enabled except CoursePersistentGradesFlag.DoesNotExist: return False return True class Meta(object): app_label = "grades" def __unicode__(self): current_model = PersistentGradesEnabledFlag.current() return u"PersistentGradesEnabledFlag: enabled {}".format( current_model.is_enabled() ) class CoursePersistentGradesFlag(ConfigurationModel): """ Enables persistent grades for a specific course. Only has an effect if the general flag above is set to True. """ KEY_FIELDS = ('course_id',) class Meta(object): app_label = "grades" # The course that these features are attached to. 
course_id = CourseKeyField(max_length=255, db_index=True, unique=True) def __unicode__(self): not_en = "Not " if self.enabled: not_en = "" # pylint: disable=no-member return u"Course '{}': Persistent Grades {}Enabled".format(self.course_id.to_deprecated_string(), not_en)
agpl-3.0
aerickson/ansible
test/units/executor/test_playbook_executor.py
60
6304
# (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.compat.tests import unittest from ansible.compat.tests.mock import MagicMock from ansible.executor.playbook_executor import PlaybookExecutor from ansible.playbook import Playbook from ansible.template import Templar from units.mock.loader import DictDataLoader class TestPlaybookExecutor(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def test_get_serialized_batches(self): fake_loader = DictDataLoader({ 'no_serial.yml': ''' - hosts: all gather_facts: no tasks: - debug: var=inventory_hostname ''', 'serial_int.yml': ''' - hosts: all gather_facts: no serial: 2 tasks: - debug: var=inventory_hostname ''', 'serial_pct.yml': ''' - hosts: all gather_facts: no serial: 20% tasks: - debug: var=inventory_hostname ''', 'serial_list.yml': ''' - hosts: all gather_facts: no serial: [1, 2, 3] tasks: - debug: var=inventory_hostname ''', 'serial_list_mixed.yml': ''' - hosts: all gather_facts: no serial: [1, "20%", -1] tasks: - debug: var=inventory_hostname ''', }) mock_inventory = MagicMock() mock_var_manager = MagicMock() # fake out options to use the syntax CLI switch, which will ensure # the PlaybookExecutor doesn't create a 
TaskQueueManager mock_options = MagicMock() mock_options.syntax.value = True templar = Templar(loader=fake_loader) pbe = PlaybookExecutor( playbooks=['no_serial.yml', 'serial_int.yml', 'serial_pct.yml', 'serial_list.yml', 'serial_list_mixed.yml'], inventory=mock_inventory, variable_manager=mock_var_manager, loader=fake_loader, options=mock_options, passwords=[], ) playbook = Playbook.load(pbe._playbooks[0], variable_manager=mock_var_manager, loader=fake_loader) play = playbook.get_plays()[0] play.post_validate(templar) mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9'] self.assertEqual(pbe._get_serialized_batches(play), [['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9']]) playbook = Playbook.load(pbe._playbooks[1], variable_manager=mock_var_manager, loader=fake_loader) play = playbook.get_plays()[0] play.post_validate(templar) mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9'] self.assertEqual(pbe._get_serialized_batches(play), [['host0','host1'],['host2','host3'],['host4','host5'],['host6','host7'],['host8','host9']]) playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader) play = playbook.get_plays()[0] play.post_validate(templar) mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9'] self.assertEqual(pbe._get_serialized_batches(play), [['host0','host1'],['host2','host3'],['host4','host5'],['host6','host7'],['host8','host9']]) playbook = Playbook.load(pbe._playbooks[3], variable_manager=mock_var_manager, loader=fake_loader) play = playbook.get_plays()[0] play.post_validate(templar) mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9'] self.assertEqual(pbe._get_serialized_batches(play), 
[['host0'],['host1','host2'],['host3','host4','host5'],['host6','host7','host8'],['host9']]) playbook = Playbook.load(pbe._playbooks[4], variable_manager=mock_var_manager, loader=fake_loader) play = playbook.get_plays()[0] play.post_validate(templar) mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9'] self.assertEqual(pbe._get_serialized_batches(play), [['host0'],['host1','host2'],['host3','host4','host5','host6','host7','host8','host9']]) # Test when serial percent is under 1.0 playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader) play = playbook.get_plays()[0] play.post_validate(templar) mock_inventory.get_hosts.return_value = ['host0','host1','host2'] self.assertEqual(pbe._get_serialized_batches(play), [['host0'],['host1'],['host2']]) # Test when there is a remainder for serial as a percent playbook = Playbook.load(pbe._playbooks[2], variable_manager=mock_var_manager, loader=fake_loader) play = playbook.get_plays()[0] play.post_validate(templar) mock_inventory.get_hosts.return_value = ['host0','host1','host2','host3','host4','host5','host6','host7','host8','host9','host10'] self.assertEqual( pbe._get_serialized_batches(play), [['host0','host1'],['host2','host3'],['host4','host5'],['host6','host7'],['host8','host9'],['host10']] )
gpl-3.0
openmv/micropython
tests/extmod/uctypes_sizeof.py
15
1286
try: import uctypes except ImportError: print("SKIP") raise SystemExit desc = { # arr is array at offset 0, of UINT8 elements, array size is 2 "arr": (uctypes.ARRAY | 0, uctypes.UINT8 | 2), # arr2 is array at offset 0, size 2, of structures defined recursively "arr2": (uctypes.ARRAY | 0, 2, {"b": uctypes.UINT8 | 0}), "arr3": (uctypes.ARRAY | 2, uctypes.UINT16 | 2), "arr4": (uctypes.ARRAY | 0, 2, {"b": uctypes.UINT8 | 0, "w": uctypes.UINT16 | 1}), "sub": ( 0, { "b1": uctypes.BFUINT8 | 0 | 4 << uctypes.BF_POS | 4 << uctypes.BF_LEN, "b2": uctypes.BFUINT8 | 0 | 0 << uctypes.BF_POS | 4 << uctypes.BF_LEN, }, ), } data = bytearray(b"01234567") S = uctypes.struct(uctypes.addressof(data), desc, uctypes.LITTLE_ENDIAN) print(uctypes.sizeof(S.arr)) assert uctypes.sizeof(S.arr) == 2 print(uctypes.sizeof(S.arr2)) assert uctypes.sizeof(S.arr2) == 2 print(uctypes.sizeof(S.arr3)) try: print(uctypes.sizeof(S.arr3[0])) except TypeError: print("TypeError") print(uctypes.sizeof(S.arr4)) assert uctypes.sizeof(S.arr4) == 6 print(uctypes.sizeof(S.sub)) assert uctypes.sizeof(S.sub) == 1 # invalid descriptor try: print(uctypes.sizeof([])) except TypeError: print("TypeError")
mit
rennhak/Dia
plug-ins/python/diasvg_import.py
3
22323
# PyDia SVG Import # Copyright (c) 2003, 2004 Hans Breuer <hans@breuer.org> # # Pure Python Dia Import Filter - to show how it is done. # It also tries to be more featureful and robust then the # SVG importer written in C, but as long as PyDia has issues # this will _not_ be the case. Known issues (at least) : # - xlink stuff (should probably have some StdProp equivalent) # - lack of full transformation dealing # - real percentage scaling, is it worth it ? # - see FIXME in this file # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. import string, math, os, re # Dias unit is cm, the default scale should be determined from svg:width and viewBox dfPcm = 35.43307 dfUserScale = 1.0 dfFontSize = 0.7 dfViewLength = 32.0 # wrong approach for "% unit" dictUnitScales = { "em" : 1.0, "ex" : 2.0, #FIXME these should be _relative_ to current font "px" : 1.0 / dfPcm, "pt" : 1.25 / dfPcm, "pc" : 15.0 / dfPcm, "cm" : 35.43307 / dfPcm, "mm" : 3.543307 / dfPcm, "in" : 90.0 / dfPcm} # only compile once rColor = re.compile(r"rgb\s*\(\s*(\d+)[, ]+(\d+)[, +](\d+)\s*\)") # not really parsing numbers (Scaled will deal with more) rTranslate = re.compile(r"translate\s*\(\s*([^,]+),([^)]+)\s*\)") #FIXME: parse more - e.g. 
AQT - of the strange path data rPathWhat = re.compile("[MmLlCcSsZz]") # what rPathData = re.compile("[^MmLlCcSsZz]+") # data rPathValue = re.compile("[\s,]+") # values def Scaled(s) : # em, ex, px, pt, pc, cm, mm, in, and percentages if s[-1] in string.digits : # use global scale return float(s) * dfUserScale else : unit = s[-2:] try : if unit[0] == "e" : #print "Scaling", unit, dfFontSize return float(s[:-2]) * dfFontSize * dictUnitScales[unit] else : return float(s[:-2]) * dictUnitScales[unit] except : if s[-1] == "%" : return float(s[:-1]) * dfViewLength / 100.0 # warn about invalid unit ?? raise NotImplementedError("Unknown unit %s %s" % (s[:-2], s[-2:])) return float(s) * dfUserScale def Color(s) : # deliver a StdProp compatible Color (or the original string) m = rColor.match(s) if m : return (int(m.group(1)) / 255.0, int(m.group(2)) / 255.0, int(m.group(2)) / 255.0) # any more ugly color definitions not compatible with pango_color_parse() ? return string.strip(s) def _eval (s, _locals) : # eval() can be used to execute aribitray code, see e.g. http://bugzilla.gnome.org/show_bug.cgi?id=317637 # here using *any* builtins is an abuse try : return eval (s, {'__builtins__' : None }, _locals) except NameError : try : import dia dia.message(2, "***Possible exploit attempt***:\n" + s) except ImportError : print "***Possible exploit***:", s return None class Object : def __init__(self) : self.props = {"x" : 0, "y" : 0, "stroke" : "none"} self.translation = None # "line_width", "line_colour", "line_style" def style(self, s) : sp1 = string.split(s, ";") for s1 in sp1 : sp2 = string.split(string.strip(s1), ":") if len(sp2) == 2 : try : _eval("self." 
+ string.replace(sp2[0], "-", "_") + "(\"" + string.strip(sp2[1]) + "\")", locals()) except AttributeError : self.props[sp2[0]] = string.strip(sp2[1]) def x(self, s) : self.props["x"] = Scaled(s) def y(self, s) : self.props["y"] = Scaled(s) def width(self, s) : self.props["width"] = Scaled(s) def height(self, s) : self.props["height"] = Scaled(s) def stroke(self,s) : self.props["stroke"] = s.encode("UTF-8") def stroke_width(self,s) : self.props["stroke-width"] = Scaled(s) def fill(self,s) : self.props["fill"] = s def fill_rule(self,s) : self.props["fill-rule"] = s def stroke_dasharray(self,s) : # just an approximation sp = string.split(s,",") n = len(sp) if n > 0 : # sp[0] == "none" : # ? stupid generator ? try : dlen = Scaled(sp[0]) except : n = 0 if n == 0 : # should not really happen self.props["line-style"] = (0, 1.0) # LINESTYLE_SOLID, elif n == 2 : if dlen > 0.1 : # FIXME: self.props["line-style"] = (1, dlen) # LINESTYLE_DASHED, else : self.props["line-style"] = (4, dlen) # LINESTYLE_DOTTED elif n == 4 : self.props["line-style"] = (2, dlen) # LINESTYLE_DASH_DOT, elif n == 6 : self.props["line-style"] = (3, dlen) # LINESTYLE_DASH_DOT_DOT, def id(self, s) : # just to handle/ignore it self.props["meta"] = { "id" : s } def transform(self, s) : m = rTranslate.match(s) if m : #print "matched", m.group(1), m.group(2), "->", Scaled(m.group(1)), Scaled(m.group(2)) self.translation = (Scaled(m.group(1)), Scaled(m.group(2))) def __repr__(self) : return self.dt + " : " + str(self.props) def Dump(self, indent) : print " " * indent, self def Set(self, d) : pass def ApplyProps(self, o) : pass def CopyProps(self, dest) : # to be used to inherit group props to childs _before_ they get their own # doesn't use the member functions to avoid scaling once more for p in self.props.keys() : dest.props[p] = self.props[p] def Create(self) : ot = dia.get_object_type (self.dt) o, h1, h2 = ot.create(self.props["x"], self.props["y"]) # apply common props if 
self.props.has_key("stroke-width") and o.properties.has_key("line_width") : o.properties["line_width"] = self.props["stroke-width"] if self.props.has_key("stroke") and o.properties.has_key("line_colour") : if self.props["stroke"] != "none" : try : o.properties["line_colour"] = Color(self.props["stroke"]) except : # rgb(192,27,38) handled by Color() but ... # o.properties["line_colour"] = self.props["stroke"] pass else : # Dia can't really display stroke none, some workaround : if self.props.has_key("fill") and self.props["fill"] != "none" : #does it really matter ? try : o.properties["line_colour"] = Color(self.props["fill"]) except : pass o.properties["line_width"] = 0.0 if self.props.has_key("fill") and o.properties.has_key("fill_colour") : if self.props["fill"] == "none" : o.properties["show_background"] = 0 else : color_key = "fill_colour" try : o.properties["show_background"] = 1 except KeyError : # not sure if this is always true color_key = "text_colour" try : o.properties[color_key] =Color(self.props["fill"]) except : # rgb(192,27,38) handled by Color() but ... 
# o.properties["fill_colour"] =self.props["fill"] pass if self.props.has_key("line-style") and o.properties.has_key("line_style") : o.properties["line_style"] = self.props["line-style"] if self.props.has_key("meta") and o.properties.has_key("meta") : o.properties["meta"] = self.props["meta"] self.ApplyProps(o) return o class Svg(Object) : # not a placeable object but similar while parsing def __init__(self) : Object.__init__(self) self.dt = "svg" self.bbox_w = None self.bbox_h = None def width(self,s) : global dfUserScale d = dfUserScale dfUserScale = 0.05 self.bbox_w = Scaled(s) self.props["width"] = self.bbox_w dfUserScale = d def height(self,s) : global dfUserScale d = dfUserScale # with stupid info Dia still has a problem cause zooming is limited to 5.0% dfUserScale = 0.05 self.bbox_h = Scaled(s) self.props["height"] = self.bbox_h dfUserScale = d def viewBox(self,s) : global dfUserScale global dfViewLength self.props["viewBox"] = s sp = string.split(s, " ") w = float(sp[2]) - float(sp[0]) h = float(sp[3]) - float(sp[1]) # FIXME: the following relies on the call order of width,height,viewBox # which is _not_ the order it is in the file if self.bbox_w and self.bbox_h : dfUserScale = math.sqrt((self.bbox_w / w)*(self.bbox_h / h)) elif self.bbox_w : dfUserScale = self.bbox_w / w elif self.bbox_h : dfUserScale = self.bbox_h / h # FIXME: ugly, simple aproach to "%" unit dfViewLength = math.sqrt(w*h) def xmlns(self,s) : self.props["xmlns"] = s def version(self,s) : self.props["version"] = s def __repr__(self) : global dfUserScale return Object.__repr__(self) + "\nUserScale : " + str(dfUserScale) def Create(self) : return None class Style(Object) : # the beginning of a css implementation, ... 
def __init__(self) : global cssStyle Object.__init__(self) self.cdata = "" self.styles = None cssStyle = self def type(self, s) : self.props["type"] = s def Set(self, d) : # consuming all the ugly CDATA self.cdata += d def Lookup(self, st) : if self.styles == None : self.styles = {} # just to check if we are interpreting correctly (better use regex ?) p1 = 0 # position of dot p2 = 0 # ... of opening brace p3 = 0 # ... closing s = self.cdata n = len(s) - 1 while 1 : p1 = string.find(s, ".", p3, n) p2 = string.find(s, "{", p1+1, n) p3 = string.find(s, "}", p2+1, n) if p1 < 0 or p2 < 0 or p3 < 0 : break print s[p1+1:p2-1], s[p2+1:p3] self.styles[s[p1+1:p2-1]] = s[p2+1:p3] if self.styles.has_key(st) : return self.styles[st] return "" def __repr__(self) : self.Lookup("init") # fill the dictionary return "Styles:" + str(self.styles) def Create(self) : return None cssStyle = Style() # a singleton class Group(Object) : def __init__(self) : Object.__init__(self) self.dt = "Group" self.childs = [] def Add(self, o) : self.childs.append(o) def Create(self) : lst = [] for o in self.childs : od = o.Create() if od : #print od #DON'T : layer.add_object(od) lst.append(od) # create group including list objects if len(lst) > 0 : grp = dia.group_create(lst) if self.translation : # want to move by top left corner ... hNW = grp.handles[0] # HANDLE_RESIZE_NW # ... but pos is the point moved pos = grp.properties["obj_pos"].value #FIXME: looking at scascale.py this isn't completely correct x1 = hNW.pos.x + self.translation[0] y1 = hNW.pos.y + self.translation[1] grp.move(x1, y1) return grp else : return None def Dump(self, indent) : print " " * indent, self for o in self.childs : o.Dump(indent + 1) # One of my test files is quite ugly (produced by Batik) : it dumps identical image data # multiple times into the svg. 
This directory helps to reduce them to the necessary # memory comsumption _imageData = {} class Image(Object) : def __init__(self) : Object.__init__(self) self.dt = "Standard - Image" def preserveAspectRatio(self,s) : self.props["keep_aspect"] = s def xlink__href(self,s) : #print s if s[:8] == "file:///" : self.props["uri"] = s.encode("UTF-8") elif s[:22] == "data:image/png;base64," : if _imageData.has_key(s[22:]) : self.props["uri"] = _imageData[s[22:]] # use file reference else : # an ugly temporary file name, on windoze in %TEMP% fname = os.tempnam(None, "diapy-") + ".png" dd = s[22:].decode ("base64") f = open(fname, "wb") f.write(dd) f.close() # not really an uri but the reader appears to be robust enough ;-) _imageData[s[22:]] = "file:///" + fname else : pass #FIXME how to import data into dia ?? def Create(self) : if not (self.props.has_key("uri") or self.props.has_key("data")) : return None return Object.Create(self) def ApplyProps(self,o) : if self.props.has_key("width") : o.properties["elem_width"] = self.props["width"] if self.props.has_key("width") : o.properties["elem_height"] = self.props["height"] if self.props.has_key("uri") : o.properties["image_file"] = self.props["uri"][8:] class Line(Object) : def __init__(self) : Object.__init__(self) self.dt = "Standard - Line" # "line_width". "line_color" # "start_point". "end_point" def x1(self, s) : self.props["x"] = Scaled(s) def y1(self, s) : self.props["y"] = Scaled(s) def x2(self, s) : self.props["x2"] = Scaled(s) def y2(self, s) : self.props["y2"] = Scaled(s) def ApplyProps(self, o) : #pass o.properties["end_point"] = (self.props["x2"], self.props["y2"]) class Path(Object) : def __init__(self) : Object.__init__(self) self.dt = "Standard - BezierLine" # or Beziergon ? self.pts = [] def d(self, s) : self.props["data"] = s #FIXME: parse more - e.g. 
AQT - of the strange path data spd = rPathWhat.split(s) spw = rPathData.split(s) i = 1 # current point xc = 0.0; yc = 0.0 # the current or second control point - ugly svg states ;( for s1 in spw : k = 0 # range further adjusted for last possibly empty -k-1 if s1 == "M" : # moveto sp = rPathValue.split(spd[i]) if sp[0] == "" : k = 1 xc = Scaled(sp[k]); yc = Scaled(sp[k+1]) self.pts.append((0, xc, yc)) elif s1 == "L" : #lineto sp = rPathValue.split(spd[i]) if sp[0] == "" : k = 1 for j in range(k, len(sp)-k-1, 2) : xc = Scaled(sp[j]); yc = Scaled(sp[j+1]) self.pts.append((1, xc, yc)) elif s1 == "C" : # curveto sp = rPathValue.split(spd[i]) if sp[0] == "" : k = 1 for j in range(k, len(sp)-k-1, 6) : self.pts.append((2, Scaled(sp[j]), Scaled(sp[j+1]), Scaled(sp[j+2]), Scaled(sp[j+3]), Scaled(sp[j+4]), Scaled(sp[j+5]))) # reflexion second control to current point, really ? xc =2 * Scaled(sp[j+4]) - Scaled(sp[j+2]) yc =2 * Scaled(sp[j+5]) - Scaled(sp[j+3]) elif s1 == "S" : # smooth curveto sp = rPathValue.split(spd[i]) if sp[0] == "" : k = 1 for j in range(k, len(sp)-k-1, 4) : x = Scaled(sp[j+2]) y = Scaled(sp[j+3]) x1 = Scaled(sp[j]) y1 = Scaled(sp[j+1]) self.pts.append((2, xc, yc, # FIXME: current point ? 
x1, y1, x, y)) xc = 2 * x - x1; yc = 2 * y - y1 elif s1 == "z" or s1 == "Z" : # close self.dt = "Standard - Beziergon" elif s1 == "" : # too much whitespaces ;-) pass else : print "Huh?", s1 break i += 1 def ApplyProps(self,o) : o.properties["bez_points"] = self.pts def Dump(self, indent) : print " " * indent, self for t in self.pts : print " " * indent, t #def Create(self) : # return None # not yet class Rect(Object) : def __init__(self) : Object.__init__(self) self.dt = "Standard - Box" # "corner_radius", def ApplyProps(self,o) : o.properties["elem_width"] = self.props["width"] o.properties["elem_height"] = self.props["height"] class Ellipse(Object) : def __init__(self) : Object.__init__(self) self.dt = "Standard - Ellipse" self.props["cx"] = 0 self.props["cy"] = 0 self.props["rx"] = 1 self.props["ry"] = 1 def cx(self,s) : self.props["cx"] = Scaled(s) self.props["x"] = self.props["cx"] - self.props["rx"] def cy(self,s) : self.props["cy"] = Scaled(s) self.props["y"] = self.props["cy"] - self.props["ry"] def rx(self,s) : self.props["rx"] = Scaled(s) self.props["x"] = self.props["cx"] - self.props["rx"] def ry(self,s) : self.props["ry"] = Scaled(s) self.props["y"] = self.props["cy"] - self.props["ry"] def ApplyProps(self,o) : o.properties["elem_width"] = 2.0 * self.props["rx"] o.properties["elem_height"] = 2.0 * self.props["ry"] class Circle(Ellipse) : def __init__(self) : Ellipse.__init__(self) def r(self,s) : Ellipse.rx(self,s) Ellipse.ry(self,s) class Poly(Object) : def __init__(self) : Object.__init__(self) self.dt = None # abstract class ! 
def points(self,s) : sp1 = string.split(s) pts = [] for s1 in sp1 : sp2 = string.split(s1, ",") if len(sp2) == 2 : pts.append((Scaled(sp2[0]), Scaled(sp2[1]))) self.props["points"] = pts def ApplyProps(self,o) : o.properties["poly_points"] = self.props["points"] class Polygon(Poly) : def __init__(self) : Poly.__init__(self) self.dt = "Standard - Polygon" class Polyline(Poly) : def __init__(self) : Poly.__init__(self) self.dt = "Standard - PolyLine" class Text(Object) : def __init__(self) : Object.__init__(self) self.dt = "Standard - Text" self.props["font-size"] = 1.0 # text_font, text_height, text_color, text_alignment def Set(self, d) : if self.props.has_key("text") : self.props["text"] += d else : self.props["text"] = d def text_anchor(self,s) : self.props["text-anchor"] = s def font_size(self,s) : global dfFontSize # ugh, just maintain another global state if s[-2:-1] != "e" : # FIXME ??? dfFontSize = Scaled(s) #print "FontSize is", dfFontSize self.props["font-size"] = Scaled(s) # ?? self.props["y"] = self.props["y"] - Scaled(s) def font_weight(self, s) : self.props["font-weight"] = s def font_style(self, s) : self.props["font-style"] = s def font_family(self, s) : self.props["font-family"] = s def ApplyProps(self, o) : o.properties["text"] = self.props["text"].encode("UTF-8") if self.props.has_key("text-anchor") : if self.props["text-anchor"] == "middle" : o.properties["text_alignment"] = 1 elif self.props["text-anchor"] == "end" : o.properties["text_alignment"] = 2 else : o.properties["text_alignment"] = 0 if self.props.has_key("fill") : o.properties["text_colour"] = Color(self.props["fill"]) if self.props.has_key("font-size") : o.properties["text_height"] = self.props["font-size"] class Desc(Object) : #FIXME is this useful ? 
def __init__(self) : Object.__init__(self) self.dt = "UML - Note" def Set(self, d) : if self.props.has_key("text") : self.props["text"] += d else : self.props["text"] = d def Create(self) : if self.props.has_key("text") : pass #dia.message(0, self.props["text"].encode("UTF-8")) return None class Title(Object) : #FIXME is this useful ? def __init__(self) : Object.__init__(self) self.dt = "UML - LargePackage" def Set(self, d) : if self.props.has_key("text") : self.props["text"] += d else : self.props["text"] = d def Create(self) : if self.props.has_key("text") : pass return None class Unknown(Object) : def __init__(self, name) : Object.__init__(self) self.dt = "svg:" + name def Create(self) : return None class Importer : def __init__(self) : self.errors = {} self.objects = [] def Parse(self, sData) : import xml.parsers.expat ctx = [] stack = [] # 3 handler functions def start_element(name, attrs) : #print "<" + name + ">" if 0 == string.find(name, "svg:") : name = name[4:] if len(stack) > 0 : grp = stack[-1] else : grp = None if 'g' == name : o = Group() stack.append(o) elif 'tspan' == name : #FIXME: to take all the style coming with it into account # Dia would need to support layouted text ... txn, txo = ctx[-1] if attrs.has_key("dy") : txo.Set("" + "\n") # just a new line (best we can do?) elif attrs.has_key("dx") : txo.Set(" ") ctx.append((txn, txo)) #push the same object return else : s = string.capitalize(name) + "()" try : # should be safe to use eval() here, by XML rules it can just be a name or would give # xml.parsers.expat.ExpatError: not well-formed (invalid token) o = eval(s) except : o = Unknown(name) if grp : grp.CopyProps(o) for a in attrs : if a == "class" : # eeek : keyword ! st = cssStyle.Lookup(attrs[a]) o.style(st) o.props[a] = attrs[a] continue ma = string.replace(a, "-", "_") # e.g. xlink:href -> xlink__href ma = string.replace(ma, ":", "__") s = "o." 
+ ma + "(\"" + attrs[a] + "\")" try : _eval(s, locals()) except AttributeError, msg : o.props["meta"] = { a : attrs[a] } if not self.errors.has_key(msg) : self.errors[msg] = s except SyntaxError, msg : if not self.errors.has_key(msg) : self.errors[msg] = s if grp is None : self.objects.append(o) else : grp.Add(o) ctx.append((name, o)) #push def end_element(name) : if 'g' == name : del stack[-1] del ctx[-1] # pop def char_data(data): # may be called multiple times for one string ctx[-1][1].Set(data) p = xml.parsers.expat.ParserCreate() p.StartElementHandler = start_element p.EndElementHandler = end_element p.CharacterDataHandler = char_data p.Parse(sData) def Render(self,data) : layer = data.active_layer for o in self.objects : od = o.Create() if od : if o.translation : pos = od.properties["obj_pos"].value #FIXME: looking at scascale.py this isn't completely correct x1 = pos.x + o.translation[0] y1 = pos.y + o.translation[1] od.move(x1, y1) layer.add_object(od) # create an 'Unhandled' layer and dump our Unknown # create an 'Errors' layer and dump our errors if len(self.errors.keys()) > 0 : layer = data.add_layer("Errors") s = "To hide the error messages delete or disable the 'Errors' layer\n" for e in self.errors.keys() : s = s + str(e) + " -> " + str(self.errors[e]) + "\n" o = Text() o.props["fill"] = "red" o.Set(s) layer.add_object(o.Create()) # create a 'Description' layer data.update_extents () return 1 def Dump(self) : for o in self.objects : o.Dump(0) for e in self.errors.keys() : print e, "->", self.errors[e] def Test() : import sys imp = Importer() sName = sys.argv[1] if sName[-1] == "z" : import gzip f = gzip.open(sName) else : f = open(sName) imp.Parse(f.read()) if len(sys.argv) > 2 : sys.stdout = open(sys.argv[2], "wb") imp.Dump() sys.exit(0) if __name__ == '__main__': Test() def import_svg(sFile, diagramData) : imp = Importer() f = open(sFile) imp.Parse(f.read()) return imp.Render(diagramData) def import_svgz(sFile, diagramData) : import gzip imp = 
Importer() f = gzip.open(sFile) imp.Parse(f.read()) return imp.Render(diagramData) import dia dia.register_import("SVG plain", "svg", import_svg) dia.register_import("SVG compressed", "svgz", import_svgz)
gpl-2.0
xq262144/hue
desktop/core/ext-py/tablib-0.10.0/tablib/packages/openpyxl/writer/excel.py
61
6332
# file openpyxl/writer/excel.py # Copyright (c) 2010 openpyxl # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. 
#
# @license: http://www.opensource.org/licenses/mit-license.php
# @author: Eric Gazoni

"""Write a .xlsx file."""

# Python stdlib imports
from zipfile import ZipFile, ZIP_DEFLATED

from ....compat import BytesIO as StringIO

# package imports
from ..shared.ooxml import ARC_SHARED_STRINGS, ARC_CONTENT_TYPES, \
        ARC_ROOT_RELS, ARC_WORKBOOK_RELS, ARC_APP, ARC_CORE, ARC_THEME, \
        ARC_STYLE, ARC_WORKBOOK, \
        PACKAGE_WORKSHEETS, PACKAGE_DRAWINGS, PACKAGE_CHARTS
from ..writer.strings import create_string_table, write_string_table
from ..writer.workbook import write_content_types, write_root_rels, \
        write_workbook_rels, write_properties_app, write_properties_core, \
        write_workbook
from ..writer.theme import write_theme
from ..writer.styles import StyleWriter
from ..writer.drawings import DrawingWriter, ShapeWriter
from ..writer.charts import ChartWriter
from ..writer.worksheet import write_worksheet, write_worksheet_rels


class ExcelWriter(object):
    """Write a workbook object to an Excel file."""

    def __init__(self, workbook):
        self.workbook = workbook
        self.style_writer = StyleWriter(self.workbook)

    def write_data(self, archive):
        """Write the various xml files into the zip archive.

        :param archive: an open :class:`zipfile.ZipFile` receiving the parts
        """
        # The shared string table must be built first: worksheet parts
        # reference strings by their index into that table.
        shared_string_table = self._write_string_table(archive)

        archive.writestr(ARC_CONTENT_TYPES, write_content_types(self.workbook))
        archive.writestr(ARC_ROOT_RELS, write_root_rels(self.workbook))
        archive.writestr(ARC_WORKBOOK_RELS, write_workbook_rels(self.workbook))
        archive.writestr(ARC_APP, write_properties_app(self.workbook))
        archive.writestr(ARC_CORE,
                write_properties_core(self.workbook.properties))
        archive.writestr(ARC_THEME, write_theme())
        archive.writestr(ARC_STYLE, self.style_writer.write_table())
        archive.writestr(ARC_WORKBOOK, write_workbook(self.workbook))

        self._write_worksheets(archive, shared_string_table, self.style_writer)

    def _write_string_table(self, archive):
        """Write the sharedStrings part and return the string->index map."""
        # Drop unused cells first so they do not contribute stray strings.
        for ws in self.workbook.worksheets:
            ws.garbage_collect()
        shared_string_table = create_string_table(self.workbook)
        archive.writestr(ARC_SHARED_STRINGS,
                write_string_table(shared_string_table))

        # NOTE(review): bytes(v) assumes Python 2 semantics (bytes is str,
        # so this stringifies the index).  On Python 3, bytes(int) creates a
        # zero-filled buffer instead — confirm the target runtime.
        for k, v in shared_string_table.items():
            shared_string_table[k] = bytes(v)
        return shared_string_table

    def _write_worksheets(self, archive, shared_string_table, style_writer):
        """Write one sheet part per worksheet, plus drawings/charts parts."""
        # Part numbers are 1-based and shared across the whole package, so
        # these counters live outside the per-sheet loop.
        drawing_id = 1
        chart_id = 1
        shape_id = 1

        for i, sheet in enumerate(self.workbook.worksheets):
            archive.writestr(PACKAGE_WORKSHEETS + '/sheet%d.xml' % (i + 1),
                    write_worksheet(sheet, shared_string_table,
                            style_writer.get_style_by_hash()))
            # A .rels part is only needed when the sheet references others.
            if sheet._charts or sheet.relationships:
                archive.writestr(PACKAGE_WORKSHEETS +
                        '/_rels/sheet%d.xml.rels' % (i + 1),
                        write_worksheet_rels(sheet, drawing_id))
            if sheet._charts:
                dw = DrawingWriter(sheet)
                archive.writestr(PACKAGE_DRAWINGS + '/drawing%d.xml' % drawing_id,
                    dw.write())
                archive.writestr(PACKAGE_DRAWINGS + '/_rels/drawing%d.xml.rels' % drawing_id,
                    dw.write_rels(chart_id))
                drawing_id += 1

                for chart in sheet._charts:
                    cw = ChartWriter(chart)
                    archive.writestr(PACKAGE_CHARTS + '/chart%d.xml' % chart_id,
                        cw.write())
                    if chart._shapes:
                        archive.writestr(PACKAGE_CHARTS + '/_rels/chart%d.xml.rels' % chart_id,
                            cw.write_rels(drawing_id))
                        sw = ShapeWriter(chart._shapes)
                        archive.writestr(PACKAGE_DRAWINGS + '/drawing%d.xml' % drawing_id,
                            sw.write(shape_id))
                        shape_id += len(chart._shapes)
                        drawing_id += 1
                    chart_id += 1

    def save(self, filename):
        """Write data into the archive."""
        archive = ZipFile(filename, 'w', ZIP_DEFLATED)
        # Close the archive even when writing fails, so the file handle is
        # not leaked on error (previously it was only closed on success).
        try:
            self.write_data(archive)
        finally:
            archive.close()


def save_workbook(workbook, filename):
    """Save the given workbook on the filesystem under the name filename.

    :param workbook: the workbook to save
    :type workbook: :class:`openpyxl.workbook.Workbook`

    :param filename: the path to which save the workbook
    :type filename: string

    :rtype: bool

    """
    writer = ExcelWriter(workbook)
    writer.save(filename)
    return True


def save_virtual_workbook(workbook):
    """Return an in-memory workbook, suitable for a Django response."""
    writer = ExcelWriter(workbook)
    temp_buffer = StringIO()
    # Create the archive *before* entering try/finally: previously the
    # ZipFile() call sat inside the try block, so if it raised, the finally
    # clause hit a NameError on the still-unbound `archive` name.
    archive = ZipFile(temp_buffer, 'w', ZIP_DEFLATED)
    try:
        writer.write_data(archive)
    finally:
        archive.close()

    virtual_workbook = temp_buffer.getvalue()
    temp_buffer.close()
    return virtual_workbook
apache-2.0
AuyaJackie/odoo
addons/project/__init__.py
436
1141
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import project import company import report import wizard import res_partner import res_config # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
kevclarx/ansible
lib/ansible/modules/notification/typetalk.py
36
4011
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: typetalk
version_added: "1.6"
short_description: Send a message to typetalk
description:
  - Send a message to typetalk using typetalk API ( http://developers.typetalk.in/ )
options:
  client_id:
    description:
      - OAuth2 client ID
    required: true
  client_secret:
    description:
      - OAuth2 client secret
    required: true
  topic:
    description:
      - topic id to post message
    required: true
  msg:
    description:
      - message body
    required: true
requirements: [ json ]
author: "Takashi Someda (@tksmd)"
'''

EXAMPLES = '''
- typetalk:
    client_id: 12345
    client_secret: 12345
    topic: 1
    msg: install completed
'''

# urllib.urlencode moved to urllib.parse.urlencode in Python 3; importing
# plain `urllib` and calling urllib.urlencode() breaks there.  Resolve the
# function once, in the same guarded style the module already uses for json.
try:
    from urllib import urlencode
except ImportError:
    from urllib.parse import urlencode

try:
    import json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        json = None

# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.urls import fetch_url, ConnectionError


def do_request(module, url, params, headers=None):
    """POST url-encoded `params` to `url` and return the response body.

    Raises ConnectionError (carrying the HTTP status in its `code`
    attribute) when the response status is not 200.
    """
    data = urlencode(params)
    if headers is None:
        headers = dict()
    headers = dict(headers, **{
        'User-Agent': 'Ansible/typetalk module',
    })
    r, info = fetch_url(module, url, data=data, headers=headers)
    if info['status'] != 200:
        exc = ConnectionError(info['msg'])
        exc.code = info['status']
        raise exc
    return r


def get_access_token(module, client_id, client_secret):
    """Exchange OAuth2 client credentials for a topic.post access token."""
    params = {
        'client_id': client_id,
        'client_secret': client_secret,
        'grant_type': 'client_credentials',
        'scope': 'topic.post'
    }
    res = do_request(module, 'https://typetalk.in/oauth2/access_token', params)
    return json.load(res)['access_token']


def send_message(module, client_id, client_secret, topic, msg):
    """
    send message to typetalk

    Returns (True, info_dict) on success, (False, exception) on a
    connection/API failure.
    """
    try:
        access_token = get_access_token(module, client_id, client_secret)
        url = 'https://typetalk.in/api/v1/topics/%d' % topic
        headers = {
            'Authorization': 'Bearer %s' % access_token,
        }
        do_request(module, url, {'message': msg}, headers)
        return True, {'access_token': access_token}
    except ConnectionError:
        e = get_exception()
        return False, e


def main():
    """Ansible entry point: validate params and post the message."""
    module = AnsibleModule(
        argument_spec=dict(
            client_id=dict(required=True),
            client_secret=dict(required=True, no_log=True),
            topic=dict(required=True, type='int'),
            msg=dict(required=True),
        ),
        supports_check_mode=False
    )

    if not json:
        module.fail_json(msg="json module is required")

    client_id = module.params["client_id"]
    client_secret = module.params["client_secret"]
    topic = module.params["topic"]
    msg = module.params["msg"]

    res, error = send_message(module, client_id, client_secret, topic, msg)
    if not res:
        module.fail_json(msg='fail to send message with response code %s' % error.code)

    module.exit_json(changed=True, topic=topic, msg=msg)


if __name__ == '__main__':
    main()
gpl-3.0
dirkmueller/qemu
scripts/tracetool/__init__.py
205
7624
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
Machinery for generating tracing-related intermediate files.
"""

__author__     = "Lluís Vilanova <vilanova@ac.upc.edu>"
__copyright__  = "Copyright 2012, Lluís Vilanova <vilanova@ac.upc.edu>"
__license__    = "GPL version 2 or (at your option) any later version"

__maintainer__ = "Stefan Hajnoczi"
__email__      = "stefanha@linux.vnet.ibm.com"


import re
import sys

import tracetool.format
import tracetool.backend


def error_write(*lines):
    """Write a set of error lines."""
    sys.stderr.writelines("\n".join(lines) + "\n")

def error(*lines):
    """Write a set of error lines and exit."""
    error_write(*lines)
    sys.exit(1)


def out(*lines, **kwargs):
    """Write a set of output lines.

    You can use kwargs as a shorthand for mapping variables when formating all
    the strings in lines.
    """
    lines = [ l % kwargs for l in lines ]
    sys.stdout.writelines("\n".join(lines) + "\n")


class Arguments:
    """Event arguments description."""

    def __init__(self, args):
        """
        Parameters
        ----------
        args :
            List of (type, name) tuples.
        """
        self._args = args

    @staticmethod
    def build(arg_str):
        """Build and Arguments instance from an argument string.

        Parameters
        ----------
        arg_str : str
            String describing the event arguments.
        """
        res = []
        for arg in arg_str.split(","):
            arg = arg.strip()
            # "void" declares an empty argument list.
            if arg == 'void':
                continue

            if '*' in arg:
                # Pointer argument: keep the '*' with the type part.
                arg_type, identifier = arg.rsplit('*', 1)
                arg_type += '*'
                identifier = identifier.strip()
            else:
                arg_type, identifier = arg.rsplit(None, 1)

            res.append((arg_type, identifier))
        return Arguments(res)

    def __iter__(self):
        """Iterate over the (type, name) pairs."""
        return iter(self._args)

    def __len__(self):
        """Number of arguments."""
        return len(self._args)

    def __str__(self):
        """String suitable for declaring function arguments."""
        if len(self._args) == 0:
            return "void"
        else:
            return ", ".join([ " ".join([t, n]) for t,n in self._args ])

    def __repr__(self):
        """Evaluable string representation for this object."""
        return "Arguments(\"%s\")" % str(self)

    def names(self):
        """List of argument names."""
        return [ name for _, name in self._args ]

    def types(self):
        """List of argument types."""
        return [ type_ for type_, _ in self._args ]


class Event(object):
    """Event description.

    Attributes
    ----------
    name : str
        The event name.
    fmt : str
        The event format string.
    properties : set(str)
        Properties of the event.
    args : Arguments
        The event arguments.
    """

    _CRE = re.compile("((?P<props>.*)\s+)?(?P<name>[^(\s]+)\((?P<args>[^)]*)\)\s*(?P<fmt>\".*)?")

    _VALID_PROPS = set(["disable"])

    def __init__(self, name, props, fmt, args):
        """
        Parameters
        ----------
        name : string
            Event name.
        props : list of str
            Property names.
        fmt : str
            Event printing format.
        args : Arguments
            Event arguments.
        """
        self.name = name
        self.properties = props
        self.fmt = fmt
        self.args = args

        unknown_props = set(self.properties) - self._VALID_PROPS
        if len(unknown_props) > 0:
            raise ValueError("Unknown properties: %s" % ", ".join(unknown_props))

    @staticmethod
    def build(line_str):
        """Build an Event instance from a string.

        Parameters
        ----------
        line_str : str
            Line describing the event.
        """
        m = Event._CRE.match(line_str)
        assert m is not None
        groups = m.groupdict('')

        name = groups["name"]
        props = groups["props"].split()
        fmt = groups["fmt"]
        args = Arguments.build(groups["args"])

        return Event(name, props, fmt, args)

    def __repr__(self):
        """Evaluable string representation for this object."""
        return "Event('%s %s(%s) %s')" % (" ".join(self.properties),
                                          self.name,
                                          self.args,
                                          self.fmt)


def _read_events(fobj):
    """Parse an event-description file into a list of Event objects,
    skipping blank lines and '#' comment lines."""
    res = []
    for line in fobj:
        if not line.strip():
            continue
        if line.lstrip().startswith('#'):
            continue
        res.append(Event.build(line))
    return res


class TracetoolError (Exception):
    """Exception for calls to generate."""
    pass


def try_import(mod_name, attr_name = None, attr_default = None):
    """Try to import a module and get an attribute from it.

    Parameters
    ----------
    mod_name : str
        Module name.
    attr_name : str, optional
        Name of an attribute in the module.
    attr_default : optional
        Default value if the attribute does not exist in the module.

    Returns
    -------
    A pair indicating whether the module could be imported and the module or
    object or attribute value.
    """
    try:
        module = __import__(mod_name, globals(), locals(), ["__package__"])
        if attr_name is None:
            return True, module
        return True, getattr(module, str(attr_name), attr_default)
    except ImportError:
        return False, None


def generate(fevents, format, backend,
             binary = None, probe_prefix = None):
    """Generate the output for the given (format, backend) pair.

    Parameters
    ----------
    fevents : file
        Event description file.
    format : str
        Output format name.
    backend : str
        Output backend name.
    binary : str or None
        See tracetool.backend.dtrace.BINARY.
    probe_prefix : str or None
        See tracetool.backend.dtrace.PROBEPREFIX.
    """
    # fix strange python error (UnboundLocalError tracetool)
    import tracetool

    format = str(format)
    # Fixed: this used `len(format) is 0`, an identity comparison against an
    # int literal that only works by CPython's small-int caching accident.
    if len(format) == 0:
        raise TracetoolError("format not set")
    mformat = format.replace("-", "_")
    if not tracetool.format.exists(mformat):
        raise TracetoolError("unknown format: %s" % format)

    backend = str(backend)
    if len(backend) == 0:
        raise TracetoolError("backend not set")
    mbackend = backend.replace("-", "_")
    if not tracetool.backend.exists(mbackend):
        raise TracetoolError("unknown backend: %s" % backend)

    if not tracetool.backend.compatible(mbackend, mformat):
        raise TracetoolError("backend '%s' not compatible with format '%s'" %
                             (backend, format))

    import tracetool.backend.dtrace
    tracetool.backend.dtrace.BINARY = binary
    tracetool.backend.dtrace.PROBEPREFIX = probe_prefix

    events = _read_events(fevents)

    if backend == "nop":
        # Fixed: the original built a generator expression here and never
        # iterated it, so no event was ever disabled; it also misspelled
        # `properties` and called set.add() on what is a list of property
        # names (see Event.build).
        for e in events:
            if "disable" not in e.properties:
                e.properties.append("disable")

    tracetool.format.generate_begin(mformat, events)
    tracetool.backend.generate("nop", format,
                               [ e
                                 for e in events
                                 if "disable" in e.properties ])
    tracetool.backend.generate(backend, format,
                               [ e
                                 for e in events
                                 if "disable" not in e.properties ])
    tracetool.format.generate_end(mformat, events)
gpl-2.0
KiChjang/servo
tests/wpt/web-platform-tests/tools/third_party/pywebsocket3/test/testdata/handlers/sub/exception_in_transfer_wsh.py
21
1814
# Copyright 2009, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Exception in web_socket_transfer_data().
"""


def web_socket_do_extra_handshake(request):
    # Accept every handshake as-is; this test handler negotiates nothing.
    pass


def web_socket_transfer_data(request):
    # Deliberately blow up during the data-transfer phase so the test
    # harness can observe how the server reports handler exceptions.
    detail = 'Intentional Exception for %s, %s' % (request.ws_resource,
                                                   request.ws_protocol)
    raise Exception(detail)


# vi:sts=4 sw=4 et
mpl-2.0
liutang123/spark
python/pyspark/shell.py
56
3126
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

"""
An interactive shell.

This file is designed to be launched as a PYTHONSTARTUP script.
"""

import atexit
import os
import platform
import warnings

import py4j

from pyspark import SparkConf
from pyspark.context import SparkContext
from pyspark.sql import SparkSession, SQLContext

# Propagate an executor URI from the environment into the JVM system
# properties before any context is created.
if os.environ.get("SPARK_EXECUTOR_URI"):
    SparkContext.setSystemProperty("spark.executor.uri", os.environ["SPARK_EXECUTOR_URI"])

# Boot the JVM gateway and shared SparkContext machinery.
SparkContext._ensure_initialized()

try:
    # Try to access HiveConf, it will raise exception if Hive is not added
    conf = SparkConf()
    if conf.get('spark.sql.catalogImplementation', 'hive').lower() == 'hive':
        # HiveConf being loadable means a Hive-enabled build; create a
        # Hive-backed session.
        SparkContext._jvm.org.apache.hadoop.hive.conf.HiveConf()
        spark = SparkSession.builder\
            .enableHiveSupport()\
            .getOrCreate()
    else:
        spark = SparkSession.builder.getOrCreate()
except py4j.protocol.Py4JError:
    # HiveConf probe failed through py4j: fall back to a plain session,
    # warning only when the user explicitly asked for the hive catalog.
    # NOTE(review): if SparkConf() itself raised, `conf` is unbound here and
    # this handler would NameError — confirm SparkConf() cannot raise these.
    if conf.get('spark.sql.catalogImplementation', '').lower() == 'hive':
        warnings.warn("Fall back to non-hive support because failing to access HiveConf, "
                      "please make sure you build spark with hive")
    spark = SparkSession.builder.getOrCreate()
except TypeError:
    # Same fallback when the HiveConf JVM class is missing entirely.
    if conf.get('spark.sql.catalogImplementation', '').lower() == 'hive':
        warnings.warn("Fall back to non-hive support because failing to access HiveConf, "
                      "please make sure you build spark with hive")
    spark = SparkSession.builder.getOrCreate()

# Convenience globals exposed to the interactive user.
sc = spark.sparkContext
sql = spark.sql
# Stop the context cleanly when the interpreter exits.
atexit.register(lambda: sc.stop())

# for compatibility
sqlContext = spark._wrapped
sqlCtx = sqlContext

print("""Welcome to
      ____              __
     / __/__  ___ _____/ /__
    _\ \/ _ \/ _ `/ __/  '_/
   /__ / .__/\_,_/_/ /_/\_\   version %s
      /_/
""" % sc.version)
print("Using Python version %s (%s, %s)" % (
    platform.python_version(),
    platform.python_build()[0],
    platform.python_build()[1]))
print("SparkSession available as 'spark'.")

# The ./bin/pyspark script stores the old PYTHONSTARTUP value in OLD_PYTHONSTARTUP,
# which allows us to execute the user's PYTHONSTARTUP file:
_pythonstartup = os.environ.get('OLD_PYTHONSTARTUP')
if _pythonstartup and os.path.isfile(_pythonstartup):
    with open(_pythonstartup) as f:
        code = compile(f.read(), _pythonstartup, 'exec')
        exec(code)
apache-2.0
draekko/android_kernel_samsung_kylessopen
Documentation/target/tcm_mod_builder.py
3119
42754
#!/usr/bin/python # The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD # # Copyright (c) 2010 Rising Tide Systems # Copyright (c) 2010 Linux-iSCSI.org # # Author: nab@kernel.org # import os, sys import subprocess as sub import string import re import optparse tcm_dir = "" fabric_ops = [] fabric_mod_dir = "" fabric_mod_port = "" fabric_mod_init_port = "" def tcm_mod_err(msg): print msg sys.exit(1) def tcm_mod_create_module_subdir(fabric_mod_dir_var): if os.path.isdir(fabric_mod_dir_var) == True: return 1 print "Creating fabric_mod_dir: " + fabric_mod_dir_var ret = os.mkdir(fabric_mod_dir_var) if ret: tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var) return def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n" buf += " u64 nport_wwpn;\n" buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n" buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n" buf += "\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* FC lport target portal group tag for TCM */\n" buf += " u16 lport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n" buf += " struct " + fabric_mod_name + "_lport *lport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n" buf += "\n" buf += "struct " + fabric_mod_name + "_lport {\n" buf += " 
/* SCSI protocol the lport is providing */\n" buf += " u8 lport_proto_id;\n" buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n" buf += " u64 lport_wwpn;\n" buf += " /* ASCII formatted WWPN for FC Target Lport */\n" buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n" buf += " struct se_wwn lport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "lport" fabric_mod_init_port = "nport" return def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n" buf += " u64 iport_wwpn;\n" buf += " /* ASCII formatted WWPN for Sas Initiator port */\n" buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* SAS port target portal group tag for TCM */\n" buf += " u16 tport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n" buf += " struct " + fabric_mod_name + "_tport *tport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tport {\n" buf += " /* SCSI protocol the tport is providing */\n" buf += " u8 tport_proto_id;\n" buf += " /* Binary World Wide unique Port Name for SAS Target port */\n" buf += 
" u64 tport_wwpn;\n" buf += " /* ASCII formatted WWPN for SAS Target port */\n" buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n" buf += " struct se_wwn tport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "tport" fabric_mod_init_port = "iport" return def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name): global fabric_mod_port global fabric_mod_init_port buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n" buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n" buf += "\n" buf += "struct " + fabric_mod_name + "_nacl {\n" buf += " /* ASCII formatted InitiatorName */\n" buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n" buf += " struct se_node_acl se_node_acl;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tpg {\n" buf += " /* iSCSI target portal group tag for TCM */\n" buf += " u16 tport_tpgt;\n" buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n" buf += " struct " + fabric_mod_name + "_tport *tport;\n" buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n" buf += " struct se_portal_group se_tpg;\n" buf += "};\n\n" buf += "struct " + fabric_mod_name + "_tport {\n" buf += " /* SCSI protocol the tport is providing */\n" buf += " u8 tport_proto_id;\n" buf += " /* ASCII formatted TargetName for IQN */\n" buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n" buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n" buf += " struct se_wwn tport_wwn;\n" buf += "};\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() fabric_mod_port = "tport" 
fabric_mod_init_port = "iport" return def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name): if proto_ident == "FC": tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name) elif proto_ident == "SAS": tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name) elif proto_ident == "iSCSI": tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name) else: print "Unsupported proto_ident: " + proto_ident sys.exit(1) return def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c" print "Writing file: " + f p = open(f, 'w'); if not p: tcm_mod_err("Unable to open file: " + f) buf = "#include <linux/module.h>\n" buf += "#include <linux/moduleparam.h>\n" buf += "#include <linux/version.h>\n" buf += "#include <generated/utsrelease.h>\n" buf += "#include <linux/utsname.h>\n" buf += "#include <linux/init.h>\n" buf += "#include <linux/slab.h>\n" buf += "#include <linux/kthread.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/string.h>\n" buf += "#include <linux/configfs.h>\n" buf += "#include <linux/ctype.h>\n" buf += "#include <asm/unaligned.h>\n\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/target_core_transport.h>\n" buf += "#include <target/target_core_fabric_ops.h>\n" buf += "#include <target/target_core_fabric_configfs.h>\n" buf += "#include <target/target_core_fabric_lib.h>\n" buf += "#include <target/target_core_device.h>\n" buf += "#include <target/target_core_tpg.h>\n" buf += "#include <target/target_core_configfs.h>\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/configfs_macros.h>\n\n" buf += "#include \"" + fabric_mod_name + "_base.h\"\n" buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n" buf += "/* Local pointer to allocated TCM configfs fabric module */\n" buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n" buf += 
"static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n" buf += " struct " + fabric_mod_name + "_nacl *nacl;\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " u64 wwpn = 0;\n" buf += " u32 nexus_depth;\n\n" buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n" buf += " return ERR_PTR(-EINVAL); */\n" buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n" buf += " if (!(se_nacl_new))\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n" buf += " nexus_depth = 1;\n" buf += " /*\n" buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n" buf += " * when converting a NodeACL from demo mode -> explict\n" buf += " */\n" buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n" buf += " name, nexus_depth);\n" buf += " if (IS_ERR(se_nacl)) {\n" buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n" buf += " return se_nacl;\n" buf += " }\n" buf += " /*\n" buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n" buf += " */\n" buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n" buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n" buf += " return se_nacl;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n" buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n" buf += " 
core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n" buf += " kfree(nacl);\n" buf += "}\n\n" buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n" buf += " struct se_wwn *wwn,\n" buf += " struct config_group *group,\n" buf += " const char *name)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n" buf += " struct " + fabric_mod_name + "_tpg *tpg;\n" buf += " unsigned long tpgt;\n" buf += " int ret;\n\n" buf += " if (strstr(name, \"tpgt_\") != name)\n" buf += " return ERR_PTR(-EINVAL);\n" buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n" buf += " return ERR_PTR(-EINVAL);\n\n" buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n" buf += " if (!(tpg)) {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += " }\n" buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n" buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n" buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n" buf += " &tpg->se_tpg, (void *)tpg,\n" buf += " TRANSPORT_TPG_TYPE_NORMAL);\n" buf += " if (ret < 0) {\n" buf += " kfree(tpg);\n" buf += " return NULL;\n" buf += " }\n" buf += " return &tpg->se_tpg;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n" buf += " core_tpg_deregister(se_tpg);\n" buf += " kfree(tpg);\n" buf += "}\n\n" buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n" buf += " struct target_fabric_configfs *tf,\n" buf += " struct config_group *group,\n" buf += " const char 
*name)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " u64 wwpn = 0;\n\n" buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n" buf += " return ERR_PTR(-EINVAL); */\n\n" buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n" buf += " if (!(" + fabric_mod_port + ")) {\n" buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n" buf += " return ERR_PTR(-ENOMEM);\n" buf += " }\n" if proto_ident == "FC" or proto_ident == "SAS": buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n" buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "__NAMELEN, wwpn); */\n\n" buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n" buf += "}\n\n" buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n" buf += " kfree(" + fabric_mod_port + ");\n" buf += "}\n\n" buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n" buf += " struct target_fabric_configfs *tf,\n" buf += " char *page)\n" buf += "{\n" buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n" buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n" buf += " utsname()->machine);\n" buf += "}\n\n" buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n" buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n" buf += " &" + fabric_mod_name + "_wwn_version.attr,\n" buf += " 
NULL,\n" buf += "};\n\n" buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n" buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n" buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n" buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n" buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n" buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n" buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n" buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n" buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n" buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n" buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n" buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n" buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n" buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n" buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n" buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n" buf += " .release_cmd_to_pool = " + fabric_mod_name + "_release_cmd,\n" buf += " .release_cmd_direct = " + fabric_mod_name + "_release_cmd,\n" buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n" buf += " .close_session = " + fabric_mod_name + "_close_session,\n" buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n" buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n" buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n" buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n" buf += " .sess_get_initiator_sid = NULL,\n" buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n" buf += " 
.write_pending_status = " + fabric_mod_name + "_write_pending_status,\n" buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n" buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n" buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n" buf += " .new_cmd_failure = " + fabric_mod_name + "_new_cmd_failure,\n" buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n" buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n" buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n" buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n" buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n" buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n" buf += " .pack_lun = " + fabric_mod_name + "_pack_lun,\n" buf += " /*\n" buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n" buf += " */\n" buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n" buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n" buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n" buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n" buf += " .fabric_post_link = NULL,\n" buf += " .fabric_pre_unlink = NULL,\n" buf += " .fabric_make_np = NULL,\n" buf += " .fabric_drop_np = NULL,\n" buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n" buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n" buf += "};\n\n" buf += "static int " + fabric_mod_name + "_register_configfs(void)\n" buf += "{\n" buf += " struct target_fabric_configfs *fabric;\n" buf += " int ret;\n\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n" buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n" buf += " utsname()->machine);\n" buf += " /*\n" buf += " 
* Register the top level struct config_item_type with TCM core\n" buf += " */\n" buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n" buf += " if (!(fabric)) {\n" buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n" buf += " return -ENOMEM;\n" buf += " }\n" buf += " /*\n" buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n" buf += " */\n" buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n" buf += " /*\n" buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n" buf += " */\n" buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n" buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n" buf += " /*\n" buf += " * Register the fabric for use within TCM\n" buf += " */\n" buf += " ret = target_fabric_configfs_register(fabric);\n" buf += " if (ret < 0) {\n" buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n" buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n" buf += " return ret;\n" buf += " }\n" buf += " /*\n" buf += " * Setup our local pointer to *fabric\n" buf += " */\n" buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n" buf += " return 0;\n" buf += "};\n\n" buf += "static void " + fabric_mod_name + "_deregister_configfs(void)\n" buf += "{\n" buf += " if (!(" 
+ fabric_mod_name + "_fabric_configfs))\n" buf += " return;\n\n" buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n" buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n" buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n" buf += "};\n\n" buf += "static int __init " + fabric_mod_name + "_init(void)\n" buf += "{\n" buf += " int ret;\n\n" buf += " ret = " + fabric_mod_name + "_register_configfs();\n" buf += " if (ret < 0)\n" buf += " return ret;\n\n" buf += " return 0;\n" buf += "};\n\n" buf += "static void " + fabric_mod_name + "_exit(void)\n" buf += "{\n" buf += " " + fabric_mod_name + "_deregister_configfs();\n" buf += "};\n\n" buf += "#ifdef MODULE\n" buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n" buf += "MODULE_LICENSE(\"GPL\");\n" buf += "module_init(" + fabric_mod_name + "_init);\n" buf += "module_exit(" + fabric_mod_name + "_exit);\n" buf += "#endif\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_scan_fabric_ops(tcm_dir): fabric_ops_api = tcm_dir + "include/target/target_core_fabric_ops.h" print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api process_fo = 0; p = open(fabric_ops_api, 'r') line = p.readline() while line: if process_fo == 0 and re.search('struct target_core_fabric_ops {', line): line = p.readline() continue if process_fo == 0: process_fo = 1; line = p.readline() # Search for function pointer if not re.search('\(\*', line): continue fabric_ops.append(line.rstrip()) continue line = p.readline() # Search for function pointer if not re.search('\(\*', line): continue fabric_ops.append(line.rstrip()) p.close() return def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name): buf = "" bufi = "" f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable 
to open file: " + f) fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h" print "Writing file: " + fi pi = open(fi, 'w') if not pi: tcm_mod_err("Unable to open file: " + fi) buf = "#include <linux/slab.h>\n" buf += "#include <linux/kthread.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/list.h>\n" buf += "#include <linux/types.h>\n" buf += "#include <linux/string.h>\n" buf += "#include <linux/ctype.h>\n" buf += "#include <asm/unaligned.h>\n" buf += "#include <scsi/scsi.h>\n" buf += "#include <scsi/scsi_host.h>\n" buf += "#include <scsi/scsi_device.h>\n" buf += "#include <scsi/scsi_cmnd.h>\n" buf += "#include <scsi/libfc.h>\n\n" buf += "#include <target/target_core_base.h>\n" buf += "#include <target/target_core_transport.h>\n" buf += "#include <target/target_core_fabric_ops.h>\n" buf += "#include <target/target_core_fabric_lib.h>\n" buf += "#include <target/target_core_device.h>\n" buf += "#include <target/target_core_tpg.h>\n" buf += "#include <target/target_core_configfs.h>\n\n" buf += "#include \"" + fabric_mod_name + "_base.h\"\n" buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n" buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n" buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n" total_fabric_ops = len(fabric_ops) i = 0 while i < total_fabric_ops: fo = fabric_ops[i] i += 1 # print "fabric_ops: " + fo if re.search('get_fabric_name', fo): buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n" buf += "{\n" buf += " return \"" + fabric_mod_name[4:] + "\";\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n" continue if re.search('get_fabric_proto_ident', fo): buf += "u8 " + 
fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " u8 proto_id;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n" buf += " break;\n" buf += " }\n\n" buf += " return proto_id;\n" buf += "}\n\n" bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n" if re.search('get_wwn', fo): buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n" buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n" if re.search('get_tag', fo): buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " return tpg->" + fabric_mod_port + "_tpgt;\n" buf += "}\n\n" bufi += "u16 " + 
fabric_mod_name + "_get_tag(struct se_portal_group *);\n" if re.search('get_default_depth', fo): buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n" if re.search('get_pr_transport_id\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl,\n" buf += " struct t10_pr_registration *pr_reg,\n" buf += " int *format_code,\n" buf += " unsigned char *buf)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " int ret = 0;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n" buf += " format_code, buf);\n" buf += " break;\n" buf += " }\n\n" buf += " return ret;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n" bufi += " struct se_node_acl *, struct t10_pr_registration *,\n" bufi += " int *, unsigned char *);\n" if re.search('get_pr_transport_id_len\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n" buf += " struct se_portal_group 
*se_tpg,\n" buf += " struct se_node_acl *se_nacl,\n" buf += " struct t10_pr_registration *pr_reg,\n" buf += " int *format_code)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " int ret = 0;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == "FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n" buf += " format_code);\n" buf += " break;\n" buf += " }\n\n" buf += " return ret;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n" bufi += " struct se_node_acl *, struct t10_pr_registration *,\n" bufi += " int *);\n" if re.search('parse_pr_out_transport_id\)\(', fo): buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " const char *buf,\n" buf += " u32 *out_tid_len,\n" buf += " char **port_nexus_ptr)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n" buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n" buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n" buf += " char *tid = NULL;\n\n" buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n" if proto_ident == 
"FC": buf += " case SCSI_PROTOCOL_FCP:\n" buf += " default:\n" buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" elif proto_ident == "SAS": buf += " case SCSI_PROTOCOL_SAS:\n" buf += " default:\n" buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" elif proto_ident == "iSCSI": buf += " case SCSI_PROTOCOL_ISCSI:\n" buf += " default:\n" buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n" buf += " port_nexus_ptr);\n" buf += " }\n\n" buf += " return tid;\n" buf += "}\n\n" bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n" bufi += " const char *, u32 *, char **);\n" if re.search('alloc_fabric_acl\)\(', fo): buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n" buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n" buf += " if (!(nacl)) {\n" buf += " printk(KERN_ERR \"Unable to alocate struct " + fabric_mod_name + "_nacl\\n\");\n" buf += " return NULL;\n" buf += " }\n\n" buf += " return &nacl->se_node_acl;\n" buf += "}\n\n" bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n" if re.search('release_fabric_acl\)\(', fo): buf += "void " + fabric_mod_name + "_release_fabric_acl(\n" buf += " struct se_portal_group *se_tpg,\n" buf += " struct se_node_acl *se_nacl)\n" buf += "{\n" buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n" buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n" buf += " kfree(nacl);\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n" bufi += " struct se_node_acl *);\n" if re.search('tpg_get_inst_index\)\(', fo): buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n" buf += 
"{\n" buf += " return 1;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n" if re.search('release_cmd_to_pool', fo): buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n" if re.search('shutdown_session\)\(', fo): buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n" if re.search('close_session\)\(', fo): buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n" if re.search('stop_session\)\(', fo): buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n" if re.search('fall_back_to_erl0\)\(', fo): buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n" if re.search('sess_logged_in\)\(', fo): buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n" if re.search('sess_get_index\)\(', fo): buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n" if re.search('write_pending\)\(', fo): buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd 
*se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n" if re.search('write_pending_status\)\(', fo): buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n" if re.search('set_default_node_attributes\)\(', fo): buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n" if re.search('get_task_tag\)\(', fo): buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n" if re.search('get_cmd_state\)\(', fo): buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n" if re.search('new_cmd_failure\)\(', fo): buf += "void " + fabric_mod_name + "_new_cmd_failure(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return;\n" buf += "}\n\n" bufi += "void " + fabric_mod_name + "_new_cmd_failure(struct se_cmd *);\n" if re.search('queue_data_in\)\(', fo): buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n" if re.search('queue_status\)\(', fo): buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n" if re.search('queue_tm_rsp\)\(', fo): buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 
0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n" if re.search('get_fabric_sense_len\)\(', fo): buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n" if re.search('set_fabric_sense_len\)\(', fo): buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n" if re.search('is_state_remove\)\(', fo): buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n" buf += "{\n" buf += " return 0;\n" buf += "}\n\n" bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n" if re.search('pack_lun\)\(', fo): buf += "u64 " + fabric_mod_name + "_pack_lun(unsigned int lun)\n" buf += "{\n" buf += " WARN_ON(lun >= 256);\n" buf += " /* Caller wants this byte-swapped */\n" buf += " return cpu_to_le64((lun & 0xff) << 8);\n" buf += "}\n\n" bufi += "u64 " + fabric_mod_name + "_pack_lun(unsigned int);\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() ret = pi.write(bufi) if ret: tcm_mod_err("Unable to write fi: " + fi) pi.close() return def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/Makefile" print "Writing file: " + f p = open(f, 'w') if not p: tcm_mod_err("Unable to open file: " + f) buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n" buf += " " + fabric_mod_name + "_configfs.o\n" buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name): buf = "" f = fabric_mod_dir_var + "/Kconfig" print "Writing file: " + f p = open(f, 'w') if not p: 
tcm_mod_err("Unable to open file: " + f) buf = "config " + fabric_mod_name.upper() + "\n" buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n" buf += " depends on TARGET_CORE && CONFIGFS_FS\n" buf += " default n\n" buf += " ---help---\n" buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n" ret = p.write(buf) if ret: tcm_mod_err("Unable to write f: " + f) p.close() return def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name): buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n" kbuild = tcm_dir + "/drivers/target/Makefile" f = open(kbuild, 'a') f.write(buf) f.close() return def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name): buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n" kconfig = tcm_dir + "/drivers/target/Kconfig" f = open(kconfig, 'a') f.write(buf) f.close() return def main(modname, proto_ident): # proto_ident = "FC" # proto_ident = "SAS" # proto_ident = "iSCSI" tcm_dir = os.getcwd(); tcm_dir += "/../../" print "tcm_dir: " + tcm_dir fabric_mod_name = modname fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name print "Set fabric_mod_name: " + fabric_mod_name print "Set fabric_mod_dir: " + fabric_mod_dir print "Using proto_ident: " + proto_ident if proto_ident != "FC" and proto_ident != "SAS" and proto_ident != "iSCSI": print "Unsupported proto_ident: " + proto_ident sys.exit(1) ret = tcm_mod_create_module_subdir(fabric_mod_dir) if ret: print "tcm_mod_create_module_subdir() failed because module already exists!" 
sys.exit(1) tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_scan_fabric_ops(tcm_dir) tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name) tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name) tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name) input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ") if input == "yes" or input == "y": tcm_mod_add_kbuild(tcm_dir, fabric_mod_name) input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ") if input == "yes" or input == "y": tcm_mod_add_kconfig(tcm_dir, fabric_mod_name) return parser = optparse.OptionParser() parser.add_option('-m', '--modulename', help='Module name', dest='modname', action='store', nargs=1, type='string') parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident', action='store', nargs=1, type='string') (opts, args) = parser.parse_args() mandatories = ['modname', 'protoident'] for m in mandatories: if not opts.__dict__[m]: print "mandatory option is missing\n" parser.print_help() exit(-1) if __name__ == "__main__": main(str(opts.modname), opts.protoident)
gpl-2.0
wanatpj/h_blind
diffenerce_histogram.py
1
1713
import Image
import matplotlib.pyplot as plt
import numpy
import os
from optparse import OptionParser

from common import *


def _parse_flags():
    """Parse command-line flags into the module-level indir/rangeradius."""
    global indir, rangeradius
    parser = OptionParser()
    parser.add_option("-i", "--in", dest="indir",
                      help="directory that contains images for which the difference histogram"
                           + " will be computed",
                      metavar="DIR")
    parser.add_option("-r", "--rangeradius", dest="rangeradius",
                      help="range of the histogram",
                      metavar="NUMBER")
    (options, args) = parser.parse_args()
    if not options.indir or not options.rangeradius:
        parser.error('Not all flags specified; run with --help to see the flags;')
    indir = options.indir
    rangeradius = int(options.rangeradius)


def extract_differences(f):
    """Return a list of greyscale differences between each pixel and its
    right and bottom neighbours for the image stored at path *f*.

    Values lie in [-255, 255] since the image is converted to 8-bit
    luminance ("L") first.
    """
    with Image.open(f) as image:
        width, height = image.size
        result = []
        img = image.convert("L").load()
        # Horizontal neighbour differences.
        for x in range(width - 1):
            for y in range(height):
                result.append(img[x, y] - img[x + 1, y])
        # Vertical neighbour differences.
        for x in range(width):
            for y in range(height - 1):
                result.append(img[x, y] - img[x, y + 1])
        return result


def histogram_reduce(histogram, values):
    """Accumulate difference *values* into *histogram*.

    Index 255 corresponds to a difference of 0; valid differences span
    [-255, 255], hence the 511-bin histogram.
    """
    for value in values:
        histogram[value + 255] += 1
    return histogram


def main():
    global indir, rangeradius
    _parse_flags()
    normalize_file_names_fn = numpy.vectorize(lambda x: indir + "/" + x)
    # map_reduce comes from the project-local `common` module.
    result = map_reduce(normalize_file_names_fn(os.listdir(indir)),
                        extract_differences,
                        histogram_reduce,
                        numpy.zeros(256 + 255, dtype=numpy.uint64))
    plt.bar(numpy.arange(-rangeradius, rangeradius + 1),
            result[255 - rangeradius: 255 + rangeradius + 1],
            align='center')
    plt.show()


main()
gpl-3.0
cuongnv23/ansible
lib/ansible/modules/network/aci/aci_l3out_route_tag_policy.py
22
3883
#!/usr/bin/python
# -*- coding: utf-8 -*-

# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = r'''
---
module: aci_l3out_route_tag_policy
short_description: Manage route tag policies on Cisco ACI fabrics (l3ext:RouteTagPol)
description:
- Manage route tag policies on Cisco ACI fabrics.
- More information from the internal APIC class I(l3ext:RouteTagPol) at
  U(https://developer.cisco.com/media/mim-ref/MO-l3extRouteTagPol.html).
author:
- Swetha Chunduri (@schunduri)
- Dag Wieers (@dagwieers)
- Jacob McGill (@jmcgill298)
version_added: '2.4'
requirements:
- ACI Fabric 1.0(3f)+
notes:
- The C(tenant) used must exist before using this module in your playbook.
  The M(aci_tenant) module can be used for this.
options:
  rtp:
    description:
    - The name of the route tag policy.
    required: yes
    aliases: [ name, rtp_name ]
  description:
    description:
    - The description for the route tag policy.
    aliases: [ descr ]
  tenant:
    description:
    - The name of the tenant.
    required: yes
    aliases: [ tenant_name ]
  tag:
    description:
    - The value of the route tag (range 0-4294967295).
    default: '4294967295'
  state:
    description:
    - Use C(present) or C(absent) for adding or removing.
    - Use C(query) for listing an object or multiple objects.
    choices: [ absent, present, query ]
    default: present
extends_documentation_fragment: aci
'''

# FIXME: Add more, better examples
EXAMPLES = r'''
- aci_l3out_route_tag_policy:
    hostname: apic
    username: admin
    password: SomeSecretPassword
    rtp: '{{ rtp_name }}'
    tenant: production
    tag: '{{ tag }}'
    description: '{{ description }}'
'''

RETURN = r'''
#
'''

from ansible.module_utils.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule


def main():
    """Entry point: create, delete or query an l3extRouteTagPol object."""
    argument_spec = aci_argument_spec
    argument_spec.update(
        rtp=dict(type='str', required=False, aliases=['name', 'rtp_name']),  # Not required for querying all objects
        tenant=dict(type='str', required=False, aliases=['tenant_name']),  # Not required for quering all objects
        description=dict(type='str', aliases=['descr']),
        tag=dict(type='int'),
        state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
        method=dict(type='str', choices=['delete', 'get', 'post'], aliases=['action'], removed_in_version='2.6'),  # Deprecated starting from v2.6
    )

    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
        # rtp and tenant are mandatory only when actually changing state.
        required_if=[
            ['state', 'absent', ['rtp', 'tenant']],
            ['state', 'present', ['rtp', 'tenant']],
        ],
    )

    rtp = module.params['rtp']
    description = module.params['description']
    tag = module.params['tag']
    state = module.params['state']

    aci = ACIModule(module)
    aci.construct_url(root_class='tenant', subclass_1='rtp')
    aci.get_existing()

    if state == 'present':
        # Filter out module parameters with null values
        aci.payload(
            aci_class='l3extRouteTagPol',
            class_config=dict(
                name=rtp,
                descr=description,
                tag=tag,
            ),
        )

        # Generate config diff which will be used as POST request body
        aci.get_diff(aci_class='l3extRouteTagPol')

        # Submit changes if module not in check_mode and the proposed is different than existing
        aci.post_config()

    elif state == 'absent':
        aci.delete_config()

    module.exit_json(**aci.result)


if __name__ == "__main__":
    main()
gpl-3.0
jonahbull/py-amqp
extra/release/sphinx-to-rst.py
44
1900
#!/usr/bin/env python
import os
import re
import sys

# Current directory used to resolve `.. include::` paths; updated as
# nested includes are processed.
dirname = ""

RE_CODE_BLOCK = re.compile(r'.. code-block:: (.+?)\s*$')
RE_INCLUDE = re.compile(r'.. include:: (.+?)\s*$')
RE_REFERENCE = re.compile(r':(.+?):`(.+?)`')


def include_file(lines, pos, match):
    """Replace the `.. include::` directive at *pos* with the converted
    contents of the referenced file.

    Temporarily re-points the module-level ``dirname`` so that nested
    includes resolve relative to the included file.
    """
    global dirname
    orig_filename = match.groups()[0]
    filename = os.path.join(dirname, orig_filename)
    fh = open(filename)
    try:
        old_dirname = dirname
        dirname = os.path.dirname(orig_filename)
        try:
            lines[pos] = sphinx_to_rst(fh)
        finally:
            dirname = old_dirname
    finally:
        fh.close()


def replace_code_block(lines, pos, match):
    """Drop the `.. code-block::` directive at *pos* and turn the nearest
    preceding non-blank line into a plain-RST literal block introducer.
    """
    lines[pos] = ""
    curpos = pos - 1
    # Find the first previous line with text to append "::" to it.
    while True:
        prev_line = lines[curpos]
        if not prev_line.isspace():
            prev_line_with_text = curpos
            break
        curpos -= 1
    if lines[prev_line_with_text].endswith(":"):
        lines[prev_line_with_text] += ":"
    else:
        lines[prev_line_with_text] += "::"


# Maps a pattern either to a replacement string (used with re.sub) or to
# a callable invoked with (lines, index, match).
TO_RST_MAP = {RE_CODE_BLOCK: replace_code_block,
              RE_REFERENCE: r'``\2``',
              RE_INCLUDE: include_file}


def _process(lines):
    """Apply every TO_RST_MAP rule to each line; returns a new list."""
    lines = list(lines)  # non-destructive
    for i, line in enumerate(lines):
        for regex, alt in TO_RST_MAP.items():
            if callable(alt):
                match = regex.match(line)
                if match:
                    alt(lines, i, match)
                    line = lines[i]
            else:
                lines[i] = regex.sub(alt, line)
    return lines


def sphinx_to_rst(fh):
    """Convert an iterable of Sphinx-flavoured RST lines to plain RST."""
    return "".join(_process(fh))


if __name__ == "__main__":
    # NOTE: the original declared `global dirname` here; at module scope
    # that is redundant and a SyntaxError on Python 3, so it was removed.
    dirname = os.path.dirname(sys.argv[1])
    fh = open(sys.argv[1])
    try:
        print(sphinx_to_rst(fh))
    finally:
        fh.close()
lgpl-2.1
aallai/pyobfsproxy
obfsproxy/test/test_aes.py
18
3375
"""Unit tests for obfsproxy's AES-CTR-128 wrapper.

NOTE(review): this module is Python 2 only — it uses the ``long`` builtin
and ``str.encode('hex')``, both removed in Python 3.
"""
import unittest

from Crypto.Cipher import AES
from Crypto.Util import Counter

import obfsproxy.common.aes as aes

import twisted.trial.unittest


class testAES_CTR_128_NIST(twisted.trial.unittest.TestCase):
    """Checks raw PyCrypto AES-CTR against known-answer vectors.

    The key/IV and blocks below appear to be the NIST SP 800-38A AES-128
    CTR vectors (the key 2b7e1516... is the standard F.5.1 key) — TODO
    confirm against the publication.
    """

    def _helper_test_vector(self, input_block, output_block, plaintext, ciphertext):
        """Assert the counter is at ``input_block`` and that encrypting
        ``plaintext`` yields ``ciphertext``.

        ``output_block`` (the expected keystream block) is accepted but not
        checked — see the XXX below.
        """
        # The counter must equal the expected input block before encryption;
        # next_value() also advances it for the following vector.
        self.assertEqual(long(input_block.encode('hex'), 16), self.ctr.next_value())
        ct = self.cipher.encrypt(plaintext)
        self.assertEqual(ct, ciphertext)
        # XXX how do we extract the keystream out of the AES object?

    def test_nist(self):
        # Prepare the cipher
        key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c"
        iv = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
        self.ctr = Counter.new(128, initial_value=long(iv.encode('hex'), 16))
        self.cipher = AES.new(key, AES.MODE_CTR, counter=self.ctr)

        # Vector 1: counter == IV.
        input_block = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff"
        output_block = "\xec\x8c\xdf\x73\x98\x60\x7c\xb0\xf2\xd2\x16\x75\xea\x9e\xa1\xe4"
        plaintext = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
        ciphertext = "\x87\x4d\x61\x91\xb6\x20\xe3\x26\x1b\xef\x68\x64\x99\x0d\xb6\xce"
        self._helper_test_vector(input_block, output_block, plaintext, ciphertext)

        # Vector 2: counter incremented once (low byte wraps fe -> ff 00).
        input_block = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xff\x00"
        output_block = "\x36\x2b\x7c\x3c\x67\x73\x51\x63\x18\xa0\x77\xd7\xfc\x50\x73\xae"
        plaintext = "\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
        ciphertext = "\x98\x06\xf6\x6b\x79\x70\xfd\xff\x86\x17\x18\x7b\xb9\xff\xfd\xff"
        self._helper_test_vector(input_block, output_block, plaintext, ciphertext)

        # Vector 3.
        input_block = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xff\x01"
        output_block = "\x6a\x2c\xc3\x78\x78\x89\x37\x4f\xbe\xb4\xc8\x1b\x17\xba\x6c\x44"
        plaintext = "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
        ciphertext = "\x5a\xe4\xdf\x3e\xdb\xd5\xd3\x5e\x5b\x4f\x09\x02\x0d\xb0\x3e\xab"
        self._helper_test_vector(input_block, output_block, plaintext, ciphertext)

        # Vector 4.
        input_block = "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xff\x02"
        output_block = "\xe8\x9c\x39\x9f\xf0\xf1\x98\xc6\xd4\x0a\x31\xdb\x15\x6c\xab\xfe"
        plaintext = "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17\xad\x2b\x41\x7b\xe6\x6c\x37\x10"
        ciphertext = "\x1e\x03\x1d\xda\x2f\xbe\x03\xd1\x79\x21\x70\xa0\xf3\x00\x9c\xee"
        self._helper_test_vector(input_block, output_block, plaintext, ciphertext)


class testAES_CTR_128_simple(twisted.trial.unittest.TestCase):
    """Round-trip test of the project wrapper ``aes.AES_CTR_128``."""

    def test_encrypt_decrypt_small_ASCII(self):
        """
        Validate that decryption and encryption work as intended on a small
        ASCII string.
        """
        self.key = "\xe3\xb0\xc4\x42\x98\xfc\x1c\x14\x9a\xfb\xf4\xc8\x99\x6f\xb9\x24"
        self.iv = "\x27\xae\x41\xe4\x64\x9b\x93\x4c\xa4\x95\x99\x1b\x78\x52\xb8\x55"
        test_string = "This unittest kills fascists."
        # Two independent cipher objects with the same key/IV: CTR mode is
        # symmetric, so the second one's crypt() decrypts the first's output.
        cipher1 = aes.AES_CTR_128(self.key, self.iv)
        cipher2 = aes.AES_CTR_128(self.key, self.iv)
        ct = cipher1.crypt(test_string)
        pt = cipher2.crypt(ct)
        self.assertEqual(test_string, pt)

if __name__ == '__main__':
    unittest.main()
bsd-3-clause
iDTLabssl/hr
__unported__/hr_worked_days_from_timesheet/hr_payslip.py
21
5337
# -*- coding:utf-8 -*-
##############################################################################
#
#     Copyright (C) 2012 - 2014 Odoo Canada. All Rights Reserved.
#
#     This program is free software: you can redistribute it and/or modify
#     it under the terms of the GNU Affero General Public License as published
#     by
#     the Free Software Foundation, either version 3 of the License, or
#     (at your option) any later version.
#
#     This program is distributed in the hope that it will be useful,
#     but WITHOUT ANY WARRANTY; without even the implied warranty of
#     MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#     GNU Affero General Public License for more details.
#
#     You should have received a copy of the GNU Affero General Public License
#     along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from openerp.osv import orm
from openerp.tools.translate import _
from datetime import datetime
from openerp.tools import DEFAULT_SERVER_DATE_FORMAT


class hr_payslip(orm.Model):
    _name = 'hr.payslip'
    _inherit = 'hr.payslip'

    def timesheet_mapping(
        self, cr, uid, timesheet_sheets, payslip,
        date_from, date_to, date_format,
        context=None,
    ):
        """Build worked-days dicts from approved timesheet sheets.

        :param timesheet_sheets: browse records of hr_timesheet_sheet that
            overlap the payslip period
        :param payslip: the payslip browse record being filled
        :param date_from: payslip period start (server date format string)
        :param date_to: payslip period end (server date format string)
        :param date_format: the user's display date format (for labels only)
        :return: dict mapping timesheet-sheet id -> worked-days values dict
        """
        worked_days = {}

        # Create one worked days record for each timesheet sheet
        for ts_sheet in timesheet_sheets:
            # Label for the worked-days line: the sheet's start date rendered
            # in the user's date format.
            # BUGFIX: the original assigned this formatted string back to the
            # ``date_from`` parameter, so the period check below compared a
            # user-formatted date against server-format ``ts.date`` values.
            sheet_label_date = datetime.strptime(
                ts_sheet.date_from, DEFAULT_SERVER_DATE_FORMAT
            ).strftime(date_format)

            # Create a worked days record with no time
            worked_days[ts_sheet.id] = {
                'name': _('Timesheet %s') % sheet_label_date,
                'number_of_hours': 0,
                'contract_id': payslip.contract_id.id,
                'code': 'TS',
                'imported_from_timesheet': True,
            }

            for ts in ts_sheet.timesheet_ids:
                # The timesheet_sheet overlaps the payslip period,
                # but this does not mean that every timesheet in it
                # overlaps the payslip period.
                if date_from <= ts.date <= date_to:
                    worked_days[ts_sheet.id][
                        'number_of_hours'
                    ] += ts.unit_amount

        return worked_days

    def import_worked_days(self, cr, uid, payslip_id, context=None):
        """Create worked-days lines on the payslip from approved timesheets.

        Deletes previously imported lines first so the import can be re-run
        after timesheet corrections.  Raises except_orm when no approved
        timesheet overlaps the payslip period.
        """
        # NOTE(review): browse() is indexed with [0], which assumes callers
        # pass ``payslip_id`` as a list of ids — confirm against callers.
        payslip = self.browse(cr, uid, payslip_id, context=context)[0]
        employee = payslip.employee_id
        date_from = payslip.date_from
        date_to = payslip.date_to

        # get user date format
        lang_pool = self.pool['res.lang']
        user_pool = self.pool['res.users']
        code = user_pool.context_get(cr, uid).get('lang', 'en_US')
        lang_id = lang_pool.search(
            cr, uid, [('code', '=', code)], context=context
        )
        date_format = lang_pool.read(
            cr, uid, lang_id, ['date_format'], context=context
        )[0]['date_format']

        # Delete old imported worked_days
        # The reason to delete these records is that the user may make
        # corrections to his timesheets and then reimport these.
        old_worked_days_ids = [
            wd.id for wd in payslip.worked_days_line_ids
            # We only remove records that were imported from
            # timesheets and not those manually entered.
            if wd.imported_from_timesheet
        ]
        self.pool.get(
            'hr.payslip.worked_days'
        ).unlink(cr, uid, old_worked_days_ids, context)

        # get timesheet sheets of employee
        timesheet_sheets = [
            ts_sheet for ts_sheet in employee.timesheet_sheet_ids
            if (
                # We need only the timesheet sheets that overlap
                # the payslip period.
                date_from <= ts_sheet.date_from <= date_to
                or date_from <= ts_sheet.date_to <= date_to
            )
            # We want only approved timesheets
            and ts_sheet.state == 'done'
        ]

        if not timesheet_sheets:
            raise orm.except_orm(
                _("Warning"),
                _("""\
Sorry, but there is no approved Timesheets for the entire Payslip period"""),
            )

        # The reason to call this method is for other modules to modify it.
        worked_days = self.timesheet_mapping(
            cr, uid, timesheet_sheets, payslip,
            date_from, date_to, date_format,
            context=context,
        )

        # one2many create commands: (0, 0, values)
        worked_days = [(0, 0, wd) for key, wd in worked_days.items()]

        self.write(
            cr, uid, payslip_id,
            {'worked_days_line_ids': worked_days},
            context=context
        )
agpl-3.0
Maratyszcza/confu
confu/tools/collection.py
1
1202
from __future__ import absolute_import

import logging
logger = logging.getLogger("confu")


class ToolCollection:
    """Lazy, cached collection of build tools for a target platform.

    Tools are created on first attribute access (see ``__getattr__``) and
    memoized both on this instance and in the process-wide
    ``confu.globals.tools`` registry.
    """

    def __init__(self, target):
        # Target platform passed to Tool.for_name when a tool is first built.
        self.target = target

    def __iter__(self):
        """Yield every Tool instance already materialized on this object."""
        from confu.tools import Tool
        for name in dir(self):
            member = getattr(self, name)
            if isinstance(member, Tool):
                yield member

    def __str__(self):
        return "toolset [" + ", ".join(str(tool) for tool in self) + "]"

    def __repr__(self):
        return str(self)

    def __getattr__(self, name):
        """Create (or fetch from the global cache) the tool called ``name``.

        Only runs for attributes not already set on the instance; after the
        ``setattr`` below, subsequent lookups bypass this method entirely.
        """
        if name.startswith("__") and name.endswith("__"):
            # Query for built-in method, e.g. dir
            raise AttributeError()
        import confu.globals
        if name in confu.globals.tools:
            return confu.globals.tools[name]
        from confu.tools import Tool
        tool = Tool.for_name(name, self.target)
        # Cache globally and on the instance so the tool is built only once.
        confu.globals.tools[name] = tool
        setattr(self, name, tool)
        return tool

    def _record_vars(self, ninja):
        # Forward ninja variable recording to every materialized tool.
        for tool in self:
            tool._record_vars(ninja)

    def _record_rules(self, ninja):
        # Forward ninja rule recording to every materialized tool.
        for tool in self:
            tool._record_rules(ninja)
mit
htwenhe/DJOA
env/Lib/site-packages/django/contrib/gis/db/backends/postgis/adapter.py
373
1695
""" This object provides quoting for GEOS geometries into PostgreSQL/PostGIS. """ from __future__ import unicode_literals from psycopg2 import Binary from psycopg2.extensions import ISQLQuote class PostGISAdapter(object): def __init__(self, geom, geography=False): "Initializes on the geometry." # Getting the WKB (in string form, to allow easy pickling of # the adaptor) and the SRID from the geometry. self.ewkb = bytes(geom.ewkb) self.srid = geom.srid self.geography = geography self._adapter = Binary(self.ewkb) def __conform__(self, proto): # Does the given protocol conform to what Psycopg2 expects? if proto == ISQLQuote: return self else: raise Exception('Error implementing psycopg2 protocol. Is psycopg2 installed?') def __eq__(self, other): if not isinstance(other, PostGISAdapter): return False return (self.ewkb == other.ewkb) and (self.srid == other.srid) def __hash__(self): return hash((self.ewkb, self.srid)) def __str__(self): return self.getquoted() def prepare(self, conn): """ This method allows escaping the binary in the style required by the server's `standard_conforming_string` setting. """ self._adapter.prepare(conn) def getquoted(self): "Returns a properly quoted string for use in PostgreSQL/PostGIS." # psycopg will figure out whether to use E'\\000' or '\000' return str('%s(%s)' % ( 'ST_GeogFromWKB' if self.geography else 'ST_GeomFromEWKB', self._adapter.getquoted().decode()) )
mit
tum-pbs/PhiFlow
phi/field/_angular_velocity.py
1
2369
# BUGFIX: ``Callable`` must come from ``collections.abc`` — the alias in
# ``collections`` was deprecated since Python 3.3 and removed in 3.10, so
# the original import breaks on current interpreters.
from collections.abc import Callable
from numbers import Number

from phi import math
from ._field import Field
from ..geom import Geometry
from ..math import Shape, GLOBAL_AXIS_ORDER, spatial


class AngularVelocity(Field):
    """
    Model of a single vortex or set of vortices.
    The falloff of the velocity magnitude can be controlled.

    Without a specified falloff, the velocity increases linearly with the distance from the vortex center.
    This is the case with rotating rigid bodies, for example.
    """

    def __init__(self,
                 location: math.Tensor or tuple or list or Number,
                 strength: math.Tensor or Number = 1.0,
                 falloff: Callable = None,
                 component: str = None):
        # NOTE: the ``X or Y`` annotations evaluate to the first truthy
        # operand; they are kept as documentation only, matching the
        # project's existing style.
        location = math.wrap(location)
        strength = math.wrap(strength)
        assert location.shape.channel.names == ('vector',), "location must have a single channel dimension called 'vector'"
        assert location.shape.spatial.is_empty, "location tensor cannot have any spatial dimensions"
        self.location = location
        self.strength = strength
        self.falloff = falloff
        self.component = component
        # One size-1 spatial dim per vector component, named via the global
        # axis order, so the field advertises a fully spatial shape.
        spatial_names = [GLOBAL_AXIS_ORDER.axis_name(i, location.vector.size) for i in range(location.vector.size)]
        self._shape = location.shape & spatial(**{dim: 1 for dim in spatial_names})

    def _sample(self, geometry: Geometry) -> math.Tensor:
        """Sample the angular-velocity field at the centers of `geometry`."""
        points = geometry.center
        distances = points - self.location
        # Optional radial falloff scales the rotation strength per point.
        strength = self.strength if self.falloff is None else self.strength * self.falloff(distances)
        velocity = math.cross_product(strength, distances)
        # Sum contributions of multiple vortices (batch dims of `location`
        # not shared with the sample points).
        velocity = math.sum(velocity, self.location.shape.batch.without(points.shape))
        if self.component:
            velocity = velocity.vector[self.component]
        return velocity

    @property
    def shape(self) -> Shape:
        return self._shape

    def __getitem__(self, item: dict):
        # Only slicing along 'vector' is meaningful for this field.
        assert all(dim == 'vector' for dim in item), f"Cannot slice AngularVelocity with {item}"
        if 'vector' in item:
            assert item['vector'] == 0 or self.component is None
            component = self.shape.spatial.names[item['vector']]
            return AngularVelocity(self.location, self.strength, self.falloff, component)
        else:
            return self
mit
NeutronUfscarDatacom/DriverDatacom
config.py
1
1591
from oslo.config import cfg

"""
 Configuration for the datacom switch.

 The options are used to locate datacom switches and identify openstack.
 The dm_username and dm_password are the credentials and the dm_host is the
 IP for the switch. The region name is the identifier for the controller.
"""

# NOTE(review): ``_`` (the translation function) is not imported here — it is
# assumed to be installed as a builtin by the host project's i18n setup.
# Confirm before using this module standalone.
DATACOM_DRIVER_OPTS = [
    cfg.StrOpt('dm_username',
               default='',
               help=_('(required) Username for the dm connection.'
                      'If not set, the identification will fail.')),
    cfg.StrOpt('dm_password',
               default='',
               secret=True,  # do not expose value in the logs
               help=_('(required) Password for the dm connection.'
                      'If not set, the identification will fail.')),
    cfg.StrOpt('dm_host',
               default='',
               help=_('(required) IP to be connected to..'
                      'If not set, the identification will fail.')),
    cfg.IntOpt('dm_port',
               default=443,
               help=_('Port to be connected to, default is 443.')),
    cfg.StrOpt('dm_method',
               default='https',
               help=_('Connection method (default is https)')),
    cfg.StrOpt('region_name',
               default='RegionOne',
               help=_('If multiple OpenStack/Neutron controllers are involved,'
                      'region_name identifies each one. Most of the times the '
                      'value will be "RegionOne", which is the default value '))
]


def setup_config():
    # Register the driver options under the "ml2_datacom" group so they can
    # be set in the [ml2_datacom] section of the config file.
    cfg.CONF.register_opts(DATACOM_DRIVER_OPTS, "ml2_datacom")
apache-2.0
makacodewalker/etsgh
django/contrib/gis/tests/layermap/models.py
239
2241
# Models exercised by GeoDjango's LayerMapping tests (per the module path);
# each model pairs ordinary fields with a geometry field of a given type.
from django.contrib.gis.db import models

class State(models.Model):
    name = models.CharField(max_length=20)
    objects = models.GeoManager()

class County(models.Model):
    name = models.CharField(max_length=25)
    state = models.ForeignKey(State)
    mpoly = models.MultiPolygonField(srid=4269) # Multipolygon in NAD83
    objects = models.GeoManager()

class CountyFeat(models.Model):
    name = models.CharField(max_length=25)
    poly = models.PolygonField(srid=4269)
    objects = models.GeoManager()

class City(models.Model):
    name = models.CharField(max_length=25)
    population = models.IntegerField()
    density = models.DecimalField(max_digits=7, decimal_places=1)
    dt = models.DateField()
    point = models.PointField()
    objects = models.GeoManager()

class Interstate(models.Model):
    name = models.CharField(max_length=20)
    length = models.DecimalField(max_digits=6, decimal_places=2)
    path = models.LineStringField()
    objects = models.GeoManager()

# Same as `City` above, but for testing model inheritance.
class CityBase(models.Model):
    name = models.CharField(max_length=25)
    population = models.IntegerField()
    density = models.DecimalField(max_digits=7, decimal_places=1)
    point = models.PointField()
    objects = models.GeoManager()

class ICity1(CityBase):
    dt = models.DateField()

class ICity2(ICity1):
    dt_time = models.DateTimeField(auto_now=True)

class Invalid(models.Model):
    point = models.PointField()

# Mapping dictionaries for the models above: model field name -> OGR layer
# attribute (or geometry type string for geometry fields).
co_mapping = {'name' : 'Name',
              'state' : {'name' : 'State'}, # ForeignKey's use another mapping dictionary for the _related_ Model (State in this case).
              'mpoly' : 'MULTIPOLYGON', # Will convert POLYGON features into MULTIPOLYGONS.
              }

cofeat_mapping = {'name' : 'Name',
                  'poly' : 'POLYGON',
                  }

city_mapping = {'name' : 'Name',
                'population' : 'Population',
                'density' : 'Density',
                'dt' : 'Created',
                'point' : 'POINT',
                }

inter_mapping = {'name' : 'Name',
                 'length' : 'Length',
                 'path' : 'LINESTRING',
                 }
bsd-3-clause
scanny/python-pptx
pptx/dml/chtfmt.py
1
1409
# encoding: utf-8

"""
|ChartFormat| and related objects.

|ChartFormat| acts as proxy for the `spPr` element, which provides visual
shape properties such as line and fill for chart elements.
"""

from __future__ import absolute_import, division, print_function, unicode_literals

from .fill import FillFormat
from .line import LineFormat
from ..shared import ElementProxy
from ..util import lazyproperty


class ChartFormat(ElementProxy):
    """Proxy for visual shape properties of a chart element.

    Obtained from the :attr:`format` property of objects such as |Axis|,
    |Series|, and |MajorGridlines|. Exposes exactly two (lazily created)
    members: :attr:`fill`, a |FillFormat|, and :attr:`line`, a
    |LineFormat|, each wrapping the element's `spPr` child (created on
    first access if not present).
    """

    __slots__ = ("_fill", "_line")

    @lazyproperty
    def fill(self):
        """|FillFormat| for this element, e.g. to read or set fill color."""
        shape_properties = self._element.get_or_add_spPr()
        return FillFormat.from_fill_parent(shape_properties)

    @lazyproperty
    def line(self):
        """|LineFormat| for this element, e.g. to read or set line color
        and style."""
        shape_properties = self._element.get_or_add_spPr()
        return LineFormat(shape_properties)
mit
ejpbruel/servo
tests/wpt/css-tests/tools/manifest/manifest.py
89
12720
import json import os from collections import defaultdict from item import item_types, ManualTest, WebdriverSpecTest, Stub, RefTest, TestharnessTest from log import get_logger from sourcefile import SourceFile from utils import from_os_path, to_os_path CURRENT_VERSION = 2 class ManifestError(Exception): pass class ManifestVersionMismatch(ManifestError): pass class Manifest(object): def __init__(self, git_rev=None, url_base="/"): # Dict of item_type: {path: set(manifest_items)} self._data = dict((item_type, defaultdict(set)) for item_type in item_types) self.rev = git_rev self.url_base = url_base self.local_changes = LocalChanges(self) # reftest nodes arranged as {path: set(manifest_items)} self.reftest_nodes = defaultdict(set) self.reftest_nodes_by_url = {} def _included_items(self, include_types=None): if include_types is None: include_types = item_types for item_type in include_types: paths = self._data[item_type].copy() for local_types, local_paths in self.local_changes.itertypes(item_type): for path, items in local_paths.iteritems(): paths[path] = items for path in self.local_changes.iterdeleted(): if path in paths: del paths[path] yield item_type, paths def contains_path(self, path): return any(path in paths for _, paths in self._included_items()) def add(self, item): if item is None: return is_reference = False if isinstance(item, RefTest): self.reftest_nodes[item.path].add(item) self.reftest_nodes_by_url[item.url] = item is_reference = item.is_reference if not is_reference: self._add(item) item.manifest = self def _add(self, item): self._data[item.item_type][item.path].add(item) def extend(self, items): for item in items: self.add(item) def remove_path(self, path): for item_type in item_types: if path in self._data[item_type]: del self._data[item_type][path] def itertypes(self, *types): if not types: types = None for item_type, items in self._included_items(types): for item in sorted(items.items()): yield item def __iter__(self): for item in 
self.itertypes(): yield item def __getitem__(self, path): for _, paths in self._included_items(): if path in paths: return paths[path] raise KeyError def get_reference(self, url): if url in self.local_changes.reftest_nodes_by_url: return self.local_changes.reftest_nodes_by_url[url] if url in self.reftest_nodes_by_url: return self.reftest_nodes_by_url[url] return None def _committed_with_path(self, rel_path): rv = set() for paths_items in self._data.itervalues(): rv |= paths_items.get(rel_path, set()) if rel_path in self.reftest_nodes: rv |= self.reftest_nodes[rel_path] return rv def _committed_paths(self): rv = set() for paths_items in self._data.itervalues(): rv |= set(paths_items.keys()) return rv def update(self, tests_root, url_base, new_rev, committed_changes=None, local_changes=None, remove_missing_local=False): if local_changes is None: local_changes = {} if committed_changes is not None: for rel_path, status in committed_changes: self.remove_path(rel_path) if status == "modified": use_committed = rel_path in local_changes source_file = SourceFile(tests_root, rel_path, url_base, use_committed=use_committed) self.extend(source_file.manifest_items()) self.local_changes = LocalChanges(self) local_paths = set() for rel_path, status in local_changes.iteritems(): local_paths.add(rel_path) if status == "modified": existing_items = self._committed_with_path(rel_path) source_file = SourceFile(tests_root, rel_path, url_base, use_committed=False) local_items = set(source_file.manifest_items()) updated_items = local_items - existing_items self.local_changes.extend(updated_items) else: self.local_changes.add_deleted(rel_path) if remove_missing_local: for path in self._committed_paths() - local_paths: self.local_changes.add_deleted(path) self.update_reftests() if new_rev is not None: self.rev = new_rev self.url_base = url_base def update_reftests(self): reftest_nodes = self.reftest_nodes.copy() for path, items in self.local_changes.reftest_nodes.iteritems(): 
reftest_nodes[path] |= items #TODO: remove locally deleted files tests = set() for items in reftest_nodes.values(): tests |= set(item for item in items if not item.is_reference) has_inbound = set() for path, items in reftest_nodes.iteritems(): for item in items: for ref_url, ref_type in item.references: has_inbound.add(ref_url) if self.local_changes.reftest_nodes: target = self.local_changes else: target = self #TODO: Warn if there exist unreachable reftest nodes for path, items in reftest_nodes.iteritems(): for item in items: if item.url in has_inbound: continue target._data["reftest"][path].add(item) def to_json(self): out_items = { item_type: sorted( test.to_json() for _, tests in items.iteritems() for test in tests ) for item_type, items in self._data.iteritems() } reftest_nodes = {from_os_path(key): [v.to_json() for v in value] for key, value in self.reftest_nodes.iteritems()} rv = {"url_base": self.url_base, "rev": self.rev, "local_changes": self.local_changes.to_json(), "items": out_items, "reftest_nodes": reftest_nodes, "version": CURRENT_VERSION} return rv @classmethod def from_json(cls, tests_root, obj): version = obj.get("version") if version != CURRENT_VERSION: raise ManifestVersionMismatch self = cls(git_rev=obj["rev"], url_base=obj.get("url_base", "/")) if not hasattr(obj, "iteritems"): raise ManifestError item_classes = {"testharness": TestharnessTest, "reftest": RefTest, "manual": ManualTest, "stub": Stub, "wdspec": WebdriverSpecTest} source_files = {} for k, values in obj["items"].iteritems(): if k not in item_types: raise ManifestError for v in values: manifest_item = item_classes[k].from_json(self, tests_root, v, source_files=source_files) self._add(manifest_item) for path, values in obj["reftest_nodes"].iteritems(): path = to_os_path(path) for v in values: item = RefTest.from_json(self, tests_root, v, source_files=source_files) self.reftest_nodes[path].add(item) self.reftest_nodes_by_url[v["url"]] = item self.local_changes = 
LocalChanges.from_json(self, tests_root, obj["local_changes"], source_files=source_files) return self class LocalChanges(object): def __init__(self, manifest): self.manifest = manifest self._data = dict((item_type, defaultdict(set)) for item_type in item_types) self._deleted = set() self.reftest_nodes = defaultdict(set) self.reftest_nodes_by_url = {} def add(self, item): if item is None: return is_reference = False if isinstance(item, RefTest): self.reftest_nodes[item.path].add(item) self.reftest_nodes_by_url[item.url] = item is_reference = item.is_reference if not is_reference: self._add(item) item.manifest = self.manifest def _add(self, item): self._data[item.item_type][item.path].add(item) def extend(self, items): for item in items: self.add(item) def add_deleted(self, path): self._deleted.add(path) def is_deleted(self, path): return path in self._deleted def itertypes(self, *types): for item_type in types: yield item_type, self._data[item_type] def iterdeleted(self): for item in self._deleted: yield item def __getitem__(self, item_type): return self._data[item_type] def to_json(self): reftest_nodes = {from_os_path(key): [v.to_json() for v in value] for key, value in self.reftest_nodes.iteritems()} rv = {"items": defaultdict(dict), "reftest_nodes": reftest_nodes, "deleted": [from_os_path(path) for path in self._deleted]} for test_type, paths in self._data.iteritems(): for path, tests in paths.iteritems(): path = from_os_path(path) rv["items"][test_type][path] = [test.to_json() for test in tests] return rv @classmethod def from_json(cls, manifest, tests_root, obj, source_files=None): self = cls(manifest) if not hasattr(obj, "iteritems"): raise ManifestError item_classes = {"testharness": TestharnessTest, "reftest": RefTest, "manual": ManualTest, "stub": Stub, "wdspec": WebdriverSpecTest} for test_type, paths in obj["items"].iteritems(): for path, tests in paths.iteritems(): for test in tests: manifest_item = item_classes[test_type].from_json(manifest, tests_root, 
test, source_files=source_files) self.add(manifest_item) for path, values in obj["reftest_nodes"].iteritems(): path = to_os_path(path) for v in values: item = RefTest.from_json(self.manifest, tests_root, v, source_files=source_files) self.reftest_nodes[path].add(item) self.reftest_nodes_by_url[item.url] = item for item in obj["deleted"]: self.add_deleted(to_os_path(item)) return self def load(tests_root, manifest): logger = get_logger() # "manifest" is a path or file-like object. if isinstance(manifest, basestring): if os.path.exists(manifest): logger.debug("Opening manifest at %s" % manifest) else: logger.debug("Creating new manifest at %s" % manifest) try: with open(manifest) as f: rv = Manifest.from_json(tests_root, json.load(f)) except IOError: rv = Manifest(None) return rv return Manifest.from_json(tests_root, json.load(manifest)) def write(manifest, manifest_path): with open(manifest_path, "wb") as f: json.dump(manifest.to_json(), f, sort_keys=True, indent=2, separators=(',', ': ')) f.write("\n")
mpl-2.0
gg7/sentry
src/sentry/web/frontend/organization_api_keys.py
23
2234
from __future__ import absolute_import

# ``reduce`` is a builtin only on Python 2; importing it from functools
# works on both Python 2.6+ and Python 3.
from functools import reduce
from operator import or_

from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect

from sentry.models import (
    ApiKey, AuditLogEntry, AuditLogEntryEvent, OrganizationMemberType
)
from sentry.web.frontend.base import OrganizationView

# Scopes granted to a freshly created API key.
DEFAULT_SCOPES = [
    'project:read',
    'event:read',
    'team:read',
    'org:read',
    'member:read',
]


class OrganizationApiKeysView(OrganizationView):
    """Admin view that lists an organization's API keys and handles the
    'newkey' / 'removekey' POST operations, writing an audit log entry for
    each change."""

    required_access = OrganizationMemberType.ADMIN

    def handle(self, request, organization):
        if request.POST.get('op') == 'newkey':
            # Combine the default scope flags into a single bitmask.
            key = ApiKey.objects.create(
                organization=organization,
                scopes=reduce(or_, [getattr(ApiKey.scopes, s)
                                    for s in DEFAULT_SCOPES])
            )

            AuditLogEntry.objects.create(
                organization=organization,
                actor=request.user,
                ip_address=request.META['REMOTE_ADDR'],
                target_object=key.id,
                event=AuditLogEntryEvent.APIKEY_ADD,
                data=key.get_audit_log_data(),
            )

            redirect_uri = reverse('sentry-organization-api-key-settings', args=[
                organization.slug, key.id,
            ])
            return HttpResponseRedirect(redirect_uri)

        elif request.POST.get('op') == 'removekey':
            key = ApiKey.objects.get(
                id=request.POST.get('kid'),
                organization=organization,
            )
            audit_data = key.get_audit_log_data()
            # BUGFIX: capture the id before delete() — Django invalidates
            # the primary key of a deleted instance (it is set to None in
            # recent versions), so reading ``key.id`` afterwards is
            # unreliable.
            key_id = key.id
            key.delete()

            AuditLogEntry.objects.create(
                organization=organization,
                actor=request.user,
                ip_address=request.META['REMOTE_ADDR'],
                target_object=key_id,
                event=AuditLogEntryEvent.APIKEY_REMOVE,
                data=audit_data,
            )

            return HttpResponseRedirect(request.path)

        key_list = sorted(ApiKey.objects.filter(
            organization=organization,
        ), key=lambda x: x.label)

        context = {
            'key_list': key_list,
        }

        return self.respond('sentry/organization-api-keys.html', context)
bsd-3-clause
TheWardoctor/Wardoctors-repo
script.module.covenant/lib/resources/lib/sources/en/to_be_fixed/needsfixing/bobby.py
9
5820
# -*- coding: utf-8 -*- ''' Covenant Add-on This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. ''' import re, urllib, urlparse, base64, json from resources.lib.modules import cleantitle from resources.lib.modules import client from resources.lib.modules import directstream from resources.lib.modules import cache class source: def __init__(self): self.priority = 1 self.language = ['en'] self.domains = ['bobbyhd.com'] self.base_link = 'http://webapp.bobbyhd.com' self.search_link = '/search.php?keyword=%s' self.player_link = '/player.php?alias=%s' def matchAlias(self, title, aliases): try: for alias in aliases: if cleantitle.get(title) == cleantitle.get(alias['title']): return True except: return False def searchMovie(self, title, year, aliases, headers): try: title = cleantitle.normalize(title) title = cleantitle.getsearch(title) query = self.search_link % ('%s+%s' % (urllib.quote_plus(title), year)) query = urlparse.urljoin(self.base_link, query) r = client.request(query, timeout='15', headers=headers, mobile=True) match = re.compile('alias=(.+?)\'">(.+?)</a>').findall(r) match = [(i[0], re.findall('(.+?) 
\((\d{4})', i[1])) for i in match] match = [(i[0], i[1][0][0], i[1][0][1]) for i in match if len(i[1]) > 0] r = [(i[0],i[1]) for i in match if self.matchAlias(i[1], aliases) and year == i[2]][0] return r except: return def movie(self, imdb, title, localtitle, aliases, year): try: headers = {'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13F69'} aliases.append({'country': 'us', 'title': title}) r = self.searchMovie(title, year, aliases, headers) url = {'type': 'movie', 'id': r[0], 'episode': 0, 'headers': headers} url = urllib.urlencode(url) return url except: return def tvshow(self, imdb, tvdb, tvshowtitle, localtvshowtitle, aliases, year): try: headers = {'User-Agent': 'Mozilla/5.0 (iPhone; CPU iPhone OS 9_3_2 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Mobile/13F69'} aliases.append({'country': 'us', 'title': tvshowtitle}) url = {'tvshowtitle': tvshowtitle, 'year': year, 'headers': headers, 'aliases': aliases} url = urllib.urlencode(url) return url except: return def episode(self, url, imdb, tvdb, title, premiered, season, episode): try: data = urlparse.parse_qs(url) data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data]) headers = eval(data['headers']) aliases = eval(data['aliases']) title = data['tvshowtitle'] if 'tvshowtitle' in data else data['title'] title = cleantitle.getsearch(title) query = self.search_link % (urllib.quote_plus(title)) query = urlparse.urljoin(self.base_link, query) r = client.request(query, headers=headers, timeout='30', mobile=True) match = re.compile('alias=(.+?)\'">(.+?)</a>').findall(r) r = [(i[0], re.findall('(.+?)\s+-\s+Season\s+(\d+)', i[1])) for i in match] r = [(i[0], i[1][0][0], i[1][0][1]) for i in r if len(i[1]) > 0] r = [i[0] for i in r if self.matchAlias(i[1], aliases) and int(season) == int(i[2])][0] url = {'type': 'tvshow', 'id': r, 'episode': episode, 'season': season, 'headers': headers} url = urllib.urlencode(url) return url 
except: return def sources(self, url, hostDict, hostprDict): try: sources = [] data = urlparse.parse_qs(url) data = dict([(i, data[i][0]) if data[i] else (i, '') for i in data]) if data['id'] == None: return sources headers = eval(data['headers']) url = urlparse.urljoin(self.base_link, self.player_link % data['id']) r = client.request(url, headers=headers, timeout='30', mobile=True) if data['type'] == 'tvshow': match = re.compile('changevideo\(\'(.+?)\'\)".+?data-toggle="tab">(.+?)\..+?</a>').findall(r) else: match = re.compile('changevideo\(\'(.+?)\'\)".+?data-toggle="tab">(.+?)</a>').findall(r) for url, ep in match: try: if data['type'] == 'tvshow': if int(data['episode']) != int(ep): raise Exception() quality = directstream.googletag(url)[0]['quality'] sources.append({'source': 'gvideo', 'quality': quality, 'language': 'en', 'url': url, 'direct': True, 'debridonly': False}) except: pass return sources except: return sources def resolve(self, url): return directstream.googlepass(url)
apache-2.0
eviljeff/zamboni
mkt/constants/applications.py
3
1455
from tower import ugettext_lazy as _ class DEVICE_DESKTOP(object): id = 1 name = _(u'Desktop') class_name = 'desktop' api_name = 'desktop' class DEVICE_MOBILE(object): id = 2 name = _(u'Firefox Mobile') class_name = 'android-mobile' api_name = 'android-mobile' class DEVICE_TABLET(object): id = 3 name = _(u'Firefox Tablet') class_name = 'android-tablet' api_name = 'android-tablet' class DEVICE_GAIA(object): id = 4 name = _(u'Firefox OS') class_name = 'firefoxos' api_name = 'firefoxos' DEVICE_TYPE_LIST = [DEVICE_DESKTOP, DEVICE_MOBILE, DEVICE_TABLET, DEVICE_GAIA] DEVICE_TYPES = dict((d.id, d) for d in DEVICE_TYPE_LIST) REVERSE_DEVICE_LOOKUP = dict((d.id, d.api_name) for d in DEVICE_TYPE_LIST) DEVICE_LOOKUP = dict((d.api_name, d) for d in DEVICE_TYPE_LIST) # For search and feed. DEVICE_CHOICES_IDS = { 'desktop': DEVICE_DESKTOP.id, 'mobile': DEVICE_MOBILE.id, 'tablet': DEVICE_TABLET.id, 'firefoxos': DEVICE_GAIA.id, } def get_device(request): # Fireplace sends `dev` and `device`. See the API docs. When `dev` is # 'android' we also need to check `device` to pick a device object. dev = request.GET.get('dev') device = request.GET.get('device') if dev == 'android' and device: dev = '%s-%s' % (dev, device) return DEVICE_LOOKUP.get(dev) def get_device_id(request): return getattr(get_device(request), 'id', None)
bsd-3-clause
juergenhamel/cuon
cuon_server/src/cuon/Reports/report_grave_plant_lists.py
2
2537
# -*- coding: utf-8 -*- ##Copyright (C) [2003-2007] [Jürgen Hamel, D-32584 Löhne] ##This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as ##published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. ##This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied ##warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License ##for more details. ##You should have received a copy of the GNU General Public License along with this program; if not, write to the ##Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. import os import types class report_grave_plant_lists: def __init__(self, nRows = 0, sName = 'standard', iOrderSort=0): self.nRows = nRows self.sName = sName self.iOrderSort = iOrderSort self.dicReportData = {} self.dicReportData['Title'] = _(sName + ' generatet by CUON') self.dicReportData['lPageNumber'] = _('Pagenumber:') self.dicReportData['fPageNumber'] = 1 self.dicReportData['Designation'] = _('Designation') def getReportData(self, dicSearchfields, dicUser, oGrave, reportDefs): self.dicResults = {} sReportfile = reportDefs['ReportPath'] + '/' + self.sName # to do # check, if to load pictures self.fileName = reportDefs['DocumentPathListsGraves'] + '/' +_( self.sName + '-') + `dicUser['Name']` +`self.nRows` + '.pdf' reportDefs['pdfFile'] = os.path.normpath(self.fileName) dicUser['iOrderSort'] = self.iOrderSort dicResult = oGrave.getGravePlantListValues( dicSearchfields, dicUser, self.nRows) #print dicResult #print '++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++*' self.dicResults['grave'] = dicResult self.dicResults['articles'], self.dicResults['sums'] = oGrave.getGravePlantListArticles( dicSearchfields, dicUser, self.nRows) #self.loadXmlReport('addresses_phonelist1', 
'ReportAddressLists') #values in this order: # 1 reportname # 2 dicUser # 3 dicResults # 4 dicReportData # 5 reportDefs return sReportfile, dicUser, self.dicResults, self.dicReportData, reportDefs
gpl-3.0
bosstb/HaberPush
youtube_dl/extractor/soundcloud.py
14
20503
# coding: utf-8 from __future__ import unicode_literals import re import itertools from .common import ( InfoExtractor, SearchInfoExtractor ) from ..compat import ( compat_str, compat_urlparse, compat_urllib_parse_urlencode, ) from ..utils import ( ExtractorError, int_or_none, unified_strdate, ) class SoundcloudIE(InfoExtractor): """Information extractor for soundcloud.com To access the media, the uid of the song and a stream token must be extracted from the page source and the script must make a request to media.soundcloud.com/crossdomain.xml. Then the media can be grabbed by requesting from an url composed of the stream token and uid """ _VALID_URL = r'''(?x)^(?:https?://)? (?:(?:(?:www\.|m\.)?soundcloud\.com/ (?P<uploader>[\w\d-]+)/ (?!(?:tracks|sets(?:/.+?)?|reposts|likes|spotlight)/?(?:$|[?#])) (?P<title>[\w\d-]+)/? (?P<token>[^?]+?)?(?:[?].*)?$) |(?:api\.soundcloud\.com/tracks/(?P<track_id>\d+) (?:/?\?secret_token=(?P<secret_token>[^&]+))?) |(?P<player>(?:w|player|p.)\.soundcloud\.com/player/?.*?url=.*) ) ''' IE_NAME = 'soundcloud' _TESTS = [ { 'url': 'http://soundcloud.com/ethmusic/lostin-powers-she-so-heavy', 'md5': 'ebef0a451b909710ed1d7787dddbf0d7', 'info_dict': { 'id': '62986583', 'ext': 'mp3', 'upload_date': '20121011', 'description': 'No Downloads untill we record the finished version this weekend, i was too pumped n i had to post it , earl is prolly gonna b hella p.o\'d', 'uploader': 'E.T. 
ExTerrestrial Music', 'title': 'Lostin Powers - She so Heavy (SneakPreview) Adrian Ackers Blueprint 1', 'duration': 143, 'license': 'all-rights-reserved', } }, # not streamable song { 'url': 'https://soundcloud.com/the-concept-band/goldrushed-mastered?in=the-concept-band/sets/the-royal-concept-ep', 'info_dict': { 'id': '47127627', 'ext': 'mp3', 'title': 'Goldrushed', 'description': 'From Stockholm Sweden\r\nPovel / Magnus / Filip / David\r\nwww.theroyalconcept.com', 'uploader': 'The Royal Concept', 'upload_date': '20120521', 'duration': 227, 'license': 'all-rights-reserved', }, 'params': { # rtmp 'skip_download': True, }, }, # private link { 'url': 'https://soundcloud.com/jaimemf/youtube-dl-test-video-a-y-baw/s-8Pjrp', 'md5': 'aa0dd32bfea9b0c5ef4f02aacd080604', 'info_dict': { 'id': '123998367', 'ext': 'mp3', 'title': 'Youtube - Dl Test Video \'\' Ä↭', 'uploader': 'jaimeMF', 'description': 'test chars: \"\'/\\ä↭', 'upload_date': '20131209', 'duration': 9, 'license': 'all-rights-reserved', }, }, # private link (alt format) { 'url': 'https://api.soundcloud.com/tracks/123998367?secret_token=s-8Pjrp', 'md5': 'aa0dd32bfea9b0c5ef4f02aacd080604', 'info_dict': { 'id': '123998367', 'ext': 'mp3', 'title': 'Youtube - Dl Test Video \'\' Ä↭', 'uploader': 'jaimeMF', 'description': 'test chars: \"\'/\\ä↭', 'upload_date': '20131209', 'duration': 9, 'license': 'all-rights-reserved', }, }, # downloadable song { 'url': 'https://soundcloud.com/oddsamples/bus-brakes', 'md5': '7624f2351f8a3b2e7cd51522496e7631', 'info_dict': { 'id': '128590877', 'ext': 'mp3', 'title': 'Bus Brakes', 'description': 'md5:0053ca6396e8d2fd7b7e1595ef12ab66', 'uploader': 'oddsamples', 'upload_date': '20140109', 'duration': 17, 'license': 'cc-by-sa', }, }, ] _CLIENT_ID = '2t9loNQH90kzJcsFCODdigxfp325aq4z' _IPHONE_CLIENT_ID = '376f225bf427445fc4bfb6b99b72e0bf' @staticmethod def _extract_urls(webpage): return [m.group('url') for m in re.finditer( 
r'<iframe[^>]+src=(["\'])(?P<url>(?:https?://)?(?:w\.)?soundcloud\.com/player.+?)\1', webpage)] def report_resolve(self, video_id): """Report information extraction.""" self.to_screen('%s: Resolving id' % video_id) @classmethod def _resolv_url(cls, url): return 'http://api.soundcloud.com/resolve.json?url=' + url + '&client_id=' + cls._CLIENT_ID def _extract_info_dict(self, info, full_title=None, quiet=False, secret_token=None): track_id = compat_str(info['id']) name = full_title or track_id if quiet: self.report_extraction(name) thumbnail = info.get('artwork_url') if isinstance(thumbnail, compat_str): thumbnail = thumbnail.replace('-large', '-t500x500') ext = 'mp3' result = { 'id': track_id, 'uploader': info.get('user', {}).get('username'), 'upload_date': unified_strdate(info.get('created_at')), 'title': info['title'], 'description': info.get('description'), 'thumbnail': thumbnail, 'duration': int_or_none(info.get('duration'), 1000), 'webpage_url': info.get('permalink_url'), 'license': info.get('license'), } formats = [] if info.get('downloadable', False): # We can build a direct link to the song format_url = ( 'https://api.soundcloud.com/tracks/{0}/download?client_id={1}'.format( track_id, self._CLIENT_ID)) formats.append({ 'format_id': 'download', 'ext': info.get('original_format', 'mp3'), 'url': format_url, 'vcodec': 'none', 'preference': 10, }) # We have to retrieve the url format_dict = self._download_json( 'http://api.soundcloud.com/i1/tracks/%s/streams' % track_id, track_id, 'Downloading track url', query={ 'client_id': self._CLIENT_ID, 'secret_token': secret_token, }) for key, stream_url in format_dict.items(): abr = int_or_none(self._search_regex( r'_(\d+)_url', key, 'audio bitrate', default=None)) if key.startswith('http'): stream_formats = [{ 'format_id': key, 'ext': ext, 'url': stream_url, }] elif key.startswith('rtmp'): # The url doesn't have an rtmp app, we have to extract the playpath url, path = stream_url.split('mp3:', 1) stream_formats = [{ 
'format_id': key, 'url': url, 'play_path': 'mp3:' + path, 'ext': 'flv', }] elif key.startswith('hls'): stream_formats = self._extract_m3u8_formats( stream_url, track_id, 'mp3', entry_protocol='m3u8_native', m3u8_id=key, fatal=False) else: continue for f in stream_formats: f['abr'] = abr formats.extend(stream_formats) if not formats: # We fallback to the stream_url in the original info, this # cannot be always used, sometimes it can give an HTTP 404 error formats.append({ 'format_id': 'fallback', 'url': info['stream_url'] + '?client_id=' + self._CLIENT_ID, 'ext': ext, }) for f in formats: f['vcodec'] = 'none' self._check_formats(formats, track_id) self._sort_formats(formats) result['formats'] = formats return result def _real_extract(self, url): mobj = re.match(self._VALID_URL, url, flags=re.VERBOSE) if mobj is None: raise ExtractorError('Invalid URL: %s' % url) track_id = mobj.group('track_id') if track_id is not None: info_json_url = 'http://api.soundcloud.com/tracks/' + track_id + '.json?client_id=' + self._CLIENT_ID full_title = track_id token = mobj.group('secret_token') if token: info_json_url += '&secret_token=' + token elif mobj.group('player'): query = compat_urlparse.parse_qs(compat_urlparse.urlparse(url).query) real_url = query['url'][0] # If the token is in the query of the original url we have to # manually add it if 'secret_token' in query: real_url += '?secret_token=' + query['secret_token'][0] return self.url_result(real_url) else: # extract uploader (which is in the url) uploader = mobj.group('uploader') # extract simple title (uploader + slug of song title) slug_title = mobj.group('title') token = mobj.group('token') full_title = resolve_title = '%s/%s' % (uploader, slug_title) if token: resolve_title += '/%s' % token self.report_resolve(full_title) url = 'http://soundcloud.com/%s' % resolve_title info_json_url = self._resolv_url(url) info = self._download_json(info_json_url, full_title, 'Downloading info JSON') return self._extract_info_dict(info, 
full_title, secret_token=token) class SoundcloudPlaylistBaseIE(SoundcloudIE): @staticmethod def _extract_id(e): return compat_str(e['id']) if e.get('id') else None def _extract_track_entries(self, tracks): return [ self.url_result( track['permalink_url'], SoundcloudIE.ie_key(), video_id=self._extract_id(track)) for track in tracks if track.get('permalink_url')] class SoundcloudSetIE(SoundcloudPlaylistBaseIE): _VALID_URL = r'https?://(?:(?:www|m)\.)?soundcloud\.com/(?P<uploader>[\w\d-]+)/sets/(?P<slug_title>[\w\d-]+)(?:/(?P<token>[^?/]+))?' IE_NAME = 'soundcloud:set' _TESTS = [{ 'url': 'https://soundcloud.com/the-concept-band/sets/the-royal-concept-ep', 'info_dict': { 'id': '2284613', 'title': 'The Royal Concept EP', }, 'playlist_mincount': 6, }, { 'url': 'https://soundcloud.com/the-concept-band/sets/the-royal-concept-ep/token', 'only_matching': True, }] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) # extract uploader (which is in the url) uploader = mobj.group('uploader') # extract simple title (uploader + slug of song title) slug_title = mobj.group('slug_title') full_title = '%s/sets/%s' % (uploader, slug_title) url = 'http://soundcloud.com/%s/sets/%s' % (uploader, slug_title) token = mobj.group('token') if token: full_title += '/' + token url += '/' + token self.report_resolve(full_title) resolv_url = self._resolv_url(url) info = self._download_json(resolv_url, full_title) if 'errors' in info: msgs = (compat_str(err['error_message']) for err in info['errors']) raise ExtractorError('unable to download video webpage: %s' % ','.join(msgs)) entries = self._extract_track_entries(info['tracks']) return { '_type': 'playlist', 'entries': entries, 'id': '%s' % info['id'], 'title': info['title'], } class SoundcloudUserIE(SoundcloudPlaylistBaseIE): _VALID_URL = r'''(?x) https?:// (?:(?:www|m)\.)?soundcloud\.com/ (?P<user>[^/]+) (?:/ (?P<rsrc>tracks|sets|reposts|likes|spotlight) )? 
/?(?:[?#].*)?$ ''' IE_NAME = 'soundcloud:user' _TESTS = [{ 'url': 'https://soundcloud.com/the-akashic-chronicler', 'info_dict': { 'id': '114582580', 'title': 'The Akashic Chronicler (All)', }, 'playlist_mincount': 74, }, { 'url': 'https://soundcloud.com/the-akashic-chronicler/tracks', 'info_dict': { 'id': '114582580', 'title': 'The Akashic Chronicler (Tracks)', }, 'playlist_mincount': 37, }, { 'url': 'https://soundcloud.com/the-akashic-chronicler/sets', 'info_dict': { 'id': '114582580', 'title': 'The Akashic Chronicler (Playlists)', }, 'playlist_mincount': 2, }, { 'url': 'https://soundcloud.com/the-akashic-chronicler/reposts', 'info_dict': { 'id': '114582580', 'title': 'The Akashic Chronicler (Reposts)', }, 'playlist_mincount': 7, }, { 'url': 'https://soundcloud.com/the-akashic-chronicler/likes', 'info_dict': { 'id': '114582580', 'title': 'The Akashic Chronicler (Likes)', }, 'playlist_mincount': 321, }, { 'url': 'https://soundcloud.com/grynpyret/spotlight', 'info_dict': { 'id': '7098329', 'title': 'GRYNPYRET (Spotlight)', }, 'playlist_mincount': 1, }] _API_BASE = 'https://api.soundcloud.com' _API_V2_BASE = 'https://api-v2.soundcloud.com' _BASE_URL_MAP = { 'all': '%s/profile/soundcloud:users:%%s' % _API_V2_BASE, 'tracks': '%s/users/%%s/tracks' % _API_BASE, 'sets': '%s/users/%%s/playlists' % _API_V2_BASE, 'reposts': '%s/profile/soundcloud:users:%%s/reposts' % _API_V2_BASE, 'likes': '%s/users/%%s/likes' % _API_V2_BASE, 'spotlight': '%s/users/%%s/spotlight' % _API_V2_BASE, } _TITLE_MAP = { 'all': 'All', 'tracks': 'Tracks', 'sets': 'Playlists', 'reposts': 'Reposts', 'likes': 'Likes', 'spotlight': 'Spotlight', } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) uploader = mobj.group('user') url = 'http://soundcloud.com/%s/' % uploader resolv_url = self._resolv_url(url) user = self._download_json( resolv_url, uploader, 'Downloading user info') resource = mobj.group('rsrc') or 'all' base_url = self._BASE_URL_MAP[resource] % user['id'] COMMON_QUERY = { 
'limit': 50, 'client_id': self._CLIENT_ID, 'linked_partitioning': '1', } query = COMMON_QUERY.copy() query['offset'] = 0 next_href = base_url + '?' + compat_urllib_parse_urlencode(query) entries = [] for i in itertools.count(): response = self._download_json( next_href, uploader, 'Downloading track page %s' % (i + 1)) collection = response['collection'] if not collection: break def resolve_permalink_url(candidates): for cand in candidates: if isinstance(cand, dict): permalink_url = cand.get('permalink_url') entry_id = self._extract_id(cand) if permalink_url and permalink_url.startswith('http'): return permalink_url, entry_id for e in collection: permalink_url, entry_id = resolve_permalink_url((e, e.get('track'), e.get('playlist'))) if permalink_url: entries.append(self.url_result(permalink_url, video_id=entry_id)) next_href = response.get('next_href') if not next_href: break parsed_next_href = compat_urlparse.urlparse(response['next_href']) qs = compat_urlparse.parse_qs(parsed_next_href.query) qs.update(COMMON_QUERY) next_href = compat_urlparse.urlunparse( parsed_next_href._replace(query=compat_urllib_parse_urlencode(qs, True))) return { '_type': 'playlist', 'id': compat_str(user['id']), 'title': '%s (%s)' % (user['username'], self._TITLE_MAP[resource]), 'entries': entries, } class SoundcloudPlaylistIE(SoundcloudPlaylistBaseIE): _VALID_URL = r'https?://api\.soundcloud\.com/playlists/(?P<id>[0-9]+)(?:/?\?secret_token=(?P<token>[^&]+?))?$' IE_NAME = 'soundcloud:playlist' _TESTS = [{ 'url': 'http://api.soundcloud.com/playlists/4110309', 'info_dict': { 'id': '4110309', 'title': 'TILT Brass - Bowery Poetry Club, August \'03 [Non-Site SCR 02]', 'description': 're:.*?TILT Brass - Bowery Poetry Club', }, 'playlist_count': 6, }] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) playlist_id = mobj.group('id') base_url = '%s//api.soundcloud.com/playlists/%s.json?' 
% (self.http_scheme(), playlist_id) data_dict = { 'client_id': self._CLIENT_ID, } token = mobj.group('token') if token: data_dict['secret_token'] = token data = compat_urllib_parse_urlencode(data_dict) data = self._download_json( base_url + data, playlist_id, 'Downloading playlist') entries = self._extract_track_entries(data['tracks']) return { '_type': 'playlist', 'id': playlist_id, 'title': data.get('title'), 'description': data.get('description'), 'entries': entries, } class SoundcloudSearchIE(SearchInfoExtractor, SoundcloudIE): IE_NAME = 'soundcloud:search' IE_DESC = 'Soundcloud search' _MAX_RESULTS = float('inf') _TESTS = [{ 'url': 'scsearch15:post-avant jazzcore', 'info_dict': { 'title': 'post-avant jazzcore', }, 'playlist_count': 15, }] _SEARCH_KEY = 'scsearch' _MAX_RESULTS_PER_PAGE = 200 _DEFAULT_RESULTS_PER_PAGE = 50 _API_V2_BASE = 'https://api-v2.soundcloud.com' def _get_collection(self, endpoint, collection_id, **query): limit = min( query.get('limit', self._DEFAULT_RESULTS_PER_PAGE), self._MAX_RESULTS_PER_PAGE) query['limit'] = limit query['client_id'] = self._CLIENT_ID query['linked_partitioning'] = '1' query['offset'] = 0 data = compat_urllib_parse_urlencode(query) next_url = '{0}{1}?{2}'.format(self._API_V2_BASE, endpoint, data) collected_results = 0 for i in itertools.count(1): response = self._download_json( next_url, collection_id, 'Downloading page {0}'.format(i), 'Unable to download API page') collection = response.get('collection', []) if not collection: break collection = list(filter(bool, collection)) collected_results += len(collection) for item in collection: yield self.url_result(item['uri'], SoundcloudIE.ie_key()) if not collection or collected_results >= limit: break next_url = response.get('next_href') if not next_url: break def _get_n_results(self, query, n): tracks = self._get_collection('/search/tracks', query, limit=n, q=query) return self.playlist_result(tracks, playlist_title=query)
mit
nkrinner/nova
nova/tests/api/openstack/test_common.py
5
26241
# Copyright 2010 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ Test suites for 'common' code used throughout the OpenStack HTTP API. """ from lxml import etree import mock from testtools import matchers import webob import webob.exc import xml.dom.minidom as minidom from nova.api.openstack import common from nova.api.openstack import xmlutil from nova.compute import task_states from nova.compute import vm_states from nova import exception from nova import test from nova.tests import utils NS = "{http://docs.openstack.org/compute/api/v1.1}" ATOMNS = "{http://www.w3.org/2005/Atom}" class LimiterTest(test.TestCase): """Unit tests for the `nova.api.openstack.common.limited` method which takes in a list of items and, depending on the 'offset' and 'limit' GET params, returns a subset or complete set of the given items. """ def setUp(self): """Run before each test.""" super(LimiterTest, self).setUp() self.tiny = range(1) self.small = range(10) self.medium = range(1000) self.large = range(10000) def test_limiter_offset_zero(self): # Test offset key works with 0. req = webob.Request.blank('/?offset=0') self.assertEqual(common.limited(self.tiny, req), self.tiny) self.assertEqual(common.limited(self.small, req), self.small) self.assertEqual(common.limited(self.medium, req), self.medium) self.assertEqual(common.limited(self.large, req), self.large[:1000]) def test_limiter_offset_medium(self): # Test offset key works with a medium sized number. 
req = webob.Request.blank('/?offset=10') self.assertEqual(common.limited(self.tiny, req), []) self.assertEqual(common.limited(self.small, req), self.small[10:]) self.assertEqual(common.limited(self.medium, req), self.medium[10:]) self.assertEqual(common.limited(self.large, req), self.large[10:1010]) def test_limiter_offset_over_max(self): # Test offset key works with a number over 1000 (max_limit). req = webob.Request.blank('/?offset=1001') self.assertEqual(common.limited(self.tiny, req), []) self.assertEqual(common.limited(self.small, req), []) self.assertEqual(common.limited(self.medium, req), []) self.assertEqual( common.limited(self.large, req), self.large[1001:2001]) def test_limiter_offset_blank(self): # Test offset key works with a blank offset. req = webob.Request.blank('/?offset=') self.assertRaises( webob.exc.HTTPBadRequest, common.limited, self.tiny, req) def test_limiter_offset_bad(self): # Test offset key works with a BAD offset. req = webob.Request.blank(u'/?offset=\u0020aa') self.assertRaises( webob.exc.HTTPBadRequest, common.limited, self.tiny, req) def test_limiter_nothing(self): # Test request with no offset or limit. req = webob.Request.blank('/') self.assertEqual(common.limited(self.tiny, req), self.tiny) self.assertEqual(common.limited(self.small, req), self.small) self.assertEqual(common.limited(self.medium, req), self.medium) self.assertEqual(common.limited(self.large, req), self.large[:1000]) def test_limiter_limit_zero(self): # Test limit of zero. req = webob.Request.blank('/?limit=0') self.assertEqual(common.limited(self.tiny, req), self.tiny) self.assertEqual(common.limited(self.small, req), self.small) self.assertEqual(common.limited(self.medium, req), self.medium) self.assertEqual(common.limited(self.large, req), self.large[:1000]) def test_limiter_limit_medium(self): # Test limit of 10. 
req = webob.Request.blank('/?limit=10') self.assertEqual(common.limited(self.tiny, req), self.tiny) self.assertEqual(common.limited(self.small, req), self.small) self.assertEqual(common.limited(self.medium, req), self.medium[:10]) self.assertEqual(common.limited(self.large, req), self.large[:10]) def test_limiter_limit_over_max(self): # Test limit of 3000. req = webob.Request.blank('/?limit=3000') self.assertEqual(common.limited(self.tiny, req), self.tiny) self.assertEqual(common.limited(self.small, req), self.small) self.assertEqual(common.limited(self.medium, req), self.medium) self.assertEqual(common.limited(self.large, req), self.large[:1000]) def test_limiter_limit_and_offset(self): # Test request with both limit and offset. items = range(2000) req = webob.Request.blank('/?offset=1&limit=3') self.assertEqual(common.limited(items, req), items[1:4]) req = webob.Request.blank('/?offset=3&limit=0') self.assertEqual(common.limited(items, req), items[3:1003]) req = webob.Request.blank('/?offset=3&limit=1500') self.assertEqual(common.limited(items, req), items[3:1003]) req = webob.Request.blank('/?offset=3000&limit=10') self.assertEqual(common.limited(items, req), []) def test_limiter_custom_max_limit(self): # Test a max_limit other than 1000. items = range(2000) req = webob.Request.blank('/?offset=1&limit=3') self.assertEqual( common.limited(items, req, max_limit=2000), items[1:4]) req = webob.Request.blank('/?offset=3&limit=0') self.assertEqual( common.limited(items, req, max_limit=2000), items[3:]) req = webob.Request.blank('/?offset=3&limit=2500') self.assertEqual( common.limited(items, req, max_limit=2000), items[3:]) req = webob.Request.blank('/?offset=3000&limit=10') self.assertEqual(common.limited(items, req, max_limit=2000), []) def test_limiter_negative_limit(self): # Test a negative limit. 
req = webob.Request.blank('/?limit=-3000') self.assertRaises( webob.exc.HTTPBadRequest, common.limited, self.tiny, req) def test_limiter_negative_offset(self): # Test a negative offset. req = webob.Request.blank('/?offset=-30') self.assertRaises( webob.exc.HTTPBadRequest, common.limited, self.tiny, req) class PaginationParamsTest(test.TestCase): """Unit tests for the `nova.api.openstack.common.get_pagination_params` method which takes in a request object and returns 'marker' and 'limit' GET params. """ def test_no_params(self): # Test no params. req = webob.Request.blank('/') self.assertEqual(common.get_pagination_params(req), {}) def test_valid_marker(self): # Test valid marker param. req = webob.Request.blank( '/?marker=263abb28-1de6-412f-b00b-f0ee0c4333c2') self.assertEqual(common.get_pagination_params(req), {'marker': '263abb28-1de6-412f-b00b-f0ee0c4333c2'}) def test_valid_limit(self): # Test valid limit param. req = webob.Request.blank('/?limit=10') self.assertEqual(common.get_pagination_params(req), {'limit': 10}) def test_invalid_limit(self): # Test invalid limit param. req = webob.Request.blank('/?limit=-2') self.assertRaises( webob.exc.HTTPBadRequest, common.get_pagination_params, req) def test_valid_limit_and_marker(self): # Test valid limit and marker parameters. marker = '263abb28-1de6-412f-b00b-f0ee0c4333c2' req = webob.Request.blank('/?limit=20&marker=%s' % marker) self.assertEqual(common.get_pagination_params(req), {'marker': marker, 'limit': 20}) def test_valid_page_size(self): # Test valid page_size param. req = webob.Request.blank('/?page_size=10') self.assertEqual(common.get_pagination_params(req), {'page_size': 10}) def test_invalid_page_size(self): # Test invalid page_size param. req = webob.Request.blank('/?page_size=-2') self.assertRaises( webob.exc.HTTPBadRequest, common.get_pagination_params, req) def test_valid_limit_and_page_size(self): # Test valid limit and page_size parameters. 
req = webob.Request.blank('/?limit=20&page_size=5') self.assertEqual(common.get_pagination_params(req), {'page_size': 5, 'limit': 20}) class MiscFunctionsTest(test.TestCase): def test_remove_major_version_from_href(self): fixture = 'http://www.testsite.com/v1/images' expected = 'http://www.testsite.com/images' actual = common.remove_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_version_from_href(self): fixture = 'http://www.testsite.com/v1.1/images' expected = 'http://www.testsite.com/images' actual = common.remove_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_version_from_href_2(self): fixture = 'http://www.testsite.com/v1.1/' expected = 'http://www.testsite.com/' actual = common.remove_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_version_from_href_3(self): fixture = 'http://www.testsite.com/v10.10' expected = 'http://www.testsite.com' actual = common.remove_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_version_from_href_4(self): fixture = 'http://www.testsite.com/v1.1/images/v10.5' expected = 'http://www.testsite.com/images/v10.5' actual = common.remove_version_from_href(fixture) self.assertEqual(actual, expected) def test_remove_version_from_href_bad_request(self): fixture = 'http://www.testsite.com/1.1/images' self.assertRaises(ValueError, common.remove_version_from_href, fixture) def test_remove_version_from_href_bad_request_2(self): fixture = 'http://www.testsite.com/v/images' self.assertRaises(ValueError, common.remove_version_from_href, fixture) def test_remove_version_from_href_bad_request_3(self): fixture = 'http://www.testsite.com/v1.1images' self.assertRaises(ValueError, common.remove_version_from_href, fixture) def test_get_id_from_href_with_int_url(self): fixture = 'http://www.testsite.com/dir/45' actual = common.get_id_from_href(fixture) expected = '45' self.assertEqual(actual, expected) def 
test_get_id_from_href_with_int(self): fixture = '45' actual = common.get_id_from_href(fixture) expected = '45' self.assertEqual(actual, expected) def test_get_id_from_href_with_int_url_query(self): fixture = 'http://www.testsite.com/dir/45?asdf=jkl' actual = common.get_id_from_href(fixture) expected = '45' self.assertEqual(actual, expected) def test_get_id_from_href_with_uuid_url(self): fixture = 'http://www.testsite.com/dir/abc123' actual = common.get_id_from_href(fixture) expected = "abc123" self.assertEqual(actual, expected) def test_get_id_from_href_with_uuid_url_query(self): fixture = 'http://www.testsite.com/dir/abc123?asdf=jkl' actual = common.get_id_from_href(fixture) expected = "abc123" self.assertEqual(actual, expected) def test_get_id_from_href_with_uuid(self): fixture = 'abc123' actual = common.get_id_from_href(fixture) expected = 'abc123' self.assertEqual(actual, expected) def test_raise_http_conflict_for_instance_invalid_state(self): exc = exception.InstanceInvalidState(attr='fake_attr', state='fake_state', method='fake_method', instance_uuid='fake') try: common.raise_http_conflict_for_instance_invalid_state(exc, 'meow') except webob.exc.HTTPConflict as e: self.assertEqual(unicode(e), "Cannot 'meow' while instance is in fake_attr fake_state") else: self.fail("webob.exc.HTTPConflict was not raised") def test_check_img_metadata_properties_quota_valid_metadata(self): ctxt = utils.get_test_admin_context() metadata1 = {"key": "value"} actual = common.check_img_metadata_properties_quota(ctxt, metadata1) self.assertIsNone(actual) metadata2 = {"key": "v" * 260} actual = common.check_img_metadata_properties_quota(ctxt, metadata2) self.assertIsNone(actual) metadata3 = {"key": ""} actual = common.check_img_metadata_properties_quota(ctxt, metadata3) self.assertIsNone(actual) def test_check_img_metadata_properties_quota_inv_metadata(self): ctxt = utils.get_test_admin_context() metadata1 = {"a" * 260: "value"} self.assertRaises(webob.exc.HTTPBadRequest, 
common.check_img_metadata_properties_quota, ctxt, metadata1) metadata2 = {"": "value"} self.assertRaises(webob.exc.HTTPBadRequest, common.check_img_metadata_properties_quota, ctxt, metadata2) metadata3 = "invalid metadata" self.assertRaises(webob.exc.HTTPBadRequest, common.check_img_metadata_properties_quota, ctxt, metadata3) metadata4 = None self.assertIsNone(common.check_img_metadata_properties_quota(ctxt, metadata4)) metadata5 = {} self.assertIsNone(common.check_img_metadata_properties_quota(ctxt, metadata5)) def test_status_from_state(self): for vm_state in (vm_states.ACTIVE, vm_states.STOPPED): for task_state in (task_states.RESIZE_PREP, task_states.RESIZE_MIGRATING, task_states.RESIZE_MIGRATED, task_states.RESIZE_FINISH): actual = common.status_from_state(vm_state, task_state) expected = 'RESIZE' self.assertEqual(expected, actual) def test_task_and_vm_state_from_status(self): fixture1 = 'reboot' actual = common.task_and_vm_state_from_status(fixture1) expected = [vm_states.ACTIVE], [task_states.REBOOT_PENDING, task_states.REBOOT_STARTED, task_states.REBOOTING] self.assertEqual(expected, actual) fixture2 = 'resize' actual = common.task_and_vm_state_from_status(fixture2) expected = ([vm_states.ACTIVE, vm_states.STOPPED], [task_states.RESIZE_FINISH, task_states.RESIZE_MIGRATED, task_states.RESIZE_MIGRATING, task_states.RESIZE_PREP]) self.assertEqual(expected, actual) class TestCollectionLinks(test.NoDBTestCase): """Tests the _get_collection_links method.""" @mock.patch('nova.api.openstack.common.ViewBuilder._get_next_link') def test_items_less_than_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict(limit=10)) type(req).params = params builder = common.ViewBuilder() results = builder._get_collection_links(req, items, "ignored", "uuid") href_link_mock.assert_not_called() self.assertThat(results, matchers.HasLength(0)) @mock.patch('nova.api.openstack.common.ViewBuilder._get_next_link') def 
test_items_equals_given_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict(limit=1)) type(req).params = params builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) @mock.patch('nova.api.openstack.common.ViewBuilder._get_next_link') def test_items_equals_default_limit(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() params = mock.PropertyMock(return_value=dict()) type(req).params = params self.flags(osapi_max_limit=1) builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) @mock.patch('nova.api.openstack.common.ViewBuilder._get_next_link') def test_items_equals_default_limit_with_given(self, href_link_mock): items = [ {"uuid": "123"} ] req = mock.MagicMock() # Given limit is greater then default max, only return default max params = mock.PropertyMock(return_value=dict(limit=2)) type(req).params = params self.flags(osapi_max_limit=1) builder = common.ViewBuilder() results = builder._get_collection_links(req, items, mock.sentinel.coll_key, "uuid") href_link_mock.assert_called_once_with(req, "123", mock.sentinel.coll_key) self.assertThat(results, matchers.HasLength(1)) class MetadataXMLDeserializationTest(test.TestCase): deserializer = common.MetadataXMLDeserializer() def test_create(self): request_body = """ <metadata xmlns="http://docs.openstack.org/compute/api/v1.1"> <meta key='123'>asdf</meta> <meta key='567'>jkl;</meta> </metadata>""" output = self.deserializer.deserialize(request_body, 'create') expected = {"body": {"metadata": {"123": "asdf", "567": "jkl;"}}} self.assertEqual(output, expected) def 
test_create_empty(self): request_body = """ <metadata xmlns="http://docs.openstack.org/compute/api/v1.1"/>""" output = self.deserializer.deserialize(request_body, 'create') expected = {"body": {"metadata": {}}} self.assertEqual(output, expected) def test_update_all(self): request_body = """ <metadata xmlns="http://docs.openstack.org/compute/api/v1.1"> <meta key='123'>asdf</meta> <meta key='567'>jkl;</meta> </metadata>""" output = self.deserializer.deserialize(request_body, 'update_all') expected = {"body": {"metadata": {"123": "asdf", "567": "jkl;"}}} self.assertEqual(output, expected) def test_update(self): request_body = """ <meta xmlns="http://docs.openstack.org/compute/api/v1.1" key='123'>asdf</meta>""" output = self.deserializer.deserialize(request_body, 'update') expected = {"body": {"meta": {"123": "asdf"}}} self.assertEqual(output, expected) class MetadataXMLSerializationTest(test.TestCase): def test_xml_declaration(self): serializer = common.MetadataTemplate() fixture = { 'metadata': { 'one': 'two', 'three': 'four', }, } output = serializer.serialize(fixture) has_dec = output.startswith("<?xml version='1.0' encoding='UTF-8'?>") self.assertTrue(has_dec) def test_index(self): serializer = common.MetadataTemplate() fixture = { 'metadata': { 'one': 'two', 'three': 'four', }, } output = serializer.serialize(fixture) root = etree.XML(output) xmlutil.validate_schema(root, 'metadata') metadata_dict = fixture['metadata'] metadata_elems = root.findall('{0}meta'.format(NS)) self.assertEqual(len(metadata_elems), 2) for i, metadata_elem in enumerate(metadata_elems): (meta_key, meta_value) = metadata_dict.items()[i] self.assertEqual(str(metadata_elem.get('key')), str(meta_key)) self.assertEqual(str(metadata_elem.text).strip(), str(meta_value)) def test_index_null(self): serializer = common.MetadataTemplate() fixture = { 'metadata': { None: None, }, } output = serializer.serialize(fixture) root = etree.XML(output) xmlutil.validate_schema(root, 'metadata') metadata_dict = 
fixture['metadata'] metadata_elems = root.findall('{0}meta'.format(NS)) self.assertEqual(len(metadata_elems), 1) for i, metadata_elem in enumerate(metadata_elems): (meta_key, meta_value) = metadata_dict.items()[i] self.assertEqual(str(metadata_elem.get('key')), str(meta_key)) self.assertEqual(str(metadata_elem.text).strip(), str(meta_value)) def test_index_unicode(self): serializer = common.MetadataTemplate() fixture = { 'metadata': { u'three': u'Jos\xe9', }, } output = serializer.serialize(fixture) root = etree.XML(output) xmlutil.validate_schema(root, 'metadata') metadata_dict = fixture['metadata'] metadata_elems = root.findall('{0}meta'.format(NS)) self.assertEqual(len(metadata_elems), 1) for i, metadata_elem in enumerate(metadata_elems): (meta_key, meta_value) = metadata_dict.items()[i] self.assertEqual(str(metadata_elem.get('key')), str(meta_key)) self.assertEqual(metadata_elem.text.strip(), meta_value) def test_show(self): serializer = common.MetaItemTemplate() fixture = { 'meta': { 'one': 'two', }, } output = serializer.serialize(fixture) root = etree.XML(output) meta_dict = fixture['meta'] (meta_key, meta_value) = meta_dict.items()[0] self.assertEqual(str(root.get('key')), str(meta_key)) self.assertEqual(root.text.strip(), meta_value) def test_update_all(self): serializer = common.MetadataTemplate() fixture = { 'metadata': { 'key6': 'value6', 'key4': 'value4', }, } output = serializer.serialize(fixture) root = etree.XML(output) xmlutil.validate_schema(root, 'metadata') metadata_dict = fixture['metadata'] metadata_elems = root.findall('{0}meta'.format(NS)) self.assertEqual(len(metadata_elems), 2) for i, metadata_elem in enumerate(metadata_elems): (meta_key, meta_value) = metadata_dict.items()[i] self.assertEqual(str(metadata_elem.get('key')), str(meta_key)) self.assertEqual(str(metadata_elem.text).strip(), str(meta_value)) def test_update_item(self): serializer = common.MetaItemTemplate() fixture = { 'meta': { 'one': 'two', }, } output = 
serializer.serialize(fixture) root = etree.XML(output) meta_dict = fixture['meta'] (meta_key, meta_value) = meta_dict.items()[0] self.assertEqual(str(root.get('key')), str(meta_key)) self.assertEqual(root.text.strip(), meta_value) def test_create(self): serializer = common.MetadataTemplate() fixture = { 'metadata': { 'key9': 'value9', 'key2': 'value2', 'key1': 'value1', }, } output = serializer.serialize(fixture) root = etree.XML(output) xmlutil.validate_schema(root, 'metadata') metadata_dict = fixture['metadata'] metadata_elems = root.findall('{0}meta'.format(NS)) self.assertEqual(len(metadata_elems), 3) for i, metadata_elem in enumerate(metadata_elems): (meta_key, meta_value) = metadata_dict.items()[i] self.assertEqual(str(metadata_elem.get('key')), str(meta_key)) self.assertEqual(str(metadata_elem.text).strip(), str(meta_value)) actual = minidom.parseString(output.replace(" ", "")) expected = minidom.parseString(""" <metadata xmlns="http://docs.openstack.org/compute/api/v1.1"> <meta key="key2">value2</meta> <meta key="key9">value9</meta> <meta key="key1">value1</meta> </metadata> """.replace(" ", "").replace("\n", "")) self.assertEqual(expected.toxml(), actual.toxml()) def test_metadata_deserializer(self): """Should throw a 400 error on corrupt xml.""" deserializer = common.MetadataXMLDeserializer() self.assertRaises( exception.MalformedRequestBody, deserializer.deserialize, utils.killer_xml_body())
apache-2.0
zenweasel/hendrix
hendrix/services.py
1
3858
import sys import importlib from .resources import HendrixResource from twisted.application import internet, service from twisted.internet import reactor, protocol from twisted.python.threadpool import ThreadPool from twisted.web import server, resource, static import logging logger = logging.getLogger(__name__) class HendrixService(service.MultiService): """ HendrixService is a constructor that facilitates the collection of services and the extension of resources on the website by subclassing MultiService. 'application' refers to an instance of django.core.handlers.wsgi.WSGIHandler 'resources' refers to a list of twisted Resources with a namespace attribute 'services' refers to a list of twisted Services to add to the collection. """ def __init__(self, application, port=80, resources=None, services=None): service.MultiService.__init__(self) # Create, start and add a thread pool service, which is made available # to our WSGIResource within HendrixResource threads = ThreadPool() reactor.addSystemEventTrigger('after', 'shutdown', threads.stop) ThreadPoolService(threads).setServiceParent(self) # create the base resource and add any additional static resources resource = HendrixResource(reactor, threads, application) if resources: for res in resources: resource.putNamedChild(res) factory = server.Site(resource) # add a tcp server that binds to port=port main_web_tcp = TCPServer(port, factory) main_web_tcp.setName('main_web_tcp') # to get this at runtime use hedrix_service.getServiceNamed('main_web_tcp') main_web_tcp.setServiceParent(self) # add any additional services if services: logger.info('loaded %r at %r'%(srv,srv_name)) for srv_name, srv in services: srv.setName(srv_name) srv.setServiceParent(self) def get_port(self, name): "Return the port object associated to our tcp server" service = self.getServiceNamed(name) return service._port def add_server(self, name, protocol, server): self.servers[(name, protocol)] = server class ThreadPoolService(service.Service): ''' 
A simple class that defines a threadpool on init and provides for starting and stopping it. ''' def __init__(self, pool): "self.pool returns the twisted.python.ThreadPool() instance." if not isinstance(pool, ThreadPool): msg = '%s must be initialised with a ThreadPool instance' raise TypeError( msg % self.__class__.__name__ ) self.pool = pool def startService(self): service.Service.startService(self) self.pool.start() def stopService(self): service.Service.stopService(self) self.pool.stop() def get_additional_services(settings_module): """ if HENDRIX_SERVICES is specified in settings_module, it should be a list twisted internet services example: HENDRIX_SERVICES = ( ('myServiceName', 'apps.offload.services.TimeService'), ) """ additional_services = [] if hasattr(settings_module, 'HENDRIX_SERVICES'): for name, module_path in settings_module.HENDRIX_SERVICES: path_to_module, service_name = module_path.rsplit('.', 1) resource_module = importlib.import_module(path_to_module) additional_services.append((name, getattr(resource_module, service_name))) return additional_services class TCPServer(internet.TCPServer): def __init__(self, port, factory, *args, **kwargs): internet.TCPServer.__init__(self, port, factory, *args, **kwargs) self.factory = factory
mit
catchmrbharath/servo
tests/wpt/css-tests/tools/html5lib/doc/conf.py
436
9028
#!/usr/bin/env python3 # -*- coding: utf-8 -*- # # html5lib documentation build configuration file, created by # sphinx-quickstart on Wed May 8 00:04:49 2013. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.viewcode'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = 'html5lib' copyright = '2006 - 2013, James Graham, Geoffrey Sneddon, and contributors' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '1.0' # The full version, including alpha/beta/rc tags. sys.path.append(os.path.abspath('..')) from html5lib import __version__ release = __version__ # The language for content autogenerated by Sphinx. 
Refer to documentation # for a list of supported languages. #language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build', 'theme'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. 
#html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. 
htmlhelp_basename = 'html5libdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'html5lib.tex', 'html5lib Documentation', 'James Graham, Geoffrey Sneddon, and contributors', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'html5lib', 'html5lib Documentation', ['James Graham, Geoffrey Sneddon, and contributors'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. 
List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'html5lib', 'html5lib Documentation', 'James Graham, Geoffrey Sneddon, and contributors', 'html5lib', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False class CExtMock(object): """Required for autodoc on readthedocs.org where you cannot build C extensions.""" def __init__(self, *args, **kwargs): pass def __call__(self, *args, **kwargs): return CExtMock() @classmethod def __getattr__(cls, name): if name in ('__file__', '__path__'): return '/dev/null' else: return CExtMock() try: import lxml # flake8: noqa except ImportError: sys.modules['lxml'] = CExtMock() sys.modules['lxml.etree'] = CExtMock() print("warning: lxml modules mocked.") try: import genshi # flake8: noqa except ImportError: sys.modules['genshi'] = CExtMock() sys.modules['genshi.core'] = CExtMock() print("warning: genshi modules mocked.")
mpl-2.0
royalharsh/grpc
src/python/grpcio_tests/tests/unit/framework/common/test_control.py
23
4048
# Copyright 2015, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Code for instructing systems under test to block or fail.""" import abc import contextlib import threading import six class Defect(Exception): """Simulates a programming defect raised into in a system under test. Use of a standard exception type is too easily misconstrued as an actual defect in either the test infrastructure or the system under test. """ class Control(six.with_metaclass(abc.ABCMeta)): """An object that accepts program control from a system under test. 
Systems under test passed a Control should call its control() method frequently during execution. The control() method may block, raise an exception, or do nothing, all according to the enclosing test's desire for the system under test to simulate hanging, failing, or functioning. """ @abc.abstractmethod def control(self): """Potentially does anything.""" raise NotImplementedError() class PauseFailControl(Control): """A Control that can be used to pause or fail code under control. This object is only safe for use from two threads: one of the system under test calling control and the other from the test system calling pause, block_until_paused, and fail. """ def __init__(self): self._condition = threading.Condition() self._pause = False self._paused = False self._fail = False def control(self): with self._condition: if self._fail: raise Defect() while self._pause: self._paused = True self._condition.notify_all() self._condition.wait() self._paused = False @contextlib.contextmanager def pause(self): """Pauses code under control while controlling code is in context.""" with self._condition: self._pause = True yield with self._condition: self._pause = False self._condition.notify_all() def block_until_paused(self): """Blocks controlling code until code under control is paused. May only be called within the context of a pause call. """ with self._condition: while not self._paused: self._condition.wait() @contextlib.contextmanager def fail(self): """Fails code under control while controlling code is in context.""" with self._condition: self._fail = True yield with self._condition: self._fail = False
bsd-3-clause
googleinterns/learnbase
learnbase/src/main/webapp/WEB-INF/Lib/ConfigParser.py
186
27746
"""Configuration file parser. A setup file consists of sections, lead by a "[section]" header, and followed by "name: value" entries, with continuations and such in the style of RFC 822. The option values can contain format strings which refer to other values in the same section, or values in a special [DEFAULT] section. For example: something: %(dir)s/whatever would resolve the "%(dir)s" to the value of dir. All reference expansions are done late, on demand. Intrinsic defaults can be specified by passing them into the ConfigParser constructor as a dictionary. class: ConfigParser -- responsible for parsing a list of configuration files, and managing the parsed database. methods: __init__(defaults=None) create the parser and specify a dictionary of intrinsic defaults. The keys must be strings, the values must be appropriate for %()s string interpolation. Note that `__name__' is always an intrinsic default; its value is the section's name. sections() return all the configuration section names, sans DEFAULT has_section(section) return whether the given section exists has_option(section, option) return whether the given option exists in the given section options(section) return list of configuration options for the named section read(filenames) read and parse the list of named configuration files, given by name. A single filename is also allowed. Non-existing files are ignored. Return list of successfully read files. readfp(fp, filename=None) read and parse one configuration file, given as a file object. The filename defaults to fp.name; it is only used in error messages (if fp has no `name' attribute, the string `<???>' is used). get(section, option, raw=False, vars=None) return a string value for the named option. All % interpolations are expanded in the return values, based on the defaults passed into the constructor and the DEFAULT section. 
Additional substitutions may be provided using the `vars' argument, which must be a dictionary whose contents override any pre-existing defaults. getint(section, options) like get(), but convert value to an integer getfloat(section, options) like get(), but convert value to a float getboolean(section, options) like get(), but convert value to a boolean (currently case insensitively defined as 0, false, no, off for False, and 1, true, yes, on for True). Returns False or True. items(section, raw=False, vars=None) return a list of tuples with (name, value) for each option in the section. remove_section(section) remove the given file section and all its options remove_option(section, option) remove the given option from the given section set(section, option, value) set the given option write(fp) write the configuration state in .ini format """ try: from collections import OrderedDict as _default_dict except ImportError: # fallback for setup.py which hasn't yet built _collections _default_dict = dict import re __all__ = ["NoSectionError", "DuplicateSectionError", "NoOptionError", "InterpolationError", "InterpolationDepthError", "InterpolationSyntaxError", "ParsingError", "MissingSectionHeaderError", "ConfigParser", "SafeConfigParser", "RawConfigParser", "DEFAULTSECT", "MAX_INTERPOLATION_DEPTH"] DEFAULTSECT = "DEFAULT" MAX_INTERPOLATION_DEPTH = 10 # exception classes class Error(Exception): """Base class for ConfigParser exceptions.""" def _get_message(self): """Getter for 'message'; needed only to override deprecation in BaseException.""" return self.__message def _set_message(self, value): """Setter for 'message'; needed only to override deprecation in BaseException.""" self.__message = value # BaseException.message has been deprecated since Python 2.6. To prevent # DeprecationWarning from popping up over this pre-existing attribute, use # a new property that takes lookup precedence. 
message = property(_get_message, _set_message) def __init__(self, msg=''): self.message = msg Exception.__init__(self, msg) def __repr__(self): return self.message __str__ = __repr__ class NoSectionError(Error): """Raised when no section matches a requested option.""" def __init__(self, section): Error.__init__(self, 'No section: %r' % (section,)) self.section = section self.args = (section, ) class DuplicateSectionError(Error): """Raised when a section is multiply-created.""" def __init__(self, section): Error.__init__(self, "Section %r already exists" % section) self.section = section self.args = (section, ) class NoOptionError(Error): """A requested option was not found.""" def __init__(self, option, section): Error.__init__(self, "No option %r in section: %r" % (option, section)) self.option = option self.section = section self.args = (option, section) class InterpolationError(Error): """Base class for interpolation-related exceptions.""" def __init__(self, option, section, msg): Error.__init__(self, msg) self.option = option self.section = section self.args = (option, section, msg) class InterpolationMissingOptionError(InterpolationError): """A string substitution required a setting which was not available.""" def __init__(self, option, section, rawval, reference): msg = ("Bad value substitution:\n" "\tsection: [%s]\n" "\toption : %s\n" "\tkey : %s\n" "\trawval : %s\n" % (section, option, reference, rawval)) InterpolationError.__init__(self, option, section, msg) self.reference = reference self.args = (option, section, rawval, reference) class InterpolationSyntaxError(InterpolationError): """Raised when the source text into which substitutions are made does not conform to the required syntax.""" class InterpolationDepthError(InterpolationError): """Raised when substitutions are nested too deeply.""" def __init__(self, option, section, rawval): msg = ("Value interpolation too deeply recursive:\n" "\tsection: [%s]\n" "\toption : %s\n" "\trawval : %s\n" % 
(section, option, rawval)) InterpolationError.__init__(self, option, section, msg) self.args = (option, section, rawval) class ParsingError(Error): """Raised when a configuration file does not follow legal syntax.""" def __init__(self, filename): Error.__init__(self, 'File contains parsing errors: %s' % filename) self.filename = filename self.errors = [] self.args = (filename, ) def append(self, lineno, line): self.errors.append((lineno, line)) self.message += '\n\t[line %2d]: %s' % (lineno, line) class MissingSectionHeaderError(ParsingError): """Raised when a key-value pair is found before any section header.""" def __init__(self, filename, lineno, line): Error.__init__( self, 'File contains no section headers.\nfile: %s, line: %d\n%r' % (filename, lineno, line)) self.filename = filename self.lineno = lineno self.line = line self.args = (filename, lineno, line) class RawConfigParser: def __init__(self, defaults=None, dict_type=_default_dict, allow_no_value=False): self._dict = dict_type self._sections = self._dict() self._defaults = self._dict() if allow_no_value: self._optcre = self.OPTCRE_NV else: self._optcre = self.OPTCRE if defaults: for key, value in defaults.items(): self._defaults[self.optionxform(key)] = value def defaults(self): return self._defaults def sections(self): """Return a list of section names, excluding [DEFAULT]""" # self._sections will never have [DEFAULT] in it return self._sections.keys() def add_section(self, section): """Create a new section in the configuration. Raise DuplicateSectionError if a section by the specified name already exists. Raise ValueError if name is DEFAULT or any of it's case-insensitive variants. """ if section.lower() == "default": raise ValueError, 'Invalid section name: %s' % section if section in self._sections: raise DuplicateSectionError(section) self._sections[section] = self._dict() def has_section(self, section): """Indicate whether the named section is present in the configuration. 
The DEFAULT section is not acknowledged. """ return section in self._sections def options(self, section): """Return a list of option names for the given section name.""" try: opts = self._sections[section].copy() except KeyError: raise NoSectionError(section) opts.update(self._defaults) if '__name__' in opts: del opts['__name__'] return opts.keys() def read(self, filenames): """Read and parse a filename or a list of filenames. Files that cannot be opened are silently ignored; this is designed so that you can specify a list of potential configuration file locations (e.g. current directory, user's home directory, systemwide directory), and all existing configuration files in the list will be read. A single filename may also be given. Return list of successfully read files. """ if isinstance(filenames, basestring): filenames = [filenames] read_ok = [] for filename in filenames: try: fp = open(filename) except IOError: continue self._read(fp, filename) fp.close() read_ok.append(filename) return read_ok def readfp(self, fp, filename=None): """Like read() but the argument must be a file-like object. The `fp' argument must have a `readline' method. Optional second argument is the `filename', which if not given, is taken from fp.name. If fp has no `name' attribute, `<???>' is used. 
""" if filename is None: try: filename = fp.name except AttributeError: filename = '<???>' self._read(fp, filename) def get(self, section, option): opt = self.optionxform(option) if section not in self._sections: if section != DEFAULTSECT: raise NoSectionError(section) if opt in self._defaults: return self._defaults[opt] else: raise NoOptionError(option, section) elif opt in self._sections[section]: return self._sections[section][opt] elif opt in self._defaults: return self._defaults[opt] else: raise NoOptionError(option, section) def items(self, section): try: d2 = self._sections[section] except KeyError: if section != DEFAULTSECT: raise NoSectionError(section) d2 = self._dict() d = self._defaults.copy() d.update(d2) if "__name__" in d: del d["__name__"] return d.items() def _get(self, section, conv, option): return conv(self.get(section, option)) def getint(self, section, option): return self._get(section, int, option) def getfloat(self, section, option): return self._get(section, float, option) _boolean_states = {'1': True, 'yes': True, 'true': True, 'on': True, '0': False, 'no': False, 'false': False, 'off': False} def getboolean(self, section, option): v = self.get(section, option) if v.lower() not in self._boolean_states: raise ValueError, 'Not a boolean: %s' % v return self._boolean_states[v.lower()] def optionxform(self, optionstr): return optionstr.lower() def has_option(self, section, option): """Check for the existence of a given option in a given section.""" if not section or section == DEFAULTSECT: option = self.optionxform(option) return option in self._defaults elif section not in self._sections: return False else: option = self.optionxform(option) return (option in self._sections[section] or option in self._defaults) def set(self, section, option, value=None): """Set an option.""" if not section or section == DEFAULTSECT: sectdict = self._defaults else: try: sectdict = self._sections[section] except KeyError: raise NoSectionError(section) 
sectdict[self.optionxform(option)] = value def write(self, fp): """Write an .ini-format representation of the configuration state.""" if self._defaults: fp.write("[%s]\n" % DEFAULTSECT) for (key, value) in self._defaults.items(): fp.write("%s = %s\n" % (key, str(value).replace('\n', '\n\t'))) fp.write("\n") for section in self._sections: fp.write("[%s]\n" % section) for (key, value) in self._sections[section].items(): if key == "__name__": continue if (value is not None) or (self._optcre == self.OPTCRE): key = " = ".join((key, str(value).replace('\n', '\n\t'))) fp.write("%s\n" % (key)) fp.write("\n") def remove_option(self, section, option): """Remove an option.""" if not section or section == DEFAULTSECT: sectdict = self._defaults else: try: sectdict = self._sections[section] except KeyError: raise NoSectionError(section) option = self.optionxform(option) existed = option in sectdict if existed: del sectdict[option] return existed def remove_section(self, section): """Remove a file section.""" existed = section in self._sections if existed: del self._sections[section] return existed # # Regular expressions for parsing section headers and options. # SECTCRE = re.compile( r'\[' # [ r'(?P<header>[^]]+)' # very permissive! r'\]' # ] ) OPTCRE = re.compile( r'(?P<option>[^:=\s][^:=]*)' # very permissive! r'\s*(?P<vi>[:=])\s*' # any number of space/tab, # followed by separator # (either : or =), followed # by any # space/tab r'(?P<value>.*)$' # everything up to eol ) OPTCRE_NV = re.compile( r'(?P<option>[^:=\s][^:=]*)' # very permissive! r'\s*(?:' # any number of space/tab, r'(?P<vi>[:=])\s*' # optionally followed by # separator (either : or # =), followed by any # # space/tab r'(?P<value>.*))?$' # everything up to eol ) def _read(self, fp, fpname): """Parse a sectioned setup file. The sections in setup file contains a title line at the top, indicated by a name in square brackets (`[]'), plus key/value options lines, indicated by `name: value' format lines. 
Continuations are represented by an embedded newline then leading whitespace. Blank lines, lines beginning with a '#', and just about everything else are ignored. """ cursect = None # None, or a dictionary optname = None lineno = 0 e = None # None, or an exception while True: line = fp.readline() if not line: break lineno = lineno + 1 # comment or blank line? if line.strip() == '' or line[0] in '#;': continue if line.split(None, 1)[0].lower() == 'rem' and line[0] in "rR": # no leading whitespace continue # continuation line? if line[0].isspace() and cursect is not None and optname: value = line.strip() if value: cursect[optname].append(value) # a section header or option header? else: # is it a section header? mo = self.SECTCRE.match(line) if mo: sectname = mo.group('header') if sectname in self._sections: cursect = self._sections[sectname] elif sectname == DEFAULTSECT: cursect = self._defaults else: cursect = self._dict() cursect['__name__'] = sectname self._sections[sectname] = cursect # So sections can't start with a continuation line optname = None # no section header in the file? elif cursect is None: raise MissingSectionHeaderError(fpname, lineno, line) # an option line? else: mo = self._optcre.match(line) if mo: optname, vi, optval = mo.group('option', 'vi', 'value') optname = self.optionxform(optname.rstrip()) # This check is fine because the OPTCRE cannot # match if it would set optval to None if optval is not None: if vi in ('=', ':') and ';' in optval: # ';' is a comment delimiter only if it follows # a spacing character pos = optval.find(';') if pos != -1 and optval[pos-1].isspace(): optval = optval[:pos] optval = optval.strip() # allow empty values if optval == '""': optval = '' cursect[optname] = [optval] else: # valueless option handling cursect[optname] = optval else: # a non-fatal parsing error occurred. set up the # exception but keep going. 
the exception will be # raised at the end of the file and will contain a # list of all bogus lines if not e: e = ParsingError(fpname) e.append(lineno, repr(line)) # if any parsing errors occurred, raise an exception if e: raise e # join the multi-line values collected while reading all_sections = [self._defaults] all_sections.extend(self._sections.values()) for options in all_sections: for name, val in options.items(): if isinstance(val, list): options[name] = '\n'.join(val) import UserDict as _UserDict class _Chainmap(_UserDict.DictMixin): """Combine multiple mappings for successive lookups. For example, to emulate Python's normal lookup sequence: import __builtin__ pylookup = _Chainmap(locals(), globals(), vars(__builtin__)) """ def __init__(self, *maps): self._maps = maps def __getitem__(self, key): for mapping in self._maps: try: return mapping[key] except KeyError: pass raise KeyError(key) def keys(self): result = [] seen = set() for mapping in self._maps: for key in mapping: if key not in seen: result.append(key) seen.add(key) return result class ConfigParser(RawConfigParser): def get(self, section, option, raw=False, vars=None): """Get an option value for a given section. If `vars' is provided, it must be a dictionary. The option is looked up in `vars' (if provided), `section', and in `defaults' in that order. All % interpolations are expanded in the return values, unless the optional argument `raw' is true. Values for interpolation keys are looked up in the same manner as the option. The section DEFAULT is special. 
""" sectiondict = {} try: sectiondict = self._sections[section] except KeyError: if section != DEFAULTSECT: raise NoSectionError(section) # Update with the entry specific variables vardict = {} if vars: for key, value in vars.items(): vardict[self.optionxform(key)] = value d = _Chainmap(vardict, sectiondict, self._defaults) option = self.optionxform(option) try: value = d[option] except KeyError: raise NoOptionError(option, section) if raw or value is None: return value else: return self._interpolate(section, option, value, d) def items(self, section, raw=False, vars=None): """Return a list of tuples with (name, value) for each option in the section. All % interpolations are expanded in the return values, based on the defaults passed into the constructor, unless the optional argument `raw' is true. Additional substitutions may be provided using the `vars' argument, which must be a dictionary whose contents overrides any pre-existing defaults. The section DEFAULT is special. """ d = self._defaults.copy() try: d.update(self._sections[section]) except KeyError: if section != DEFAULTSECT: raise NoSectionError(section) # Update with the entry specific variables if vars: for key, value in vars.items(): d[self.optionxform(key)] = value options = d.keys() if "__name__" in options: options.remove("__name__") if raw: return [(option, d[option]) for option in options] else: return [(option, self._interpolate(section, option, d[option], d)) for option in options] def _interpolate(self, section, option, rawval, vars): # do the string interpolation value = rawval depth = MAX_INTERPOLATION_DEPTH while depth: # Loop through this until it's done depth -= 1 if value and "%(" in value: value = self._KEYCRE.sub(self._interpolation_replace, value) try: value = value % vars except KeyError, e: raise InterpolationMissingOptionError( option, section, rawval, e.args[0]) else: break if value and "%(" in value: raise InterpolationDepthError(option, section, rawval) return value _KEYCRE = 
re.compile(r"%\(([^)]*)\)s|.") def _interpolation_replace(self, match): s = match.group(1) if s is None: return match.group() else: return "%%(%s)s" % self.optionxform(s) class SafeConfigParser(ConfigParser): def _interpolate(self, section, option, rawval, vars): # do the string interpolation L = [] self._interpolate_some(option, L, rawval, section, vars, 1) return ''.join(L) _interpvar_re = re.compile(r"%\(([^)]+)\)s") def _interpolate_some(self, option, accum, rest, section, map, depth): if depth > MAX_INTERPOLATION_DEPTH: raise InterpolationDepthError(option, section, rest) while rest: p = rest.find("%") if p < 0: accum.append(rest) return if p > 0: accum.append(rest[:p]) rest = rest[p:] # p is no longer used c = rest[1:2] if c == "%": accum.append("%") rest = rest[2:] elif c == "(": m = self._interpvar_re.match(rest) if m is None: raise InterpolationSyntaxError(option, section, "bad interpolation variable reference %r" % rest) var = self.optionxform(m.group(1)) rest = rest[m.end():] try: v = map[var] except KeyError: raise InterpolationMissingOptionError( option, section, rest, var) if "%" in v: self._interpolate_some(option, accum, v, section, map, depth + 1) else: accum.append(v) else: raise InterpolationSyntaxError( option, section, "'%%' must be followed by '%%' or '(', found: %r" % (rest,)) def set(self, section, option, value=None): """Set an option. 
Extend ConfigParser.set: check for string values.""" # The only legal non-string value if we allow valueless # options is None, so we need to check if the value is a # string if: # - we do not allow valueless options, or # - we allow valueless options but the value is not None if self._optcre is self.OPTCRE or value: if not isinstance(value, basestring): raise TypeError("option values must be strings") if value is not None: # check for bad percent signs: # first, replace all "good" interpolations tmp_value = value.replace('%%', '') tmp_value = self._interpvar_re.sub('', tmp_value) # then, check if there's a lone percent sign left if '%' in tmp_value: raise ValueError("invalid interpolation syntax in %r at " "position %d" % (value, tmp_value.find('%'))) ConfigParser.set(self, section, option, value)
apache-2.0
40223119/2015cda
static/Brython3.1.1-20150328-091302/Lib/importlib/__init__.py
610
3472
"""A pure Python implementation of import.""" __all__ = ['__import__', 'import_module', 'invalidate_caches'] # Bootstrap help ##################################################### # Until bootstrapping is complete, DO NOT import any modules that attempt # to import importlib._bootstrap (directly or indirectly). Since this # partially initialised package would be present in sys.modules, those # modules would get an uninitialised copy of the source version, instead # of a fully initialised version (either the frozen one or the one # initialised below if the frozen one is not available). import _imp # Just the builtin component, NOT the full Python module import sys from . import machinery #fix me brython try: import _frozen_importlib as _bootstrap except ImportError: from . import _bootstrap _bootstrap._setup(sys, _imp) else: # importlib._bootstrap is the built-in import, ensure we don't create # a second copy of the module. _bootstrap.__name__ = 'importlib._bootstrap' _bootstrap.__package__ = 'importlib' _bootstrap.__file__ = __file__.replace('__init__.py', '_bootstrap.py') sys.modules['importlib._bootstrap'] = _bootstrap # To simplify imports in test code _w_long = _bootstrap._w_long _r_long = _bootstrap._r_long # Fully bootstrapped at this point, import whatever you like, circular # dependencies and startup overhead minimisation permitting :) # Public API ######################################################### from ._bootstrap import __import__ def invalidate_caches(): """Call the invalidate_caches() method on all meta path finders stored in sys.meta_path (where implemented).""" for finder in sys.meta_path: if hasattr(finder, 'invalidate_caches'): finder.invalidate_caches() def find_loader(name, path=None): """Find the loader for the specified module. First, sys.modules is checked to see if the module was already imported. If so, then sys.modules[name].__loader__ is returned. If that happens to be set to None, then ValueError is raised. 
If the module is not in sys.modules, then sys.meta_path is searched for a suitable loader with the value of 'path' given to the finders. None is returned if no loader could be found. Dotted names do not have their parent packages implicitly imported. You will most likely need to explicitly import all parent packages in the proper order for a submodule to get the correct loader. """ try: loader = sys.modules[name].__loader__ if loader is None: raise ValueError('{}.__loader__ is None'.format(name)) else: return loader except KeyError: pass return _bootstrap._find_module(name, path) def import_module(name, package=None): """Import a module. The 'package' argument is required when performing a relative import. It specifies the package to use as the anchor point from which to resolve the relative import to an absolute import. """ level = 0 if name.startswith('.'): if not package: raise TypeError("relative imports require the 'package' argument") for character in name: if character != '.': break level += 1 return _bootstrap._gcd_import(name[level:], package, level) #need at least one import hook for importlib stuff to work. import basehook sys.meta_path.append(basehook.BaseHook())
gpl-3.0
phborba/dsgtoolsop
ProfileTool/pyqtgraph/console/template_pyqt5.py
31
5812
# -*- coding: utf-8 -*-

# Form implementation generated from reading ui file './pyqtgraph/console/template.ui'
#
# Created: Wed Mar 26 15:09:29 2014
#      by: PyQt5 UI code generator 5.0.1
#
# WARNING! All changes made in this file will be lost!

from PyQt5 import QtCore, QtGui, QtWidgets

class Ui_Form(object):
    """Auto-generated layout for the pyqtgraph console widget.

    Builds a vertical splitter holding: the read-only output pane plus the
    command input row, an (optional) history list, and an exception-handling
    group box. Do not edit by hand; regenerate from template.ui instead.
    """

    def setupUi(self, Form):
        # Top-level form and its single grid layout.
        Form.setObjectName("Form")
        Form.resize(710, 497)
        self.gridLayout = QtWidgets.QGridLayout(Form)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setSpacing(0)
        self.gridLayout.setObjectName("gridLayout")
        # Vertical splitter separating console, history, and exception panes.
        self.splitter = QtWidgets.QSplitter(Form)
        self.splitter.setOrientation(QtCore.Qt.Vertical)
        self.splitter.setObjectName("splitter")
        self.layoutWidget = QtWidgets.QWidget(self.splitter)
        self.layoutWidget.setObjectName("layoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
        self.verticalLayout.setContentsMargins(0, 0, 0, 0)
        self.verticalLayout.setObjectName("verticalLayout")
        # Read-only monospace output pane.
        self.output = QtWidgets.QPlainTextEdit(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Monospace")
        self.output.setFont(font)
        self.output.setReadOnly(True)
        self.output.setObjectName("output")
        self.verticalLayout.addWidget(self.output)
        # Input row: command line plus the History/Exceptions toggle buttons.
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.input = CmdInput(self.layoutWidget)
        self.input.setObjectName("input")
        self.horizontalLayout.addWidget(self.input)
        self.historyBtn = QtWidgets.QPushButton(self.layoutWidget)
        self.historyBtn.setCheckable(True)
        self.historyBtn.setObjectName("historyBtn")
        self.horizontalLayout.addWidget(self.historyBtn)
        self.exceptionBtn = QtWidgets.QPushButton(self.layoutWidget)
        self.exceptionBtn.setCheckable(True)
        self.exceptionBtn.setObjectName("exceptionBtn")
        self.horizontalLayout.addWidget(self.exceptionBtn)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Command-history list (second splitter pane).
        self.historyList = QtWidgets.QListWidget(self.splitter)
        font = QtGui.QFont()
        font.setFamily("Monospace")
        self.historyList.setFont(font)
        self.historyList.setObjectName("historyList")
        # Exception-handling controls (third splitter pane).
        self.exceptionGroup = QtWidgets.QGroupBox(self.splitter)
        self.exceptionGroup.setObjectName("exceptionGroup")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.exceptionGroup)
        self.gridLayout_2.setSpacing(0)
        self.gridLayout_2.setContentsMargins(-1, 0, -1, 0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.catchAllExceptionsBtn = QtWidgets.QPushButton(self.exceptionGroup)
        self.catchAllExceptionsBtn.setCheckable(True)
        self.catchAllExceptionsBtn.setObjectName("catchAllExceptionsBtn")
        self.gridLayout_2.addWidget(self.catchAllExceptionsBtn, 0, 1, 1, 1)
        self.catchNextExceptionBtn = QtWidgets.QPushButton(self.exceptionGroup)
        self.catchNextExceptionBtn.setCheckable(True)
        self.catchNextExceptionBtn.setObjectName("catchNextExceptionBtn")
        self.gridLayout_2.addWidget(self.catchNextExceptionBtn, 0, 0, 1, 1)
        self.onlyUncaughtCheck = QtWidgets.QCheckBox(self.exceptionGroup)
        self.onlyUncaughtCheck.setChecked(True)
        self.onlyUncaughtCheck.setObjectName("onlyUncaughtCheck")
        self.gridLayout_2.addWidget(self.onlyUncaughtCheck, 0, 2, 1, 1)
        self.exceptionStackList = QtWidgets.QListWidget(self.exceptionGroup)
        self.exceptionStackList.setAlternatingRowColors(True)
        self.exceptionStackList.setObjectName("exceptionStackList")
        self.gridLayout_2.addWidget(self.exceptionStackList, 2, 0, 1, 5)
        self.runSelectedFrameCheck = QtWidgets.QCheckBox(self.exceptionGroup)
        self.runSelectedFrameCheck.setChecked(True)
        self.runSelectedFrameCheck.setObjectName("runSelectedFrameCheck")
        self.gridLayout_2.addWidget(self.runSelectedFrameCheck, 3, 0, 1, 5)
        self.exceptionInfoLabel = QtWidgets.QLabel(self.exceptionGroup)
        self.exceptionInfoLabel.setObjectName("exceptionInfoLabel")
        self.gridLayout_2.addWidget(self.exceptionInfoLabel, 1, 0, 1, 5)
        self.clearExceptionBtn = QtWidgets.QPushButton(self.exceptionGroup)
        self.clearExceptionBtn.setEnabled(False)
        self.clearExceptionBtn.setObjectName("clearExceptionBtn")
        self.gridLayout_2.addWidget(self.clearExceptionBtn, 0, 4, 1, 1)
        # Horizontal spacer pushes the Clear button to the right edge.
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem, 0, 3, 1, 1)
        self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)

        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)

    def retranslateUi(self, Form):
        # Install all user-visible (translatable) strings.
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Console"))
        self.historyBtn.setText(_translate("Form", "History.."))
        self.exceptionBtn.setText(_translate("Form", "Exceptions.."))
        self.exceptionGroup.setTitle(_translate("Form", "Exception Handling"))
        self.catchAllExceptionsBtn.setText(_translate("Form", "Show All Exceptions"))
        self.catchNextExceptionBtn.setText(_translate("Form", "Show Next Exception"))
        self.onlyUncaughtCheck.setText(_translate("Form", "Only Uncaught Exceptions"))
        self.runSelectedFrameCheck.setText(_translate("Form", "Run commands in selected stack frame"))
        self.exceptionInfoLabel.setText(_translate("Form", "Exception Info"))
        self.clearExceptionBtn.setText(_translate("Form", "Clear Exception"))

from .CmdInput import CmdInput
gpl-2.0
v00d00dem0n/PyCrashCourse
work/ch16/highs_lows.py
1
1374
"""Plot daily high and low temperatures from a weather CSV file.

Reads `filename`, skipping rows with unparseable dates or missing
temperature values, then renders a filled high/low chart with matplotlib.
"""
import csv
from matplotlib import pyplot as plt
from datetime import datetime as dt

# Get dates and high/low temps from file.
#filename = 'death_valley_2014.csv'
#filename = 'sitka_weather_07-2014.csv'
#filename = 'sitka_weather_2014.csv'
filename = 'denver_weather_2001.csv'
with open(filename) as f:
    reader = csv.reader(f)
    header_row = next(reader)
    print(header_row)

    # Print each column index so the row[...] picks below are easy to audit.
    for index, column_header in enumerate(header_row):
        print(index, column_header.lstrip())

    dates, highs, lows = [], [], []
    for row in reader:
        try:
            # Assumes columns: 0 = date, 1 = high, 3 = low — TODO confirm
            # against the header printout above.
            current_date = dt.strptime(row[0], "%Y-%m-%d")
            high = int(row[1])
            low = int(row[3])
        except ValueError:
            # BUG FIX: the original printed `current_date`, which is unbound
            # when the very first row fails to parse (NameError) and stale
            # (previous row's date) otherwise. Report the raw field from the
            # offending row instead.
            print(row[0], 'missing data')
        else:
            dates.append(current_date)
            highs.append(high)
            lows.append(low)

# Plot data.
fig = plt.figure(dpi=128, figsize=(10, 6))
plt.plot(dates, highs, c='red')
plt.plot(dates, lows, c='blue')
plt.fill_between(dates, highs, lows, facecolor='blue', alpha=0.1)

# Format plot graph.
plt.title("Compare Daily High and Low Temps - 2001 - 2010", fontsize=18)
plt.xlabel('', fontsize=12)
fig.autofmt_xdate()
plt.ylabel("Temperature (F)", fontsize=12)
plt.tick_params(axis='both', which='major', labelsize=16)

#plt.savefig('death_valley_and_sitka_2014_fig.png', dpi=150)
plt.show()
gpl-3.0
gregbuehler/ansible-modules-extras
messaging/rabbitmq_policy.py
64
4428
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2013, John Dewey <john@dewey.ws>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

DOCUMENTATION = '''
---
module: rabbitmq_policy
short_description: Manage the state of policies in RabbitMQ.
description:
  - Manage the state of a virtual host in RabbitMQ.
version_added: "1.5"
author: John Dewey
options:
  name:
    description:
      - The name of the policy to manage.
    required: true
    default: null
  vhost:
    description:
      - The name of the vhost to apply to.
    required: false
    default: /
  pattern:
    description:
      - A regex of queues to apply the policy to.
    required: true
    default: null
  tags:
    description:
      - A dict or string describing the policy.
    required: true
    default: null
  priority:
    description:
      - The priority of the policy.
    required: false
    default: 0
  node:
    description:
      - Erlang node name of the rabbit we wish to configure.
    required: false
    default: rabbit
  state:
    description:
      - The state of the policy.
    default: present
    choices: [present, absent]
'''

EXAMPLES = '''
- name: ensure the default vhost contains the HA policy via a dict
  rabbitmq_policy: name=HA pattern='.*'
  args:
    tags:
      "ha-mode": all

- name: ensure the default vhost contains the HA policy
  rabbitmq_policy: name=HA pattern='.*' tags="ha-mode=all"
'''

# Hoisted from RabbitMqPolicy.set(): stdlib imports belong at module level.
import json


class RabbitMqPolicy(object):
    """Thin wrapper around ``rabbitmqctl`` for querying, setting and
    clearing policies on a vhost."""

    def __init__(self, module, name):
        self._module = module
        self._name = name
        self._vhost = module.params['vhost']
        self._pattern = module.params['pattern']
        self._tags = module.params['tags']
        self._priority = module.params['priority']
        self._node = module.params['node']
        self._rabbitmqctl = module.get_bin_path('rabbitmqctl', True)

    def _exec(self, args, run_in_check_mode=False):
        """Run ``rabbitmqctl -q -n <node> <subcmd> -p <vhost> ...`` and
        return its stdout split into lines.

        Mutating commands are skipped in check mode; read-only commands can
        opt in via *run_in_check_mode*. Returns an empty list when skipped.
        """
        if not self._module.check_mode or (self._module.check_mode and run_in_check_mode):
            cmd = [self._rabbitmqctl, '-q', '-n', self._node]
            # Insert the vhost flag right after the subcommand name.
            args.insert(1, '-p')
            args.insert(2, self._vhost)
            rc, out, err = self._module.run_command(cmd + args, check_rc=True)
            return out.splitlines()
        return list()

    def list(self):
        """Return True if a policy with our name exists on the vhost."""
        policies = self._exec(['list_policies'], True)
        for policy in policies:
            # Output columns are tab-separated; column 1 is the policy name.
            columns = policy.split('\t')
            if len(columns) < 2:
                # Defensively skip blank or malformed lines.
                continue
            if columns[1] == self._name:
                return True
        return False

    def set(self):
        """Create or update the policy via ``set_policy``."""
        args = ['set_policy']
        args.append(self._name)
        args.append(self._pattern)
        args.append(json.dumps(self._tags))
        args.append('--priority')
        # BUG FIX: run_command needs string arguments; priority may arrive
        # as an int from the playbook.
        args.append(str(self._priority))
        return self._exec(args)

    def clear(self):
        """Delete the policy via ``clear_policy``."""
        return self._exec(['clear_policy', self._name])


def main():
    arg_spec = dict(
        name=dict(required=True),
        vhost=dict(default='/'),
        pattern=dict(required=True),
        tags=dict(type='dict', required=True),
        priority=dict(default='0'),
        node=dict(default='rabbit'),
        state=dict(default='present', choices=['present', 'absent']),
    )

    module = AnsibleModule(
        argument_spec=arg_spec,
        supports_check_mode=True
    )

    name = module.params['name']
    state = module.params['state']
    rabbitmq_policy = RabbitMqPolicy(module, name)

    changed = False
    if rabbitmq_policy.list():
        if state == 'absent':
            rabbitmq_policy.clear()
            changed = True
        else:
            changed = False
    elif state == 'present':
        rabbitmq_policy.set()
        changed = True

    module.exit_json(changed=changed, name=name, state=state)

# import module snippets
from ansible.module_utils.basic import *
main()
gpl-3.0
gilbert-yuan/gooderp_addons
sell/report/sell_summary.py
6
5269
# -*- coding: utf-8 -*-
from odoo import tools
import odoo.addons.decimal_precision as dp
from odoo import models, fields


class ReportSellSummary(models.Model):
    """Sales summary report (销售汇总表).

    A read-only model (_auto = False) backed by the SQL view created in
    init() below, which aggregates stock move lines from sell deliveries
    (and negates sell returns) per partner/salesperson/goods/warehouse.
    """
    _name = 'report.sell.summary'
    _description = u'销售汇总表'
    _auto = False  # no table is created; init() builds a SQL view instead

    # NOTE: field labels are user-facing strings (Chinese UI) — do not alter.
    partner_id = fields.Many2one('partner', u'客户')
    department_id = fields.Many2one('staff.department', u'部门')
    user_id = fields.Many2one('res.users', u'销售员')
    goods = fields.Char(u'商品名')
    goods_id = fields.Many2one('goods', u'商品')
    brand_id = fields.Many2one('core.value', u'品牌')
    location = fields.Char(u'库位')
    uom = fields.Char(u'单位')
    uos = fields.Char(u'辅助单位')
    lot = fields.Char(u'批号')
    attribute_id = fields.Char(u'属性')
    warehouse = fields.Char(u'仓库')
    goods_qty = fields.Float(u'数量', digits=dp.get_precision('Quantity'))
    goods_uos_qty = fields.Float(
        u'辅助单位数量', digits=dp.get_precision('Quantity'))
    price = fields.Float(u'单价', digits=dp.get_precision('Price'))
    amount = fields.Float(u'销售收入', digits=dp.get_precision('Amount'))
    tax_amount = fields.Float(u'税额', digits=dp.get_precision('Amount'))
    subtotal = fields.Float(u'价税合计', digits=dp.get_precision('Amount'))
    margin = fields.Float(u'毛利', digits=dp.get_precision('Amount'))
    date = fields.Date(u'日期')
    last_receipt_date = fields.Date(string=u'最后收款日期')

    def init(self):
        """(Re)create the backing SQL view on module load/upgrade.

        Deliveries (origin 'sell.delivery.sell') count positive; other
        sell.delivery origins (returns) count negative. The CASE around the
        price column guards against division by zero when quantities net
        out to zero.
        """
        cr = self._cr
        tools.drop_view_if_exists(cr, 'report_sell_summary')
        # '%%' inside the SQL escapes a literal '%' for psycopg2.
        cr.execute(
            """
            create or replace view report_sell_summary as (
                SELECT min(wml.id) AS id,
                       wm.partner_id AS partner_id,
                       wm.user_id AS user_id,
                       staff.department_id AS department_id,
                       goods.name AS goods,
                       goods.id AS goods_id,
                       goods.brand AS brand_id,
                       loc.name AS location,
                       wml.lot AS lot,
                       attribute.name AS attribute_id,
                       uom.name AS uom,
                       uos.name AS uos,
                       wh.name AS warehouse,
                       wm.date AS date,
                       SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_uos_qty
                           ELSE - wml.goods_uos_qty END) AS goods_uos_qty,
                       SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_qty
                           ELSE - wml.goods_qty END) AS goods_qty,
                       (CASE WHEN SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_qty
                                     ELSE - wml.goods_qty END) = 0 THEN 0
                        ELSE
                            SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.amount
                                ELSE - wml.amount END)
                            / SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_qty
                                  ELSE - wml.goods_qty END)
                        END) AS price,
                       SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.amount
                           ELSE - wml.amount END) AS amount,
                       SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.tax_amount
                           ELSE - wml.tax_amount END) AS tax_amount,
                       SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.subtotal
                           ELSE - wml.subtotal END) AS subtotal,
                       (SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.amount
                            ELSE - wml.amount END)
                        - SUM(CASE WHEN wm.origin = 'sell.delivery.sell' THEN wml.goods_qty
                              ELSE - wml.goods_qty END) * wml.cost_unit) AS margin,
                       mi.get_amount_date AS last_receipt_date
                FROM wh_move_line wml
                LEFT JOIN wh_move wm ON wml.move_id = wm.id
                LEFT JOIN res_users ru ON wm.user_id = ru.id
                LEFT JOIN staff ON staff.user_id = ru.id
                LEFT JOIN warehouse wh ON wml.warehouse_dest_id = wh.id
                                       OR wml.warehouse_id = wh.id
                LEFT JOIN goods goods ON wml.goods_id = goods.id
                LEFT JOIN uom uom ON goods.uom_id = uom.id
                LEFT JOIN uom uos ON goods.uos_id = uos.id
                LEFT JOIN attribute attribute on attribute.id = wml.attribute_id
                LEFT JOIN location loc ON loc.goods_id = wml.goods_id
                LEFT JOIN sell_delivery AS sd ON wm.id = sd.sell_move_id
                LEFT JOIN money_invoice AS mi ON mi.id = sd.invoice_id
                WHERE wh.type = 'stock'
                  AND wml.state = 'done'
                  AND wm.origin like 'sell.delivery%%'
                  AND (goods.no_stock is null or goods.no_stock = FALSE)
                GROUP BY wm.partner_id, wm.user_id, staff.department_id,
                         goods.name, goods.id, goods.brand, loc.name, wml.lot,
                         attribute.name, uom.name, uos.name, wh.name,
                         wml.cost_unit, wm.date, mi.get_amount_date
                ORDER BY goods.name, wh.name, goods_qty asc
            )
            """)
agpl-3.0
Eksmo/django-activity-stream
actstream/models.py
1
6515
from datetime import datetime

from django.db import models
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.contrib.auth.models import User

from actstream import managers, settings as actstream_settings
from actstream.signals import action
from actstream.actions import action_handler


class Follow(models.Model):
    """
    Lets a user follow the activities of any specific actor.

    The followed object is addressed generically via
    (content_type, object_id), so any model instance can be followed.
    """
    user = models.ForeignKey(User)

    content_type = models.ForeignKey(ContentType)
    object_id = models.CharField(max_length=255)
    follow_object = generic.GenericForeignKey()
    # When True, only actions whose *actor* is the followed object are
    # surfaced; actions that merely target it are excluded.
    actor_only = models.BooleanField("Only follow actions where the object is "
                                     "the target.", default=True)

    objects = managers.FollowManager()

    class Meta:
        # A user may follow a given object at most once.
        unique_together = ('user', 'content_type', 'object_id')

    def __unicode__(self):
        return u'%s -> %s' % (self.user, self.follow_object)


class Action(models.Model):
    """
    Action model describing the actor acting out a verb (on an optional
    target).
    Nomenclature based on http://activitystrea.ms/specs/atom/1.0/

    Generalized Format::

        <actor> <verb> <time>
        <actor> <verb> <target> <time>
        <actor> <verb> <action_object> <target> <time>

    Examples::

        <justquick> <reached level 60> <1 minute ago>
        <brosner> <commented on> <pinax/pinax> <2 hours ago>
        <washingtontimes> <started follow> <justquick> <8 minutes ago>
        <mitsuhiko> <closed> <issue 70> on <mitsuhiko/flask> <about 2 hours ago>

    Unicode Representation::

        justquick reached level 60 1 minute ago
        mitsuhiko closed issue 70 on mitsuhiko/flask 3 hours ago

    HTML Representation::

        <a href="http://oebfare.com/">brosner</a> commented on <a href="http://github.com/pinax/pinax">pinax/pinax</a> 2 hours ago
    """
    # actor / target / action_object are all generic foreign keys; actor is
    # required at the application level, the other two are optional.
    actor_content_type = models.ForeignKey(ContentType, related_name='actor',
                                           null=True, blank=True)
    actor_object_id = models.CharField(max_length=255, null=True, blank=True)
    actor = generic.GenericForeignKey('actor_content_type', 'actor_object_id')

    verb = models.CharField(max_length=255)
    description = models.TextField(blank=True, null=True)

    target_content_type = models.ForeignKey(ContentType, related_name='target',
                                            blank=True, null=True)
    target_object_id = models.CharField(max_length=255, blank=True, null=True)
    target = generic.GenericForeignKey('target_content_type',
                                       'target_object_id')

    action_object_content_type = models.ForeignKey(ContentType,
        related_name='action_object', blank=True, null=True)
    action_object_object_id = models.CharField(max_length=255, blank=True,
                                               null=True)
    action_object = generic.GenericForeignKey('action_object_content_type',
                                              'action_object_object_id')

    # Callable default: evaluated at row-creation time, not at import time.
    timestamp = models.DateTimeField(default=datetime.now)

    public = models.BooleanField(default=True)

    # Manager class is configurable through the actstream settings module.
    objects = actstream_settings.MANAGER_MODULE()

    class Meta:
        # Newest actions first.
        ordering = ('-timestamp', )

    def __unicode__(self):
        """Render the action using the phrase forms from the class docstring,
        choosing the template by which of target/action_object are set."""
        ctx = {
            'actor': self.actor or actstream_settings.SYSTEM_NAME,
            'verb': self.verb,
            'action_object': self.action_object,
            'target': self.target,
            'timesince': self.timesince()
        }
        if self.target:
            if self.action_object:
                return _('%(actor)s %(verb)s %(action_object)s on %(target)s %(timesince)s ago') % ctx
            return _('%(actor)s %(verb)s %(target)s %(timesince)s ago') % ctx
        if self.action_object:
            return _('%(actor)s %(verb)s %(action_object)s %(timesince)s ago') % ctx
        return _('%(actor)s %(verb)s %(timesince)s ago') % ctx

    def actor_url(self):
        """
        Returns the URL to the ``actstream_actor`` view for the current actor.
        """
        return reverse('actstream_actor', None,
                       (self.actor_content_type.pk, self.actor_object_id))

    def target_url(self):
        """
        Returns the URL to the ``actstream_actor`` view for the current target.
        """
        return reverse('actstream_actor', None,
                       (self.target_content_type.pk, self.target_object_id))

    def action_object_url(self):
        """
        Returns the URL to the ``actstream_action_object`` view for the current
        action object
        """
        # NOTE(review): this reverses 'actstream_actor' even though the
        # docstring names the action-object view — looks like a copy-paste
        # slip from target_url(); confirm the intended URL name before
        # changing it.
        return reverse('actstream_actor', None,
                       (self.action_object_content_type.pk,
                        self.action_object_object_id))

    def timesince(self, now=None):
        """
        Shortcut for the ``django.utils.timesince.timesince`` function of the
        current timestamp.
        """
        from django.utils.timesince import timesince as timesince_
        return timesince_(self.timestamp, now)

    @models.permalink
    def get_absolute_url(self):
        return ('actstream.views.detail', [self.pk])


class HiddenAction(models.Model):
    """
    Actions, which are hidden by user
    """
    user = models.ForeignKey(User, related_name='+')
    action = models.ForeignKey(Action, related_name='hidden_by_user')


# convenient accessors: module-level shortcuts to the Action manager's
# stream-building methods.
actor_stream = Action.objects.actor
action_object_stream = Action.objects.action_object
target_stream = Action.objects.target
user_stream = Action.objects.user
model_stream = Action.objects.model_actions


def setup_generic_relations():
    """
    Set up GenericRelations for actionable models.

    For every registered model, attaches reverse relations named
    ``actor_actions`` / ``target_actions`` / ``action_object_actions``
    (skipping any the model already defines).
    """
    for model in actstream_settings.MODELS.values():
        if not model:
            continue
        for field in ('actor', 'target', 'action_object'):
            if not hasattr(model, '%s_actions' % field):
                generic.GenericRelation(Action,
                    content_type_field='%s_content_type' % field,
                    object_id_field='%s_object_id' % field,
                    related_name='actions_with_%s_%s_as_%s' % (
                        model._meta.app_label, model._meta.module_name, field),
                ).contribute_to_class(model, '%s_actions' % field)

                # @@@ I'm not entirely sure why this works
                setattr(Action, 'actions_with_%s_%s_as_%s' % (
                    model._meta.app_label, model._meta.module_name, field),
                    None)


# Runs at import time so relations exist before any queries are issued.
setup_generic_relations()

# connect the signal
action.connect(action_handler, dispatch_uid='actstream.models')
bsd-3-clause
return/swift
utils/sil-opt-verify-all-modules.py
65
5971
#!/usr/bin/env python # utils/sil-opt-verify-all-modules.py - Verifies Swift modules -*- python -*- # # This source file is part of the Swift.org open source project # # Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors # Licensed under Apache License v2.0 with Runtime Library Exception # # See https://swift.org/LICENSE.txt for license information # See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors from __future__ import print_function import argparse import glob import multiprocessing import os import pipes import subprocess import sys import tempfile def get_verify_toolchain_modules_commands(toolchain_dir, sil_opt): if sil_opt is None: sil_opt = os.path.join(toolchain_dir, 'usr', 'bin', 'sil-opt') toolchain_basename = os.path.basename(toolchain_dir) if toolchain_basename.startswith('Legacy'): return [] if toolchain_basename.startswith('XcodeDefault'): toolchain_name = 'XcodeDefault' if toolchain_basename.startswith('tvOS'): toolchain_name = 'tvOS' if toolchain_basename.startswith('OSX'): toolchain_name = 'OSX' if toolchain_basename.startswith('watchOS'): toolchain_name = 'watchOS' if toolchain_basename.startswith('iOS'): toolchain_name = 'iOS' return get_verify_resource_dir_modules_commands( os.path.join(toolchain_dir, 'usr', 'lib', 'swift'), os.path.join(toolchain_dir, 'usr', 'bin', 'sil-opt'), toolchain_name) def get_verify_build_dir_commands(build_dir, toolchain_name='XcodeDefault'): return get_verify_resource_dir_modules_commands( os.path.join(build_dir, 'lib', 'swift'), os.path.join(build_dir, 'bin', 'sil-opt'), toolchain_name) def get_verify_resource_dir_modules_commands( resource_dir, sil_opt, toolchain_name): print("================================================================") print("Resource dir: " + resource_dir) print("sil-opt path: " + sil_opt) known_platforms = [ ('appletvos', 'arm64', 'arm64-apple-tvos9.0'), ('appletvsimulator', 'x86_64', 'x86_64-apple-tvos9.0'), ('iphoneos', 'armv7', 
'armv7-apple-ios7.0'), ('iphoneos', 'armv7s', 'armv7s-apple-ios7.0'), ('iphoneos', 'arm64', 'arm64-apple-ios7.0'), ('iphonesimulator', 'i386', 'i386-apple-ios7.0'), ('iphonesimulator', 'x86_64', 'x86_64-apple-ios7.0'), ('macosx', 'x86_64', 'x86_64-apple-macosx10.9'), ('watchos', 'armv7k', 'armv7k-apple-watchos2.0'), ('watchsimulator', 'i386', 'i386-apple-watchos2.0'), ] commands = [] module_cache_dir = tempfile.mkdtemp( prefix="swift-testsuite-clang-module-cache") for (subdir, arch, triple) in known_platforms: modules_dir = os.path.join(resource_dir, subdir, arch) print(modules_dir) modules = glob.glob(os.path.join(modules_dir, '*.swiftmodule')) for module_file_name in modules: if module_file_name.endswith('XCTest.swiftmodule'): # FIXME: sil-opt does not have the '-F' option. continue commands.append([ 'xcrun', '--toolchain', toolchain_name, '--sdk', subdir, sil_opt, '-target', triple, '-resource-dir', resource_dir, '-module-cache-path', module_cache_dir, '-verify', module_file_name, ]) return commands def quote_shell_command(args): return " ".join([pipes.quote(a) for a in args]) def run_commands_in_parallel(commands): makefile = ".DEFAULT_GOAL := all\n" targets = [] for c in commands: target_name = "target" + str(len(targets)) targets.append(target_name) makefile += target_name + ":\n" makefile += \ "\t" + quote_shell_command(c) + \ " > {target}.stdout\n".format(target=target_name) makefile += "all: " + " ".join(targets) + "\n" temp_dir = tempfile.mkdtemp(prefix="swift-testsuite-main") with open(os.path.join(temp_dir, 'Makefile'), 'w') as makefile_file: makefile_file.write(makefile) max_processes = multiprocessing.cpu_count() subprocess.check_call([ 'make', '-C', temp_dir, '-j', str(max_processes), '--keep-going' ]) def main(): parser = argparse.ArgumentParser( formatter_class=argparse.RawDescriptionHelpFormatter, description="""Verifies Swift modules.""") parser.add_argument( "--sil-opt", help="use the specified 'sil-opt' binary", metavar="PATH") 
parser.add_argument( "--verify-build-dir", help="verify the Swift resource directory under the given build dir.", metavar="PATH") parser.add_argument( "--verify-xcode", help="verify the Xcode.app that is currently xcode-select'ed", action="store_true") args = parser.parse_args() if args.verify_build_dir is not None and args.verify_xcode: print("--verify-build-dir and --verify-xcode can't be used together") return 1 if args.verify_build_dir is not None: commands = get_verify_build_dir_commands(args.verify_build_dir) if args.verify_xcode: # Find Xcode. swift_path = subprocess.check_output(['xcrun', '--find', 'swift']) xcode_path = swift_path for _ in range(0, 7): xcode_path = os.path.dirname(xcode_path) toolchains_dir = os.path.join( xcode_path, 'Contents', 'Developer', 'Toolchains') toolchains = glob.glob(os.path.join(toolchains_dir, '*.xctoolchain')) commands = [] for toolchain_dir in toolchains: commands += get_verify_toolchain_modules_commands( toolchain_dir, args.sil_opt) run_commands_in_parallel(commands) return 0 if __name__ == "__main__": sys.exit(main())
apache-2.0
magos2/magOS
documentation/weblogic/WeblogicMonitor.py
2
2444
#!/usr/bin/python
#===================================================================
#
# Author : Copyright @AR 2017
# Script : use it to monitor your Weblogic performances(Jvm, Jdbc Pool,etc).
#          Tested on Weblogic 12c.
#          WLST is using Jython.
#          see: https://rmohan.com/?p=4576
# Run:
#   java -cp /opt/weblogic/wlserver/server/lib/weblogic.jar weblogic.WLST WeblogicMonitor.py
#
# NOTE: this is a WLST (Jython 2.x) script.  It relies on functions injected
# by the WLST runtime -- connect(), cd(), get(), cmo, domainRuntime(),
# exit() -- and on Python 2 syntax; it cannot run under plain CPython 3.
#====================================================================
import time as systime, sys, os, traceback
from sys import stdout

# Sampling parameters: one sample every 'sleep_time' seconds, for
# 'duration' minutes total.
sleep_time=10 #seconds
duration =1 #minutes

# Servers settings
servers='AdminServer' # comma-separated list; servers must be UP
servers=servers.split(',')
adminurl='t3://localhost:7001' #connect to AdminServer
username='weblogic'
password='weblogic1'
log_dir='./'  # directory where the per-server CSV reports are written

# Planned-but-unimplemented metric groups:
# JDBC Pool
# Application
# Threads
# JMS

#---- Function Definitions ------
def conn():
    # Authenticate against the admin server (WLST builtin 'connect').
    connect(username,password,adminurl)

#------------------
# JVM Stats
#------------------
def printJVMHeader(f):
    # CSV header matching the columns written by getJVMStats().
    print>>f, 'Date;HeapFree;HeapSize;HeapFreePercent;HeapSizeMax'

def getJVMStats(server,f):
    """Append one semicolon-separated JVM heap sample for 'server' to f."""
    cd('/ServerRuntimes/'+server+'/JVMRuntime/'+server)
    heapFree=get('HeapFreeCurrent')
    heapSize=get('HeapSizeCurrent')
    heapFreePercent=get('HeapFreePercent')
    heapSizeMax=get('HeapSizeMax')
    timestamp = systime.strftime('%Y-%m-%d %H:%M:%S')
    print>>f, '%s;%d;%d;%d;%d' %(timestamp, heapFree, heapSize, heapFreePercent, heapSizeMax)

#------------------
# Server state
#------------------
def getServerState(server):
    """Return the lifecycle state of 'server', or 'UNKNOWN' on any error."""
    try:
        cd('/ServerLifeCycleRuntimes/'+server)
        serverState = cmo.getState()
    except:
        serverState = 'UNKNOWN'
    return serverState;

#------------------
# Reports files
#------------------
# Map of server name -> open CSV file handle.
files={}

def openFiles():
    # One stats_<server>_JVM.csv file per monitored server.
    for server in servers:
        f=open(log_dir+'/stats_'+server+'_JVM.csv','w')
        files[server]=f
        printJVMHeader(f)

def closeFiles():
    for server in servers:
        files[server].close()

#------------------
# MAIN
#------------------
print 'Connecting...'
conn()
domainRuntime()
# NOTE(review): "Begging" is a typo for "Beginning"; left untouched because
# it is runtime output text.
print 'Begging Weblogic monitoring...'
try:
    openFiles()
    cpt=0
    # Sample until the configured duration (in seconds) has elapsed.
    while cpt<(duration*60):
        for server in servers :
            getJVMStats(server,files[server])
        systime.sleep(sleep_time)
        cpt=cpt+sleep_time
        # Progress indicator: one dot per sampling round.
        stdout.write('.')
        stdout.flush()
except Exception, err:
    print 'Error in Weblogic Monitor'
    traceback.print_exc()
# Close report files whether monitoring finished or failed.
closeFiles()
print ''
print 'End of monitoring'
exit()
gpl-3.0
pdellaert/ansible
lib/ansible/modules/network/files/net_get.py
47
2165
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2018, Ansible by Red Hat, inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'network'} DOCUMENTATION = """ --- module: net_get version_added: "2.6" author: "Deepak Agrawal (@dagrawal)" short_description: Copy a file from a network device to Ansible Controller description: - This module provides functionality to copy file from network device to ansible controller. extends_documentation_fragment: network_agnostic options: src: description: - Specifies the source file. The path to the source file can either be the full path on the network device or a relative path as per path supported by destination network device. required: true protocol: description: - Protocol used to transfer file. default: scp choices: ['scp', 'sftp'] dest: description: - Specifies the destination file. The path to the destination file can either be the full path on the Ansible control host or a relative path from the playbook or role root directory. default: - Same filename as specified in I(src). The path will be playbook root or role root directory if playbook is part of a role. requirements: - "scp" notes: - Some devices need specific configurations to be enabled before scp can work These configuration should be pre-configured before using this module e.g ios - C(ip scp server enable). - User privilege to do scp on network device should be pre-configured e.g. ios - need user privilege 15 by default for allowing scp. - Default destination of source file. """ EXAMPLES = """ - name: copy file from the network device to Ansible controller net_get: src: running_cfg_ios1.txt - name: copy file from ios to common location at /tmp net_get: src: running_cfg_sw1.txt dest : /tmp/ios1.txt """ RETURN = """ """
gpl-3.0
proversity-org/edx-platform
lms/djangoapps/branding/views.py
1
11500
"""Views for the branding app. """ import logging import urllib from django.conf import settings from django.contrib.staticfiles.storage import staticfiles_storage from django.core.cache import cache from django.core.urlresolvers import reverse from django.http import Http404, HttpResponse from django.shortcuts import redirect from django.utils import translation from django.utils.translation.trans_real import get_supported_language_variant from django.views.decorators.cache import cache_control from django.views.decorators.csrf import ensure_csrf_cookie import branding.api as branding_api import courseware.views.views import student.views from edxmako.shortcuts import marketing_link, render_to_response from openedx.core.djangoapps.lang_pref.api import released_languages from openedx.core.djangoapps.site_configuration import helpers as configuration_helpers from util.cache import cache_if_anonymous from util.json_request import JsonResponse log = logging.getLogger(__name__) @ensure_csrf_cookie @cache_if_anonymous() def index(request): ''' Redirects to main page -- info page if user authenticated, or marketing if not ''' if request.user.is_authenticated(): # Only redirect to dashboard if user has # courses in his/her dashboard. Otherwise UX is a bit cryptic. 
# In this case, we want to have the user stay on a course catalog # page to make it easier to browse for courses (and register) if configuration_helpers.get_value( 'ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER', settings.FEATURES.get('ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER', True)): return redirect(reverse('dashboard')) if settings.FEATURES.get('AUTH_USE_CERTIFICATES'): from openedx.core.djangoapps.external_auth.views import ssl_login # Set next URL to dashboard if it isn't set to avoid # caching a redirect to / that causes a redirect loop on logout if not request.GET.get('next'): req_new = request.GET.copy() req_new['next'] = reverse('dashboard') request.GET = req_new return ssl_login(request) enable_mktg_site = configuration_helpers.get_value( 'ENABLE_MKTG_SITE', settings.FEATURES.get('ENABLE_MKTG_SITE', False) ) if enable_mktg_site: marketing_urls = configuration_helpers.get_value( 'MKTG_URLS', settings.MKTG_URLS ) return redirect(marketing_urls.get('ROOT')) domain = request.META.get('HTTP_HOST') # keep specialized logic for Edge until we can migrate over Edge to fully use # configuration. if domain and 'edge.edx.org' in domain: return redirect(reverse("signin_user")) # we do not expect this case to be reached in cases where # marketing and edge are enabled return student.views.index(request, user=request.user) @ensure_csrf_cookie @cache_if_anonymous() def courses(request): """ Render the "find courses" page. If the marketing site is enabled, redirect to that. Otherwise, if subdomain branding is on, this is the university profile page. 
Otherwise, it's the edX courseware.views.views.courses page """ enable_mktg_site = configuration_helpers.get_value( 'ENABLE_MKTG_SITE', settings.FEATURES.get('ENABLE_MKTG_SITE', False) ) if enable_mktg_site: return redirect(marketing_link('COURSES'), permanent=True) if not settings.FEATURES.get('COURSES_ARE_BROWSABLE'): raise Http404 # we do not expect this case to be reached in cases where # marketing is enabled or the courses are not browsable return courseware.views.views.courses(request) def _footer_static_url(request, name): """Construct an absolute URL to a static asset. """ return request.build_absolute_uri(staticfiles_storage.url(name)) def _footer_css_urls(request, package_name): """Construct absolute URLs to CSS assets in a package. """ # We need this to work both in local development and in production. # Unfortunately, in local development we don't run the full asset pipeline, # so fully processed output files may not exist. # For this reason, we use the *css package* name(s), rather than the static file name # to identify the CSS file name(s) to include in the footer. # We then construct an absolute URI so that external sites (such as the marketing site) # can locate the assets. package = settings.PIPELINE_CSS.get(package_name, {}) paths = [package['output_filename']] if not settings.DEBUG else package['source_filenames'] return [ _footer_static_url(request, path) for path in paths ] def _render_footer_html(request, show_openedx_logo, include_dependencies, include_language_selector): """Render the footer as HTML. Arguments: show_openedx_logo (bool): If True, include the OpenEdX logo in the rendered HTML. include_dependencies (bool): If True, include JavaScript and CSS dependencies. include_language_selector (bool): If True, include a language selector with all supported languages. 
Returns: unicode """ bidi = 'rtl' if translation.get_language_bidi() else 'ltr' css_name = settings.FOOTER_CSS['openedx'][bidi] context = { 'hide_openedx_link': not show_openedx_logo, 'footer_js_url': _footer_static_url(request, 'js/footer-edx.js'), 'footer_css_urls': _footer_css_urls(request, css_name), 'bidi': bidi, 'include_dependencies': include_dependencies, 'include_language_selector': include_language_selector } return render_to_response("footer.html", context) @cache_control(must_revalidate=True, max_age=settings.FOOTER_BROWSER_CACHE_MAX_AGE) def footer(request): """Retrieve the branded footer. This end-point provides information about the site footer, allowing for consistent display of the footer across other sites (for example, on the marketing site and blog). It can be used in one of two ways: 1) A client renders the footer from a JSON description. 2) A browser loads an HTML representation of the footer and injects it into the DOM. The HTML includes CSS and JavaScript links. In case (2), we assume that the following dependencies are included on the page: a) JQuery (same version as used in edx-platform) b) font-awesome (same version as used in edx-platform) c) Open Sans web fonts Example: Retrieving the footer as JSON GET /api/branding/v1/footer Accepts: application/json { "navigation_links": [ { "url": "http://example.com/about", "name": "about", "title": "About" }, # ... ], "social_links": [ { "url": "http://example.com/social", "name": "facebook", "icon-class": "fa-facebook-square", "title": "Facebook", "action": "Sign up on Facebook!" }, # ... ], "mobile_links": [ { "url": "http://example.com/android", "name": "google", "image": "http://example.com/google.png", "title": "Google" }, # ... ], "legal_links": [ { "url": "http://example.com/terms-of-service.html", "name": "terms_of_service", "title': "Terms of Service" }, # ... 
], "openedx_link": { "url": "http://open.edx.org", "title": "Powered by Open edX", "image": "http://example.com/openedx.png" }, "logo_image": "http://example.com/static/images/logo.png", "copyright": "EdX, Open edX and their respective logos are trademarks or registered trademarks of edX Inc." } Example: Retrieving the footer as HTML GET /api/branding/v1/footer Accepts: text/html Example: Including the footer with the "Powered by Open edX" logo GET /api/branding/v1/footer?show-openedx-logo=1 Accepts: text/html Example: Retrieving the footer in a particular language GET /api/branding/v1/footer?language=en Accepts: text/html Example: Retrieving the footer with a language selector GET /api/branding/v1/footer?include-language-selector=1 Accepts: text/html Example: Retrieving the footer with all JS and CSS dependencies (for testing) GET /api/branding/v1/footer?include-dependencies=1 Accepts: text/html """ if not branding_api.is_enabled(): raise Http404 # Use the content type to decide what representation to serve accepts = request.META.get('HTTP_ACCEPT', '*/*') # Show the OpenEdX logo in the footer show_openedx_logo = bool(request.GET.get('show-openedx-logo', False)) # Include JS and CSS dependencies # This is useful for testing the end-point directly. 
include_dependencies = bool(request.GET.get('include-dependencies', False)) # Override the language if necessary language = request.GET.get('language', translation.get_language()) try: language = get_supported_language_variant(language) except LookupError: language = settings.LANGUAGE_CODE # Include a language selector include_language_selector = request.GET.get('include-language-selector', '') == '1' # Render the footer information based on the extension if 'text/html' in accepts or '*/*' in accepts: cache_params = { 'language': language, 'show_openedx_logo': show_openedx_logo, 'include_dependencies': include_dependencies } if include_language_selector: cache_params['language_selector_options'] = ','.join(sorted([lang.code for lang in released_languages()])) cache_key = u"branding.footer.{params}.html".format(params=urllib.urlencode(cache_params)) content = cache.get(cache_key) if content is None: with translation.override(language): content = _render_footer_html( request, show_openedx_logo, include_dependencies, include_language_selector ) cache.set(cache_key, content, settings.FOOTER_CACHE_TIMEOUT) return HttpResponse(content, status=200, content_type="text/html; charset=utf-8") elif 'application/json' in accepts: cache_key = u"branding.footer.{params}.json".format( params=urllib.urlencode({ 'language': language, 'is_secure': request.is_secure(), }) ) footer_dict = cache.get(cache_key) if footer_dict is None: with translation.override(language): footer_dict = branding_api.get_footer(is_secure=request.is_secure()) cache.set(cache_key, footer_dict, settings.FOOTER_CACHE_TIMEOUT) return JsonResponse(footer_dict, 200, content_type="application/json; charset=utf-8") else: return HttpResponse(status=406)
agpl-3.0
proxysh/Safejumper-for-Desktop
buildlinux/env64/local/lib/python2.7/types.py
304
2040
"""Define names for all type symbols known in the standard interpreter. Types that are part of optional modules (e.g. array) are not listed. """ import sys # Iterators in Python aren't a matter of type but of protocol. A large # and changing number of builtin types implement *some* flavor of # iterator. Don't check the type! Use hasattr to check for both # "__iter__" and "next" attributes instead. NoneType = type(None) TypeType = type ObjectType = object IntType = int LongType = long FloatType = float BooleanType = bool try: ComplexType = complex except NameError: pass StringType = str # StringTypes is already outdated. Instead of writing "type(x) in # types.StringTypes", you should use "isinstance(x, basestring)". But # we keep around for compatibility with Python 2.2. try: UnicodeType = unicode StringTypes = (StringType, UnicodeType) except NameError: StringTypes = (StringType,) BufferType = buffer TupleType = tuple ListType = list DictType = DictionaryType = dict def _f(): pass FunctionType = type(_f) LambdaType = type(lambda: None) # Same as FunctionType CodeType = type(_f.func_code) def _g(): yield 1 GeneratorType = type(_g()) class _C: def _m(self): pass ClassType = type(_C) UnboundMethodType = type(_C._m) # Same as MethodType _x = _C() InstanceType = type(_x) MethodType = type(_x._m) BuiltinFunctionType = type(len) BuiltinMethodType = type([].append) # Same as BuiltinFunctionType ModuleType = type(sys) FileType = file XRangeType = xrange try: raise TypeError except TypeError: tb = sys.exc_info()[2] TracebackType = type(tb) FrameType = type(tb.tb_frame) del tb SliceType = slice EllipsisType = type(Ellipsis) DictProxyType = type(TypeType.__dict__) NotImplementedType = type(NotImplemented) # For Jython, the following two types are identical GetSetDescriptorType = type(FunctionType.func_code) MemberDescriptorType = type(FunctionType.func_globals) del sys, _f, _g, _C, _x # Not for export
gpl-2.0
artificilabs/trellio
trellio/conf_manager/conf_client.py
2
11406
import copy import importlib import json import logging import os from trellio.services import TCPService, HTTPService from ..utils.log_handlers import BufferingSMTPHandler logger = logging.getLogger(__name__) GLOBAL_CONFIG = { "RONIN": False, "HOST_NAME": "", "ADMIN_EMAILS": [], "SERVICE_NAME": "", "SERVICE_VERSION": "", "REGISTRY_HOST": "", "REGISTRY_PORT": "", "REDIS_HOST": "", "REDIS_PORT": "", "HTTP_HOST": "", "TCP_HOST": "", "HTTP_PORT": "", "TCP_PORT": "", "SIGNALS": {}, "MIDDLEWARES": [], "APPS": [], "DATABASE_SETTINGS": { "database": "", "user": "", "password": "", "host": "", "port": "" }, "SMTP_SETTINGS": {} } class InvalidConfigurationError(Exception): pass class ConfigHandler: smtp_host = 'SMTP_HOST' smtp_user = 'SMTP_USER' smtp_port = 'SMTP_PORT' smtp_password = 'SMTP_PASSWORD' admin_emails = 'ADMIN_EMAILS' middleware_key = 'MIDDLEWARES' signal_key = 'SIGNALS' service_name_key = 'SERVICE_NAME' host_name_key = 'HOST_NAME' service_version_key = 'SERVICE_VERSION' reg_host_key = "REGISTRY_HOST" reg_port_key = "REGISTRY_PORT" redis_host_key = "REDIS_HOST" redis_port_key = "REDIS_PORT" http_host_key = "HTTP_HOST" tcp_host_key = "TCP_HOST" http_port_key = "HTTP_PORT" tcp_port_key = "TCP_PORT" database_key = 'DATABASE_SETTINGS' ronin_key = "RONIN" smtp_key = 'SMTP_SETTINGS' apps_key = 'APPS' # service_path_key = "SERVICE_PATH" def __init__(self, host_class): self.settings = None self.host = host_class @property def service_name(self): return self.settings[self.service_name_key] def get_tcp_clients(self): from trellio.services import TCPServiceClient tcp_clients = self.inheritors(TCPServiceClient) return tcp_clients def get_http_clients(self): from trellio.services import HTTPServiceClient http_clients = self.inheritors(HTTPServiceClient) return http_clients def get_subscribers(self): from trellio.pubsub import Subscriber subscriber_classes = self.inheritors(Subscriber) subscribers = [] for subs in subscriber_classes: s = subs() s.pubsub_host = 
self.settings[self.redis_host_key] s.pubsub_port = self.settings[self.redis_port_key] subscribers.append(s) return subscribers def configure_host(self, host): host.configure( host_name=self.settings[self.host_name_key], service_name=self.settings[self.service_name_key], service_version=self.settings[self.service_version_key], http_host=self.settings[self.http_host_key], http_port=self.settings[self.http_port_key], tcp_host=self.settings[self.tcp_host_key], tcp_port=self.settings[self.tcp_port_key], registry_host=self.settings[self.reg_host_key], registry_port=self.settings[self.reg_port_key], pubsub_host=self.settings[self.redis_host_key], pubsub_port=self.settings[self.reg_port_key], ronin=self.settings[self.ronin_key] ) def setup_host(self): host = self.host self.configure_host(host) publisher = self.get_publisher() subscribers = self.get_subscribers() if publisher: host.attach_publisher(publisher) if subscribers: host.attach_subscribers(subscribers) http_service = self.get_http_service() tcp_service = self.get_tcp_service() tcp_clients = self.get_tcp_clients() http_clients = self.get_http_clients() http_views = self.get_http_views() tcp_views = self.get_tcp_views() if not http_service: http_service = HTTPService(host.service_name, host.service_version, host.http_host, host.http_port) if not tcp_service: tcp_service = TCPService(host.service_name, host.service_version, host.tcp_host, host.tcp_port) self.enable_signals() self.enable_middlewares(http_service=http_service, http_views=http_views) if http_service: # self.register_http_views(http_service) host.attach_service(http_service) http_service.clients = [i() for i in http_clients + tcp_clients] # self.register_tcp_views(tcp_service) host.attach_service(tcp_service) if http_service: tcp_service.clients = http_service.clients if http_views: host.attach_http_views(http_views) for view_inst in host.get_tcp_views(): pass if tcp_views: host.attach_tcp_views(tcp_views) _tcp_service = host.get_tcp_service() 
_tcp_service.tcp_views = host._tcp_views host._smtp_handler = self.get_smtp_logging_handler() def get_database_settings(self): return self.settings[self.database_key] def set_config(self, config_path): settings = None with open(config_path) as f: settings = json.load(f) new_settings = copy.deepcopy(GLOBAL_CONFIG) new_settings.update(settings) self.settings = new_settings parent_dir = os.getcwd().split('/')[-1] client_path = parent_dir + '.clients' service_path = parent_dir + '.service' try: importlib.import_module(client_path) except: logger.warning('No clients found') service_imported = True service_exception = None try: importlib.import_module(service_path) except Exception as e: service_imported = False service_exception = e.__traceback__ if self.settings.get(self.apps_key): apps = self.settings[self.apps_key] for app in apps: views_path = parent_dir + '.{}.views'.format(app) try: importlib.import_module(views_path) except Exception as e: print(e.__traceback__.__str__()) else: if not service_imported: print(service_exception.__str__()) def get_smtp_logging_handler(self): if self.settings.get(self.smtp_key): keys = ["smtp_host", "smtp_port", "smtp_user", "smtp_password"] setting_keys = self.settings[self.smtp_key].keys() missing_keys = list(filter(lambda x: x not in setting_keys, keys)) if not missing_keys: handler = BufferingSMTPHandler(mailhost=self.settings[self.smtp_key]['smtp_host'], mailport=self.settings[self.smtp_key]['smtp_port'], fromaddr=self.settings[self.smtp_key]['smtp_user'], toaddrs=self.settings[self.admin_emails], subject='Error {} {}:{}'.format(self.settings[self.host_name_key], self.settings[ self.service_name_key].upper(), self.settings[self.service_version_key]), capacity=1, password=self.settings[self.smtp_key]['smtp_password']) handler.setLevel(logging.ERROR) if not self.settings[self.ronin_key]: return handler def get_http_service(self): from trellio.services import HTTPService http_service = None if HTTPService.__subclasses__(): 
service_sub_class = HTTPService.__subclasses__()[0] http_service = service_sub_class(self.settings[self.service_name_key], self.settings[self.service_version_key], self.settings[self.http_host_key], self.settings[self.http_port_key]) return http_service def get_tcp_service(self): from trellio.services import TCPService tcp_service = None if TCPService.__subclasses__(): service_sub_class = TCPService.__subclasses__()[0] tcp_service = service_sub_class(self.settings[self.service_name_key], self.settings[self.service_version_key], self.settings[self.tcp_host_key], self.settings[self.tcp_port_key]) return tcp_service def get_publisher(self): from trellio.pubsub import Publisher publisher = None if Publisher.__subclasses__(): publisher_sub_class = Publisher.__subclasses__()[0] publisher = publisher_sub_class(self.settings[self.service_name_key], self.settings[self.service_version_key], self.settings[self.redis_host_key], self.settings[self.redis_port_key]) return publisher def get_http_views(self): from trellio.views import HTTPView return self.inheritors(HTTPView) def get_tcp_views(self): from trellio.views import TCPView return self.inheritors(TCPView) def import_class_from_path(self, path): broken = path.split('.') class_name = broken[-1] module_name = '.'.join(broken[:-1]) module = importlib.import_module(module_name) class_value = getattr(module, class_name) return module, class_value def enable_middlewares(self, http_service=None, http_views=()): middlewares = self.settings[self.middleware_key] or [] middle_cls = [] for i in middlewares: module, class_value = self.import_class_from_path(i) if not class_value: raise InvalidConfigurationError else: middle_cls.append(class_value()) if http_service: http_service.middlewares = middle_cls for view in http_views: view.middlewares = middle_cls def enable_signals(self): ''' e.g signal_dict = {signal_path:signal_receiver_path_list, ....} :return: ''' signal_dict = self.settings[self.signal_key] or {} for i in 
signal_dict.keys(): sig_module, signal_class = self.import_class_from_path(i) for j in signal_dict[i]: recv_module, recv_coro = self.import_class_from_path(j) signal_class.register(recv_coro) # registering reciever @staticmethod def inheritors(klass): subclasses = set() work = [klass] while work: parent = work.pop() for child in parent.__subclasses__(): if child not in subclasses: subclasses.add(child) work.append(child) return list(subclasses)
mit
dobbscoin/dobbscoin-source
qa/rpc-tests/httpbasics.py
2
4763
#!/usr/bin/env python2 # Copyright (c) 2014 The Dobbscoin Core developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Test REST interface # from test_framework import DobbscoinTestFramework from util import * import base64 try: import http.client as httplib except ImportError: import httplib try: import urllib.parse as urlparse except ImportError: import urlparse class HTTPBasicsTest (DobbscoinTestFramework): def setup_nodes(self): return start_nodes(4, self.options.tmpdir, extra_args=[['-rpckeepalive=1'], ['-rpckeepalive=0'], [], []]) def run_test(self): ################################################# # lowlevel check for http persistent connection # ################################################# url = urlparse.urlparse(self.nodes[0].url) authpair = url.username + ':' + url.password headers = {"Authorization": "Basic " + base64.b64encode(authpair)} conn = httplib.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read(); assert_equal('"error":null' in out1, True) assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open! #send 2nd request without closing connection conn.request('POST', '/', '{"method": "getchaintips"}', headers) out2 = conn.getresponse().read(); assert_equal('"error":null' in out1, True) #must also response with a correct json-rpc message assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open! conn.close() #same should be if we add keep-alive because this should be the std. 
behaviour headers = {"Authorization": "Basic " + base64.b64encode(authpair), "Connection": "keep-alive"} conn = httplib.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read(); assert_equal('"error":null' in out1, True) assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open! #send 2nd request without closing connection conn.request('POST', '/', '{"method": "getchaintips"}', headers) out2 = conn.getresponse().read(); assert_equal('"error":null' in out1, True) #must also response with a correct json-rpc message assert_equal(conn.sock!=None, True) #according to http/1.1 connection must still be open! conn.close() #now do the same with "Connection: close" headers = {"Authorization": "Basic " + base64.b64encode(authpair), "Connection":"close"} conn = httplib.HTTPConnection(url.hostname, url.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read(); assert_equal('"error":null' in out1, True) assert_equal(conn.sock!=None, False) #now the connection must be closed after the response #node1 (2nd node) is running with disabled keep-alive option urlNode1 = urlparse.urlparse(self.nodes[1].url) authpair = urlNode1.username + ':' + urlNode1.password headers = {"Authorization": "Basic " + base64.b64encode(authpair)} conn = httplib.HTTPConnection(urlNode1.hostname, urlNode1.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read(); assert_equal('"error":null' in out1, True) assert_equal(conn.sock!=None, False) #connection must be closed because keep-alive was set to false #node2 (third node) is running with standard keep-alive parameters which means keep-alive is off urlNode2 = urlparse.urlparse(self.nodes[2].url) authpair = urlNode2.username + ':' + urlNode2.password headers = {"Authorization": "Basic " + 
base64.b64encode(authpair)} conn = httplib.HTTPConnection(urlNode2.hostname, urlNode2.port) conn.connect() conn.request('POST', '/', '{"method": "getbestblockhash"}', headers) out1 = conn.getresponse().read(); assert_equal('"error":null' in out1, True) assert_equal(conn.sock!=None, True) #connection must be closed because dobbscoind should use keep-alive by default if __name__ == '__main__': HTTPBasicsTest ().main ()
mit
mingpz2010/DMAT
benchmark/trilinos-11.12.1-Source/project-checkin-test-config.py
1
2273
# This file allows project-level configuration of the checkin-test system to # set project options that are required for all developers. Machine or package # specific options should not be placed in this file. # This is a dictionary that specifies project-specific options for the # checkin-test script that should be used by all developers. This # includes default command line arguments that must be passed to # checkin-test as well as settings for specific builds. configuration = { # The default command line arguments that should be used by all # developers. 'defaults': { '--send-email-to-on-push': 'trilinos-checkin-tests@software.sandia.gov', }, # CMake options for various build configurations. All entries in # this dictionary should be Python lists of -D arguments to cmake. 'cmake': { # Options that are common to all builds. 'common': [ '-DTPL_ENABLE_Pthread:BOOL=OFF', '-DTPL_ENABLE_BinUtils:BOOL=OFF', ], # Setup for the builds that should be run by default for a # standard checkin. This is a list of tuples so a preference # for build order can be expressed (e.g. if a project's # developers prefer one case to fail earlier than another). 'default-builds': [ # Options for the MPI_DEBUG build. ('MPI_DEBUG', [ '-DTPL_ENABLE_MPI:BOOL=ON', '-DCMAKE_BUILD_TYPE:STRING=RELEASE', '-DTrilinos_ENABLE_DEBUG:BOOL=ON', '-DTrilinos_ENABLE_CHECKED_STL:BOOL=ON', '-DTrilinos_ENABLE_DEBUG_SYMBOLS:BOOL=ON', '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON', '-DTeuchos_ENABLE_DEFAULT_STACKTRACE:BOOL=OFF', ]), # Options for the SERIAL_RELEASE build. ('SERIAL_RELEASE', [ '-DTPL_ENABLE_MPI:BOOL=OFF', '-DCMAKE_BUILD_TYPE:STRING=RELEASE', '-DTrilinos_ENABLE_DEBUG:BOOL=OFF', '-DTrilinos_ENABLE_CHECKED_STL:BOOL=OFF', '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=OFF', ]), ], # default-builds }, # cmake } # configuration
gpl-2.0
lamblin/pylearn2
pylearn2/packaged_dependencies/theano_linear/linear.py
44
17994
""" .. todo:: WRITEME """ import numpy import theano from theano import tensor from theano.compat.six.moves import reduce from pylearn2.utils import py_integer_types prod = numpy.prod def dot(x, y): """ Return the linear transformation of `y` by `x` or `x` by `y` when one or both of `x` and `y` is a LinearTransform instance Parameters ---------- x : WRITEME y : WRITEME Returns ------- WRITEME """ if isinstance(x, LinearTransform): return x.rmul(y) elif isinstance(y, LinearTransform): return y.lmul(x) else: return theano.dot(x,y) def dot_shape_from_shape(x, y): """ Compute `dot(x, y).shape` from the shape of the non-LinearTransform Parameters ---------- x : WRITEME y : WRITEME Returns ------- WRITEME """ if isinstance(x, LinearTransform): if type(y) != tuple: raise TypeError('y should be tuple', y) return x.col_shape() + x.split_right_shape(y, False)[1] elif isinstance(y, LinearTransform): if type(x) != tuple: raise TypeError('x should be tuple', x) return y.split_left_shape(x, False)[0] + y.row_shape() else: raise TypeError('One of x or y should be a LinearTransform') def dot_shape(x, y): """ Return the linear transformation of `y` by `x` or `x` by `y` when one or both of `x` and `y` is a LinearTransform instance Parameters ---------- x : WRITEME y : WRITEME Returns ------- WRITEME """ if isinstance(x, LinearTransform): return dot_shape_from_shape(x, tuple(y.shape)) elif isinstance(y, LinearTransform): return dot_shape_from_shape(tuple(x.shape), y) else: raise TypeError('One of x or y should be a LinearTransform') class LinearTransform(object): """ .. todo:: WRITEME Parameters ---------- params : list List of theano shared variables that parametrize the linear transformation """ def __init__(self, params): self.set_params(params) def set_params(self, params): """ .. todo:: WRITEME """ self._params = list(params) def params(self): """ .. todo:: WRITEME """ return list(self._params) def __str__(self): """ .. 
todo:: WRITEME """ return self.__class__.__name__ +'{}' # N.B. Don't implement __mul__ and __lmul__ because these mean # element-wise multiplication in numpy land. def __add__(self, other): """ .. todo:: WRITEME """ return Sum([self, other]) def __radd__(self, other): """ .. todo:: WRITEME """ return Sum([other, self]) # OVER-RIDE THIS (or rmul) def lmul(self, x): """ .. todo:: WRITEME """ # this is a circular definition with rmul so that they are both # implemented as soon as one of them is overridden by a base class. try: # dot(x, A) # = dot(A.T, x.T).T AT_xT = self.rmul_T(self.transpose_left(x, False)) rval = self.transpose_right(AT_xT, True) return rval except RuntimeError as e: if 'ecursion' in str(e): raise TypeError('either lmul or rmul_T must be implemented') raise except TypeError as e: if 'either lmul' in str(e): raise TypeError('either lmul or rmul_T must be implemented') def lmul_T(self, x): """ .. todo:: WRITEME """ # this is a circular definition with rmul so that they are both # implemented as soon as one of them is overridden by a base class. # dot(x, A.T) # = dot(A, x.T).T A_xT = self.rmul(self.transpose_right(x, True)) rval = self.transpose_left(A_xT, True) return rval # OVER-RIDE THIS (or lmul) def rmul(self, x): """ .. todo:: WRITEME """ # this is a circular definition with rmul so that they are both # implemented as soon as one of them is overridden by a base class. try: # dot(A, x) # = dot(x.T, A.T).T xT_AT = self.lmul_T(self.transpose_right(x, False)) rval = self.transpose_left(xT_AT, False) return rval except RuntimeError as e: if 'ecursion' in str(e): raise TypeError('either rmul or lmul_T must be implemented') raise except TypeError as e: if 'either lmul' in str(e): raise TypeError('either rmul or lmul_T must be implemented') def rmul_T(self, x): """ .. todo:: WRITEME """ # this is a circular definition with rmul so that they are both # implemented as soon as one of them is overridden by a base class. 
# dot (A.T, x) # = dot(x.T, A).T xT_A = self.lmul(self.transpose_left(x, True)) rval = self.transpose_right(xT_A, True) return rval def transpose_left(self, x, T): """ .. todo:: WRITEME """ # supposing self.row_shape is (R1,)... cshp = self.col_shape() if T: # C1 C2 C3 R1 R2 -> R1 R2 C1 C2 C3 ss = len(cshp) else: # R1 R2 C1 C2 C3 -> C1 C2 C3 R1 R2 ss = x.ndim - len(cshp) pattern = list(range(ss, x.ndim)) + list(range(ss)) return x.transpose(pattern) def transpose_right(self, x, T): """ .. todo:: WRITEME """ # supposing self.row_shape is (R1,)... rshp = self.row_shape() if T: # C1 C2 R1 -> R1 C1 C2 ss = len(rshp) else: # R1 C1 C2 -> C1 C2 R1 ss = x.ndim - len(rshp) pattern = list(range(ss, x.ndim)) + list(range(ss)) return x.transpose(pattern) def split_left_shape(self, xshp, T): """ .. todo:: WRITEME """ if type(xshp) != tuple: raise TypeError('need tuple', xshp) # supposing self.col_shape is (C1, C2, C3) ... cshp = self.col_shape() assert type(cshp) == tuple if T: # C1 C2 C3 R1 R2 ss = len(cshp) RR, CC = xshp[ss:], xshp[:ss] else: # R1 R2 C1 C2 C3 ss = len(xshp) - len(cshp) RR, CC = xshp[:ss], xshp[ss:] if len(CC) != len(cshp) or ( not all((isinstance(cc, theano.Variable) or cc == ci) for cc, ci in zip(CC, cshp))): raise ValueError('invalid left shape', dict(xshp=xshp, col_shape=cshp, xcols=CC, T=T)) if T: return CC, RR else: return RR, CC def split_right_shape(self, xshp, T): """ .. todo:: WRITEME """ if type(xshp) != tuple: raise TypeError('need tuple', xshp) # supposing self.row_shape is (R1, R2) ... 
rshp = self.row_shape() assert type(rshp) == tuple if T: # C1 C2 C3 R1 R2 ss = len(xshp) - len(rshp) RR, CC = xshp[ss:], xshp[:ss] else: # R1 R2 C1 C2 C3 ss = len(rshp) RR, CC = xshp[:ss], xshp[ss:] if len(RR) != len(rshp) or ( not all((isinstance(rr, theano.Variable) or rr == ri) for rr, ri in zip(RR, rshp))): raise ValueError('invalid left shape', dict(xshp=xshp, row_shape=rshp, xrows=RR, T=T)) if T: return CC, RR else: return RR, CC def transpose_left_shape(self, xshp, T): """ .. todo:: WRITEME """ RR, CC = self.split_left_shape(xshp, T) return CC + RR def transpose_right_shape(self, xshp, T): """ .. todo:: WRITEME """ RR, CC = self.split_right_shape(xshp, T) return CC + RR def is_valid_left_shape(self, xshp, T): """ .. todo:: WRITEME """ try: self.split_left_shape(xshp, T) return True except ValueError: return False def is_valid_right_shape(self, xshp, T): """ .. todo:: WRITEME """ try: self.split_right_shape(xshp, T) return True except ValueError: return False # OVER-RIDE THIS def row_shape(self): """ .. todo:: WRITEME """ raise NotImplementedError('override me') # OVER-RIDE THIS def col_shape(self): """ .. todo:: WRITEME """ raise NotImplementedError('override me') def transpose(self): """ .. todo:: WRITEME """ return TransposeTransform(self) T = property(lambda self: self.transpose()) # OVER-RIDE THIS def tile_columns(self, **kwargs): raise NotImplementedError('override me') class TransposeTransform(LinearTransform): """ .. todo:: WRITEME Parameters ---------- base : WRITEMe """ def __init__(self, base): super(TransposeTransform, self).__init__([]) self.base = base def transpose(self): """ .. todo:: WRITEME """ return self.base def params(self): """ .. todo:: WRITEME """ return self.base.params() def lmul(self, x): """ .. todo:: WRITEME """ return self.base.lmul_T(x) def lmul_T(self, x): """ .. todo:: WRITEME """ return self.base.lmul(x) def rmul(self, x): """ .. todo:: WRITEME """ return self.base.rmul_T(x) def rmul_T(self, x): """ .. 
todo:: WRITEME """ return self.base.rmul(x) def transpose_left(self, x, T): """ .. todo:: WRITEME """ return self.base.transpose_right(x, not T) def transpose_right(self, x, T): """ .. todo:: WRITEME """ return self.base.transpose_left(x, not T) def transpose_left_shape(self, x, T): """ .. todo:: WRITEME """ return self.base.transpose_right_shape(x, not T) def transpose_right_shape(self, x, T): """ .. todo:: WRITEME """ return self.base.transpose_left_shape(x, not T) def split_left_shape(self, x, T): """ .. todo:: WRITEME """ return self.base.split_right_shape(x, not T) def split_right_shape(self, x, T): """ .. todo:: WRITEME """ return self.base.split_left_shape(x, not T) def is_valid_left_shape(self, x, T): """ .. todo:: WRITEME """ return self.base.is_valid_right_shape(x, not T) def is_valid_right_shape(self, x, T): """ .. todo:: WRITEME """ return self.base.is_valid_left_shape(x, not T) def row_shape(self): """ .. todo:: WRITEME """ return self.base.col_shape() def col_shape(self): """ .. todo:: WRITEME """ return self.base.row_shape() def print_status(self): """ .. todo:: WRITEME """ return self.base.print_status() def tile_columns(self): """ .. todo:: WRITEME """ # yes, it would be nice to do rows, but since this is a visualization # and there *is* no tile_rows, we fall back on this. return self.base.tile_columns() use_concat_class = 0 if use_concat_class: # needs to be brought up to date with LinearTransform method names class Concat(LinearTransform): """ Form a linear map of the form [A B ... Z]. For this to be valid, A,B...Z must have identical row_shape. The col_shape defaults to being the concatenation of flattened output from each of A,B,...Z, but a col_shape tuple specified via the constructor will reshape that vector. 
Parameters ---------- Wlist : WRITEME col_shape : WRITEME """ def __init__(self, Wlist, col_shape=None): super(Concat, self).__init__([]) self._Wlist = list(Wlist) if not (isinstance(col_shape, py_integer_types) or isinstance(col_shape, (tuple, type(None)))): raise TypeError('col_shape must be int or int tuple') self._col_sizes = [prod(w.col_shape()) for w in Wlist] if col_shape is None: self.__col_shape = sum(self._col_sizes), elif isinstance(col_shape, py_integer_types): self.__col_shape = col_shape, else: self.__col_shape = tuple(col_shape) assert prod(self.__col_shape) == sum(self._col_sizes) self.__row_shape = Wlist[0].row_shape() for W in Wlist[1:]: if W.row_shape() != self.row_shape(): raise ValueError('Transforms has different row_shape', W.row_shape()) def params(self): rval = [] for W in self._Wlist: rval.extend(W.params()) return rval def _lmul(self, x, T): if T: if len(self.col_shape())>1: x2 = x.flatten(2) else: x2 = x n_rows = x2.shape[0] offset = 0 xWlist = [] assert len(self._col_sizes) == len(self._Wlist) for size, W in zip(self._col_sizes, self._Wlist): # split the output rows into pieces x_s = x2[:,offset:offset+size] # multiply each piece by one transform xWlist.append( W.lmul( x_s.reshape( (n_rows,)+W.col_shape()), T)) offset += size # sum the results rval = tensor.add(*xWlist) else: # multiply the input by each transform xWlist = [W.lmul(x,T).flatten(2) for W in self._Wlist] # join the resuls rval = tensor.join(1, *xWlist) return rval def _col_shape(self): return self.__col_shape def _row_shape(self): return self.__row_shape def _tile_columns(self): # hard-coded to produce RGB images arrays = [W._tile_columns() for W in self._Wlist] o_rows = sum([a.shape[0]+10 for a in arrays]) - 10 o_cols = max([a.shape[1] for a in arrays]) rval = numpy.zeros( (o_rows, o_cols, 3), dtype=arrays[0].dtype) offset = 0 for a in arrays: if a.ndim==2: a = a[:,:,None] #make greyscale broadcast over colors rval[offset:offset+a.shape[0], 0:a.shape[1],:] = a offset += 
a.shape[0] + 10 return rval def print_status(self): for W in self._Wlist: W.print_status() use_sum_class = 0 if use_sum_class: # needs to be brought up to date with LinearTransform method names class Sum(LinearTransform): def __init__(self, terms): self.terms = terms for t in terms[1:]: assert t.row_shape() == terms[0].row_shape() assert t.col_shape() == terms[0].col_shape() def params(self): rval = [] for t in self.terms: rval.extend(t.params()) return rval def _lmul(self, x, T): raise NotImplementedError() #results = [t._lmul(x, T)] #return tensor.add(*results) def _row_shape(self): return self.terms[0].col_shape() def _col_shape(self): return self.terms[0].row_shape() def print_status(self): raise NotImplementedError('TODO: fix old broken implementation') #for t in terms: # t.print_status() def _tile_columns(self): raise NotImplementedError('TODO') use_compose_class = 0 if use_compose_class: # This is incomplete class Compose(LinearTransform): """ For linear transformations [A,B,C] this represents the linear transformation A(B(C(x))). """ def __init__(self, linear_transformations): self._linear_transformations = linear_transformations def dot(self, x): return reduce( lambda t,a:t.dot(a), self._linear_transformations, x) def transpose_dot(self, x): return reduce( lambda t, a: t.transpose_dot(a), reversed(self._linear_transformations), x) def params(self): return reduce( lambda t, a: a + t.params(), self._linear_transformations, [])
bsd-3-clause
marlengit/BitcoinUnlimited
qa/rpc-tests/mempool_spendcoinbase.py
1
2409
#!/usr/bin/env python3 # Copyright (c) 2014-2015 The Bitcoin Core developers # Copyright (c) 2015-2016 The Bitcoin Unlimited developers # Distributed under the MIT software license, see the accompanying # file COPYING or http://www.opensource.org/licenses/mit-license.php. # # Test spending coinbase transactions. # The coinbase transaction in block N can appear in block # N+100... so is valid in the mempool when the best block # height is N+99. # This test makes sure coinbase spends that will be mature # in the next block are accepted into the memory pool, # but less mature coinbase spends are NOT. # from test_framework.test_framework import BitcoinTestFramework from test_framework.util import * # Create one-input, one-output, no-fee transaction: class MempoolSpendCoinbaseTest(BitcoinTestFramework): def setup_network(self): # Just need one node for this test args = ["-checkmempool", "-debug=mempool"] self.nodes = [] self.nodes.append(start_node(0, self.options.tmpdir, args)) self.is_network_split = False def run_test(self): chain_height = self.nodes[0].getblockcount() assert_equal(chain_height, 200) node0_address = self.nodes[0].getnewaddress() # Coinbase at height chain_height-100+1 ok in mempool, should # get mined. Coinbase at height chain_height-100+2 is # is too immature to spend. b = [ self.nodes[0].getblockhash(n) for n in range(101, 103) ] coinbase_txids = [ self.nodes[0].getblock(h)['tx'][0] for h in b ] spends_raw = [ create_tx(self.nodes[0], txid, node0_address, 50) for txid in coinbase_txids ] spend_101_id = self.nodes[0].sendrawtransaction(spends_raw[0]) # coinbase at height 102 should be too immature to spend assert_raises(JSONRPCException, self.nodes[0].sendrawtransaction, spends_raw[1]) # mempool should have just spend_101: assert_equal(self.nodes[0].getrawmempool(), [ spend_101_id ]) # mine a block, spend_101 should get confirmed self.nodes[0].generate(1) assert_equal(set(self.nodes[0].getrawmempool()), set()) # ... 
and now height 102 can be spent: spend_102_id = self.nodes[0].sendrawtransaction(spends_raw[1]) assert_equal(self.nodes[0].getrawmempool(), [ spend_102_id ]) if __name__ == '__main__': MempoolSpendCoinbaseTest().main()
mit
baylee/django
django/db/models/base.py
8
74321
from __future__ import unicode_literals import copy import inspect import warnings from itertools import chain from django.apps import apps from django.conf import settings from django.core import checks from django.core.exceptions import ( NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned, ObjectDoesNotExist, ValidationError, ) from django.db import ( DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connections, router, transaction, ) from django.db.models import signals from django.db.models.constants import LOOKUP_SEP from django.db.models.deletion import CASCADE, Collector from django.db.models.fields import AutoField from django.db.models.fields.related import ( ForeignObjectRel, ManyToOneRel, OneToOneField, lazy_related_operation, resolve_relation, ) from django.db.models.manager import Manager from django.db.models.options import Options from django.db.models.query import Q from django.db.models.utils import make_model_tuple from django.utils import six from django.utils.deprecation import RemovedInDjango20Warning from django.utils.encoding import ( force_str, force_text, python_2_unicode_compatible, ) from django.utils.functional import curry from django.utils.six.moves import zip from django.utils.text import capfirst, get_text_list from django.utils.translation import ugettext_lazy as _ from django.utils.version import get_version @python_2_unicode_compatible class Deferred(object): def __repr__(self): return str('<Deferred field>') def __str__(self): return str('<Deferred field>') DEFERRED = Deferred() def subclass_exception(name, parents, module, attached_to=None): """ Create exception subclass. Used by ModelBase below. If 'attached_to' is supplied, the exception will be created in a way that allows it to be pickled, assuming the returned exception class will be added as an attribute to the 'attached_to' class. 
""" class_dict = {'__module__': module} if attached_to is not None: def __reduce__(self): # Exceptions are special - they've got state that isn't # in self.__dict__. We assume it is all in self.args. return (unpickle_inner_exception, (attached_to, name), self.args) def __setstate__(self, args): self.args = args class_dict['__reduce__'] = __reduce__ class_dict['__setstate__'] = __setstate__ return type(name, parents, class_dict) class ModelBase(type): """ Metaclass for all models. """ def __new__(cls, name, bases, attrs): super_new = super(ModelBase, cls).__new__ # Also ensure initialization is only performed for subclasses of Model # (excluding Model class itself). parents = [b for b in bases if isinstance(b, ModelBase)] if not parents: return super_new(cls, name, bases, attrs) # Create the class. module = attrs.pop('__module__') new_class = super_new(cls, name, bases, {'__module__': module}) attr_meta = attrs.pop('Meta', None) abstract = getattr(attr_meta, 'abstract', False) if not attr_meta: meta = getattr(new_class, 'Meta', None) else: meta = attr_meta base_meta = getattr(new_class, '_meta', None) app_label = None # Look for an application configuration to attach the model to. app_config = apps.get_containing_app_config(module) if getattr(meta, 'app_label', None) is None: if app_config is None: if not abstract: raise RuntimeError( "Model class %s.%s doesn't declare an explicit " "app_label and isn't in an application in " "INSTALLED_APPS." 
% (module, name) ) else: app_label = app_config.label new_class.add_to_class('_meta', Options(meta, app_label)) if not abstract: new_class.add_to_class( 'DoesNotExist', subclass_exception( str('DoesNotExist'), tuple( x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (ObjectDoesNotExist,), module, attached_to=new_class)) new_class.add_to_class( 'MultipleObjectsReturned', subclass_exception( str('MultipleObjectsReturned'), tuple( x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (MultipleObjectsReturned,), module, attached_to=new_class)) if base_meta and not base_meta.abstract: # Non-abstract child classes inherit some attributes from their # non-abstract parent (unless an ABC comes before it in the # method resolution order). if not hasattr(meta, 'ordering'): new_class._meta.ordering = base_meta.ordering if not hasattr(meta, 'get_latest_by'): new_class._meta.get_latest_by = base_meta.get_latest_by is_proxy = new_class._meta.proxy # If the model is a proxy, ensure that the base class # hasn't been swapped out. if is_proxy and base_meta and base_meta.swapped: raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)) # Add all attributes to the class. for obj_name, obj in attrs.items(): new_class.add_to_class(obj_name, obj) # All the fields of any type declared on this model new_fields = chain( new_class._meta.local_fields, new_class._meta.local_many_to_many, new_class._meta.private_fields ) field_names = {f.name for f in new_fields} # Basic setup for proxy models. if is_proxy: base = None for parent in [kls for kls in parents if hasattr(kls, '_meta')]: if parent._meta.abstract: if parent._meta.fields: raise TypeError( "Abstract base class containing model fields not " "permitted for proxy model '%s'." 
% name ) else: continue if base is None: base = parent elif parent._meta.concrete_model is not base._meta.concrete_model: raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name) if base is None: raise TypeError("Proxy model '%s' has no non-abstract model base class." % name) new_class._meta.setup_proxy(base) new_class._meta.concrete_model = base._meta.concrete_model else: new_class._meta.concrete_model = new_class # Collect the parent links for multi-table inheritance. parent_links = {} for base in reversed([new_class] + parents): # Conceptually equivalent to `if base is Model`. if not hasattr(base, '_meta'): continue # Skip concrete parent classes. if base != new_class and not base._meta.abstract: continue # Locate OneToOneField instances. for field in base._meta.local_fields: if isinstance(field, OneToOneField): related = resolve_relation(new_class, field.remote_field.model) parent_links[make_model_tuple(related)] = field # Track fields inherited from base models. inherited_attributes = set() # Do the appropriate setup for any model parents. for base in new_class.mro(): if base not in parents or not hasattr(base, '_meta'): # Things without _meta aren't functional models, so they're # uninteresting parents. inherited_attributes |= set(base.__dict__.keys()) continue parent_fields = base._meta.local_fields + base._meta.local_many_to_many if not base._meta.abstract: # Check for clashes between locally declared fields and those # on the base classes. for field in parent_fields: if field.name in field_names: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: inherited_attributes.add(field.name) # Concrete classes... 
base = base._meta.concrete_model base_key = make_model_tuple(base) if base_key in parent_links: field = parent_links[base_key] elif not is_proxy: attr_name = '%s_ptr' % base._meta.model_name field = OneToOneField( base, on_delete=CASCADE, name=attr_name, auto_created=True, parent_link=True, ) if attr_name in field_names: raise FieldError( "Auto-generated field '%s' in class %r for " "parent_link to base class %r clashes with " "declared field of the same name." % ( attr_name, name, base.__name__, ) ) # Only add the ptr field if it's not already present; # e.g. migrations will already have it specified if not hasattr(new_class, attr_name): new_class.add_to_class(attr_name, field) else: field = None new_class._meta.parents[base] = field else: base_parents = base._meta.parents.copy() # Add fields from abstract base class if it wasn't overridden. for field in parent_fields: if (field.name not in field_names and field.name not in new_class.__dict__ and field.name not in inherited_attributes): new_field = copy.deepcopy(field) new_class.add_to_class(field.name, new_field) # Replace parent links defined on this base by the new # field. It will be appropriately resolved if required. if field.one_to_one: for parent, parent_link in base_parents.items(): if field == parent_link: base_parents[parent] = new_field # Pass any non-abstract parent classes onto child. new_class._meta.parents.update(base_parents) # Inherit private fields (like GenericForeignKey) from the parent # class for field in base._meta.private_fields: if field.name in field_names: if not base._meta.abstract: raise FieldError( 'Local field %r in class %r clashes with field of ' 'the same name from base class %r.' % ( field.name, name, base.__name__, ) ) else: new_class.add_to_class(field.name, copy.deepcopy(field)) if abstract: # Abstract base models can't be instantiated and don't appear in # the list of models for an app. We do the final setup for them a # little differently from normal models. 
attr_meta.abstract = False new_class.Meta = attr_meta return new_class new_class._prepare() new_class._meta.apps.register_model(new_class._meta.app_label, new_class) return new_class def add_to_class(cls, name, value): # We should call the contribute_to_class method only if it's bound if not inspect.isclass(value) and hasattr(value, 'contribute_to_class'): value.contribute_to_class(cls, name) else: setattr(cls, name, value) def _prepare(cls): """ Creates some methods once self._meta has been populated. """ opts = cls._meta opts._prepare(cls) if opts.order_with_respect_to: cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True) cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False) # Defer creating accessors on the foreign class until it has been # created and registered. If remote_field is None, we're ordering # with respect to a GenericForeignKey and don't know what the # foreign class is - we'll add those accessors later in # contribute_to_class(). if opts.order_with_respect_to.remote_field: wrt = opts.order_with_respect_to remote = wrt.remote_field.model lazy_related_operation(make_foreign_order_accessors, cls, remote) # Give the class a docstring -- its definition. if cls.__doc__ is None: cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields)) get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower) if get_absolute_url_override: setattr(cls, 'get_absolute_url', get_absolute_url_override) if not opts.managers or cls._requires_legacy_default_manager(): if any(f.name == 'objects' for f in opts.fields): raise ValueError( "Model %s must specify a custom Manager, because it has a " "field named 'objects'." 
% cls.__name__ ) manager = Manager() manager.auto_created = True cls.add_to_class('objects', manager) signals.class_prepared.send(sender=cls) def _requires_legacy_default_manager(cls): # RemovedInDjango20Warning opts = cls._meta if opts.manager_inheritance_from_future: return False future_default_manager = opts.default_manager # Step 1: Locate a manager that would have been promoted # to default manager with the legacy system. for manager in opts.managers: originating_model = manager._originating_model if (cls is originating_model or cls._meta.proxy or originating_model._meta.abstract): if manager is not cls._default_manager and not opts.default_manager_name: warnings.warn( "Managers from concrete parents will soon qualify as default " "managers if they appear before any other managers in the " "MRO. As a result, '{legacy_default_manager}' declared on " "'{legacy_default_manager_model}' will no longer be the " "default manager for '{model}' in favor of " "'{future_default_manager}' declared on " "'{future_default_manager_model}'. " "You can redeclare '{legacy_default_manager}' on '{cls}' " "to keep things the way they are or you can switch to the new " "behavior right away by setting " "`Meta.manager_inheritance_from_future` to `True`.".format( cls=cls.__name__, model=opts.label, legacy_default_manager=manager.name, legacy_default_manager_model=manager._originating_model._meta.label, future_default_manager=future_default_manager.name, future_default_manager_model=future_default_manager._originating_model._meta.label, ), RemovedInDjango20Warning, 2 ) opts.default_manager_name = manager.name opts._expire_cache() break # Step 2: Since there are managers but none of them qualified as # default managers under the legacy system (meaning that there are # managers from concrete parents that would be promoted under the # new system), we need to create a new Manager instance for the # 'objects' attribute as a deprecation shim. 
else: # If the "future" default manager was auto created there is no # point warning the user since it's basically the same manager. if not future_default_manager.auto_created: warnings.warn( "Managers from concrete parents will soon qualify as " "default managers. As a result, the 'objects' manager " "won't be created (or recreated) automatically " "anymore on '{model}' and '{future_default_manager}' " "declared on '{future_default_manager_model}' will be " "promoted to default manager. You can declare " "explicitly `objects = models.Manager()` on '{cls}' " "to keep things the way they are or you can switch " "to the new behavior right away by setting " "`Meta.manager_inheritance_from_future` to `True`.".format( cls=cls.__name__, model=opts.label, future_default_manager=future_default_manager.name, future_default_manager_model=future_default_manager._originating_model._meta.label, ), RemovedInDjango20Warning, 2 ) return True @property def _base_manager(cls): return cls._meta.base_manager @property def _default_manager(cls): return cls._meta.default_manager class ModelState(object): """ A class for storing instance state """ def __init__(self, db=None): self.db = db # If true, uniqueness validation checks will consider this a new, as-yet-unsaved object. # Necessary for correct validation of new instances of objects with explicit (non-auto) PKs. # This impacts validation only; it has no effect on the actual save. self.adding = True class Model(six.with_metaclass(ModelBase)): def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. 
args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): if val is DEFERRED: continue setattr(self, field.attname, val) else: # Slower, kwargs-ready version. fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): if val is DEFERRED: continue setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.remote_field, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # Virtual field if field.attname not in kwargs and field.column is None: continue if kwargs: if isinstance(field.remote_field, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. 
val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. if rel_obj is not DEFERRED: setattr(self, field.name, rel_obj) else: if val is not DEFERRED: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: # Any remaining kwargs must correspond to properties or # virtual fields. if (isinstance(getattr(self.__class__, prop), property) or self._meta.get_field(prop)): if kwargs[prop] is not DEFERRED: setattr(self, prop, kwargs[prop]) del kwargs[prop] except (AttributeError, FieldDoesNotExist): pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self) @classmethod def from_db(cls, db, field_names, values): if len(values) != len(cls._meta.concrete_fields): values = list(values) values.reverse() values = [values.pop() if f.attname in field_names else DEFERRED for f in cls._meta.concrete_fields] new = cls(*values) new._state.adding = False new._state.db = db return new def __repr__(self): try: u = six.text_type(self) except (UnicodeEncodeError, UnicodeDecodeError): u = '[Bad Unicode data]' return force_str('<%s: %s>' % (self.__class__.__name__, u)) def __str__(self): if six.PY2 and hasattr(self, '__unicode__'): return force_text(self).encode('utf-8') return str('%s object' % self.__class__.__name__) def __eq__(self, other): if not isinstance(other, Model): return False if self._meta.concrete_model != other._meta.concrete_model: return False my_pk = self._get_pk_val() if my_pk is None: return self is other return my_pk == other._get_pk_val() def __ne__(self, other): return not self.__eq__(other) def __hash__(self): if self._get_pk_val() is None: raise TypeError("Model 
instances without primary key value are unhashable") return hash(self._get_pk_val()) def __reduce__(self): """ Provides pickling support. Normally, this just dispatches to Python's standard handling. However, for models with deferred field loading, we need to do things manually, as they're dynamically created classes and only module-level classes can be pickled by the default path. """ data = self.__dict__ data[DJANGO_VERSION_PICKLE_KEY] = get_version() class_id = self._meta.app_label, self._meta.object_name return model_unpickle, (class_id,), data def __setstate__(self, state): msg = None pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) if pickled_version: current_version = get_version() if current_version != pickled_version: msg = ( "Pickled model instance's Django version %s does not match " "the current version %s." % (pickled_version, current_version) ) else: msg = "Pickled model instance's Django version is not specified." if msg: warnings.warn(msg, RuntimeWarning, stacklevel=2) self.__dict__.update(state) def _get_pk_val(self, meta=None): if not meta: meta = self._meta return getattr(self, meta.pk.attname) def _set_pk_val(self, value): return setattr(self, self._meta.pk.attname, value) pk = property(_get_pk_val, _set_pk_val) def get_deferred_fields(self): """ Returns a set containing names of deferred fields for this instance. """ return { f.attname for f in self._meta.concrete_fields if f.attname not in self.__dict__ } def refresh_from_db(self, using=None, fields=None): """ Reloads field values from the database. By default, the reloading happens from the database this instance was loaded from, or by the read router if this instance wasn't loaded from any database. The using parameter will override the default. Fields can be used to specify which fields to reload. The fields should be an iterable of field attnames. If fields is None, then all non-deferred fields are reloaded. 
When accessing deferred fields of an instance, the deferred loading of the field will call this method. """ if fields is not None: if len(fields) == 0: return if any(LOOKUP_SEP in f for f in fields): raise ValueError( 'Found "%s" in fields argument. Relations and transforms ' 'are not allowed in fields.' % LOOKUP_SEP) db = using if using is not None else self._state.db db_instance_qs = self.__class__._default_manager.using(db).filter(pk=self.pk) # Use provided fields, if not set then reload all non-deferred fields. deferred_fields = self.get_deferred_fields() if fields is not None: fields = list(fields) db_instance_qs = db_instance_qs.only(*fields) elif deferred_fields: fields = [f.attname for f in self._meta.concrete_fields if f.attname not in deferred_fields] db_instance_qs = db_instance_qs.only(*fields) db_instance = db_instance_qs.get() non_loaded_fields = db_instance.get_deferred_fields() for field in self._meta.concrete_fields: if field.attname in non_loaded_fields: # This field wasn't refreshed - skip ahead. continue setattr(self, field.attname, getattr(db_instance, field.attname)) # Throw away stale foreign key references. if field.is_relation and field.get_cache_name() in self.__dict__: rel_instance = getattr(self, field.get_cache_name()) local_val = getattr(db_instance, field.attname) related_val = None if rel_instance is None else getattr(rel_instance, field.target_field.attname) if local_val != related_val or (local_val is None and related_val is None): del self.__dict__[field.get_cache_name()] self._state.db = db_instance._state.db def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. Used to serialize a field's value (in the serializer, or form output, for example). 
Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field(field_name) except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Saves the current instance. Override this in a subclass if you want to control the saving process. The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set. """ # Ensure that a model instance without a PK hasn't been assigned to # a ForeignKey or OneToOneField on this model. If the field is # nullable, allowing the save() would result in silent data loss. for field in self._meta.concrete_fields: if field.is_relation: # If the related field isn't cached, then an instance hasn't # been assigned and there's no need to worry about this check. try: getattr(self, field.get_cache_name()) except AttributeError: continue obj = getattr(self, field.name, None) # A pk may have been assigned manually to a model instance not # saved to the database (or auto-generated in a case like # UUIDField), but we allow the save to proceed and rely on the # database to raise an IntegrityError if applicable. If # constraints aren't supported by the database, there's the # unavoidable risk of data corruption. if obj and obj.pk is None: # Remove the object from a related instance cache. if not field.remote_field.multiple: delattr(obj, field.remote_field.get_cache_name()) raise ValueError( "save() prohibited to prevent data loss due to " "unsaved related object '%s'." 
% field.name ) using = using or router.db_for_write(self.__class__, instance=self) if force_insert and (force_update or update_fields): raise ValueError("Cannot force both insert and updating in model saving.") deferred_fields = self.get_deferred_fields() if update_fields is not None: # If update_fields is empty, skip the save. We do also check for # no-op saves later on for inheritance cases. This bailout is # still needed for skipping signal sending. if len(update_fields) == 0: return update_fields = frozenset(update_fields) field_names = set() for field in self._meta.fields: if not field.primary_key: field_names.add(field.name) if field.name != field.attname: field_names.add(field.attname) non_model_fields = update_fields.difference(field_names) if non_model_fields: raise ValueError("The following fields do not exist in this " "model or are m2m fields: %s" % ', '.join(non_model_fields)) # If saving to the same database, and this model is deferred, then # automatically do a "update_fields" save on the loaded fields. elif not force_insert and deferred_fields and using == self._state.db: field_names = set() for field in self._meta.concrete_fields: if not field.primary_key and not hasattr(field, 'through'): field_names.add(field.attname) loaded_fields = field_names.difference(deferred_fields) if loaded_fields: update_fields = frozenset(loaded_fields) self.save_base(using=using, force_insert=force_insert, force_update=force_update, update_fields=update_fields) save.alters_data = True def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. 
""" using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.atomic(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using) save_base.alters_data = True def _save_parents(self, cls, using, update_fields): """ Saves all the parents of cls using values from self. """ meta = cls._meta for parent, field in meta.parents.items(): # Make sure the link fields are synced between parent and self. if (field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None): setattr(self, parent._meta.pk.attname, getattr(self, field.attname)) self._save_parents(cls=parent, using=using, update_fields=update_fields) self._save_table(cls=parent, using=using, update_fields=update_fields) # Set the parent's PK value to self. if field: setattr(self, field.attname, self._get_pk_val(parent._meta)) # Since we didn't have an instance of the parent handy set # attname directly, bypassing the descriptor. Invalidate # the related object cache, in case it's been accidentally # populated. A fresh instance will be re-built from the # database if necessary. 
cache_name = field.get_cache_name() if hasattr(self, cache_name): delattr(self, cache_name) def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None): """ Does the heavy-lifting involved in saving. Updates or inserts the data for a single table. """ meta = cls._meta non_pks = [f for f in meta.local_concrete_fields if not f.primary_key] if update_fields: non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields] pk_val = self._get_pk_val(meta) if pk_val is None: pk_val = meta.pk.get_pk_value_on_save(self) setattr(self, meta.pk.attname, pk_val) pk_set = pk_val is not None if not pk_set and (force_update or update_fields): raise ValueError("Cannot force an update in save() with no primary key.") updated = False # If possible, try an UPDATE. If that doesn't update anything, do an INSERT. if pk_set and not force_insert: base_qs = cls._base_manager.using(using) values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False))) for f in non_pks] forced_update = update_fields or force_update updated = self._do_update(base_qs, using, pk_val, values, update_fields, forced_update) if force_update and not updated: raise DatabaseError("Forced update did not affect any rows.") if update_fields and not updated: raise DatabaseError("Save with update_fields did not affect any rows.") if not updated: if meta.order_with_respect_to: # If this is a model with an order_with_respect_to # autopopulate the _order field field = meta.order_with_respect_to filter_args = field.get_filter_kwargs_for_object(self) order_value = cls._base_manager.using(using).filter(**filter_args).count() self._order = order_value fields = meta.local_concrete_fields if not pk_set: fields = [f for f in fields if not isinstance(f, AutoField)] update_pk = bool(meta.has_auto_field and not pk_set) result = self._do_insert(cls._base_manager, using, fields, update_pk, raw) if update_pk: setattr(self, meta.pk.attname, 
result) return updated def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): """ This method will try to update the model. If the model was updated (in the sense that an update query was done and a matching row was found from the DB) the method will return True. """ filtered = base_qs.filter(pk=pk_val) if not values: # We can end up here when saving a model in inheritance chain where # update_fields doesn't target any field in current model. In that # case we just say the update succeeded. Another case ending up here # is a model with just PK - in that case check that the PK still # exists. return update_fields is not None or filtered.exists() if self._meta.select_on_save and not forced_update: if filtered.exists(): # It may happen that the object is deleted from the DB right after # this check, causing the subsequent UPDATE to return zero matching # rows. The same result can occur in some rare cases when the # database returns zero despite the UPDATE being executed # successfully (a row is matched and updated). In order to # distinguish these two cases, the object's existence in the # database is again checked for if the UPDATE query returns 0. return filtered._update(values) > 0 or filtered.exists() else: return False return filtered._update(values) > 0 def _do_insert(self, manager, using, fields, update_pk, raw): """ Do an INSERT. If update_pk is defined then this method should return the new pk for the model. """ return manager._insert([self], fields=fields, return_id=update_pk, using=using, raw=raw) def delete(self, using=None, keep_parents=False): using = using or router.db_for_write(self.__class__, instance=self) assert self._get_pk_val() is not None, ( "%s object can't be deleted because its %s attribute is set to None." 
% (self._meta.object_name, self._meta.pk.attname) ) collector = Collector(using=using) collector.collect([self], keep_parents=keep_parents) return collector.delete() delete.alters_data = True def _get_FIELD_display(self, field): value = getattr(self, field.attname) return force_text(dict(field.flatchoices).get(value, value), strings_only=True) def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): if not self.pk: raise ValueError("get_next/get_previous cannot be used on unsaved objects.") op = 'gt' if is_next else 'lt' order = '' if is_next else '-' param = force_text(getattr(self, field.attname)) q = Q(**{'%s__%s' % (field.name, op): param}) q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk}) qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by( '%s%s' % (order, field.name), '%spk' % order ) try: return qs[0] except IndexError: raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name) def _get_next_or_previous_in_order(self, is_next): cachename = "__%s_order_cache" % is_next if not hasattr(self, cachename): op = 'gt' if is_next else 'lt' order = '_order' if is_next else '-_order' order_field = self._meta.order_with_respect_to filter_args = order_field.get_filter_kwargs_for_object(self) obj = self.__class__._default_manager.filter(**filter_args).filter(**{ '_order__%s' % op: self.__class__._default_manager.values('_order').filter(**{ self._meta.pk.name: self.pk }) }).order_by(order)[:1].get() setattr(self, cachename, obj) return getattr(self, cachename) def prepare_database_save(self, field): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return getattr(self, field.remote_field.get_related_field().attname) def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. 
Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors) def _get_unique_checks(self, exclude=None): """ Gather a list of checks to perform. Since validate_unique could be called from a ModelForm, some fields may have been excluded; we can't perform a unique check on a model that is missing fields involved in that check. Fields that did not validate should also be excluded, but they need to be passed in via the exclude argument. """ if exclude is None: exclude = [] unique_checks = [] unique_togethers = [(self.__class__, self._meta.unique_together)] for parent_class in self._meta.get_parent_list(): if parent_class._meta.unique_together: unique_togethers.append((parent_class, parent_class._meta.unique_together)) for model_class, unique_together in unique_togethers: for check in unique_together: for name in check: # If this is an excluded field, don't add this check. if name in exclude: break else: unique_checks.append((model_class, tuple(check))) # These are checks for the unique_for_<date/year/month>. date_checks = [] # Gather a list of checks for fields declared as unique and add them to # the list of checks. 
fields_with_class = [(self.__class__, self._meta.local_fields)] for parent_class in self._meta.get_parent_list(): fields_with_class.append((parent_class, parent_class._meta.local_fields)) for model_class, fields in fields_with_class: for f in fields: name = f.name if name in exclude: continue if f.unique: unique_checks.append((model_class, (name,))) if f.unique_for_date and f.unique_for_date not in exclude: date_checks.append((model_class, 'date', name, f.unique_for_date)) if f.unique_for_year and f.unique_for_year not in exclude: date_checks.append((model_class, 'year', name, f.unique_for_year)) if f.unique_for_month and f.unique_for_month not in exclude: date_checks.append((model_class, 'month', name, f.unique_for_month)) return unique_checks, date_checks def _perform_unique_checks(self, unique_checks): errors = {} for model_class, unique_check in unique_checks: # Try to look up an existing object with the same values as this # object's values for all the unique field. lookup_kwargs = {} for field_name in unique_check: f = self._meta.get_field(field_name) lookup_value = getattr(self, f.attname) if lookup_value is None: # no value, skip the lookup continue if f.primary_key and not self._state.adding: # no need to check for unique primary key when editing continue lookup_kwargs[str(field_name)] = lookup_value # some fields were skipped, no reason to do the check if len(unique_check) != len(lookup_kwargs): continue qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) # Note that we need to use the pk as defined by model_class, not # self.pk. These can be different fields because model inheritance # allows single model to have effectively multiple primary keys. # Refs #17615. 
            model_class_pk = self._get_pk_val(model_class._meta)
            if not self._state.adding and model_class_pk is not None:
                qs = qs.exclude(pk=model_class_pk)
            if qs.exists():
                # A single-field check is reported under that field's name;
                # a unique_together violation has no single field to blame,
                # so it is filed under NON_FIELD_ERRORS.
                if len(unique_check) == 1:
                    key = unique_check[0]
                else:
                    key = NON_FIELD_ERRORS
                errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check))

        return errors

    def _perform_date_checks(self, date_checks):
        """
        Run the unique_for_<date/year/month> checks gathered by
        _get_unique_checks. Returns a dict mapping field name to a list of
        ValidationErrors (empty when everything passes).
        """
        errors = {}
        for model_class, lookup_type, field, unique_for in date_checks:
            lookup_kwargs = {}
            # there's a ticket to add a date lookup, we can remove this special
            # case if that makes its way in
            date = getattr(self, unique_for)
            if date is None:
                # No date to be unique "for" -- nothing to check.
                continue
            if lookup_type == 'date':
                # unique_for_date matches the full calendar day, so filter on
                # day, month and year components separately.
                lookup_kwargs['%s__day' % unique_for] = date.day
                lookup_kwargs['%s__month' % unique_for] = date.month
                lookup_kwargs['%s__year' % unique_for] = date.year
            else:
                # unique_for_year / unique_for_month: compare only that
                # component of the date value.
                lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type)
            lookup_kwargs[field] = getattr(self, field)

            qs = model_class._default_manager.filter(**lookup_kwargs)
            # Exclude the current object from the query if we are editing an
            # instance (as opposed to creating a new one)
            if not self._state.adding and self.pk is not None:
                qs = qs.exclude(pk=self.pk)

            if qs.exists():
                errors.setdefault(field, []).append(
                    self.date_error_message(lookup_type, field, unique_for)
                )
        return errors

    def date_error_message(self, lookup_type, field_name, unique_for):
        """
        Build the ValidationError raised when a
        unique_for_<date/year/month> check on ``field_name`` fails against
        the date field ``unique_for``.
        """
        opts = self._meta
        field = opts.get_field(field_name)
        return ValidationError(
            message=field.error_messages['unique_for_date'],
            code='unique_for_date',
            params={
                'model': self,
                'model_name': six.text_type(capfirst(opts.verbose_name)),
                'lookup_type': lookup_type,
                'field': field_name,
                'field_label': six.text_type(capfirst(field.verbose_name)),
                'date_field': unique_for,
                'date_field_label': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)),
            }
        )

    def unique_error_message(self, model_class, unique_check):
        # Build the ValidationError for a failed uniqueness check; the rest
        # of the params dict and the single-field/unique_together branches
        # continue below.
        opts = model_class._meta

        params = {
            'model': self,
            'model_class': model_class,
'model_name': six.text_type(capfirst(opts.verbose_name)), 'unique_check': unique_check, } # A unique field if len(unique_check) == 1: field = opts.get_field(unique_check[0]) params['field_label'] = six.text_type(capfirst(field.verbose_name)) return ValidationError( message=field.error_messages['unique'], code='unique', params=params, ) # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] params['field_labels'] = six.text_type(get_text_list(field_labels, _('and'))) return ValidationError( message=_("%(model_name)s with this %(field_labels)s already exists."), code='unique_together', params=params, ) def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] else: exclude = list(exclude) try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors) def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing a dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. 
raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors) @classmethod def check(cls, **kwargs): errors = [] errors.extend(cls._check_swappable()) errors.extend(cls._check_model()) errors.extend(cls._check_managers(**kwargs)) if not cls._meta.swapped: errors.extend(cls._check_fields(**kwargs)) errors.extend(cls._check_m2m_through_same_relationship()) errors.extend(cls._check_long_column_names()) clash_errors = cls._check_id_field() + cls._check_field_name_clashes() errors.extend(clash_errors) # If there are field name clashes, hide consequent column name # clashes. if not clash_errors: errors.extend(cls._check_column_name_clashes()) errors.extend(cls._check_index_together()) errors.extend(cls._check_unique_together()) errors.extend(cls._check_ordering()) return errors @classmethod def _check_swappable(cls): """ Check if the swapped model exists. """ errors = [] if cls._meta.swapped: try: apps.get_model(cls._meta.swapped) except ValueError: errors.append( checks.Error( "'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable, id='models.E001', ) ) except LookupError: app_label, model_name = cls._meta.swapped.split('.') errors.append( checks.Error( "'%s' references '%s.%s', which has not been " "installed, or is abstract." % ( cls._meta.swappable, app_label, model_name ), id='models.E002', ) ) return errors @classmethod def _check_model(cls): errors = [] if cls._meta.proxy: if cls._meta.local_fields or cls._meta.local_many_to_many: errors.append( checks.Error( "Proxy model '%s' contains model fields." % cls.__name__, id='models.E017', ) ) return errors @classmethod def _check_managers(cls, **kwargs): """ Perform all manager checks. 
""" errors = [] for manager in cls._meta.managers: errors.extend(manager.check(**kwargs)) return errors @classmethod def _check_fields(cls, **kwargs): """ Perform all field checks. """ errors = [] for field in cls._meta.local_fields: errors.extend(field.check(**kwargs)) for field in cls._meta.local_many_to_many: errors.extend(field.check(from_model=cls, **kwargs)) return errors @classmethod def _check_m2m_through_same_relationship(cls): """ Check if no relationship model is used by more than one m2m field. """ errors = [] seen_intermediary_signatures = [] fields = cls._meta.local_many_to_many # Skip when the target model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase)) # Skip when the relationship model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase)) for f in fields: signature = (f.remote_field.model, cls, f.remote_field.through) if signature in seen_intermediary_signatures: errors.append( checks.Error( "The model has two many-to-many relations through " "the intermediate model '%s'." % f.remote_field.through._meta.label, obj=cls, id='models.E003', ) ) else: seen_intermediary_signatures.append(signature) return errors @classmethod def _check_id_field(cls): """ Check if `id` field is a primary key. """ fields = list(f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk) # fields is empty or consists of the invalid "id" field if fields and not fields[0].primary_key and cls._meta.pk.name == 'id': return [ checks.Error( "'id' can only be used as a field name if the field also " "sets 'primary_key=True'.", obj=cls, id='models.E004', ) ] else: return [] @classmethod def _check_field_name_clashes(cls): """ Ref #17673. """ errors = [] used_fields = {} # name or attname -> field # Check that multi-inheritance doesn't cause field name shadowing. 
        for parent in cls._meta.get_parent_list():
            for f in parent._meta.local_fields:
                # A field clashes when an earlier parent has already claimed
                # either its name or its attname (e.g. "user" vs "user_id").
                clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
                if clash:
                    errors.append(
                        checks.Error(
                            "The field '%s' from parent model "
                            "'%s' clashes with the field '%s' "
                            "from parent model '%s'." % (
                                clash.name, clash.model._meta,
                                f.name, f.model._meta
                            ),
                            obj=cls,
                            id='models.E005',
                        )
                    )
                # Register both lookup keys so later parents/locals can
                # detect a clash against this field.
                used_fields[f.name] = f
                used_fields[f.attname] = f

        # Check that fields defined in the model don't clash with fields from
        # parents, including auto-generated fields like multi-table inheritance
        # child accessors.
        for parent in cls._meta.get_parent_list():
            for f in parent._meta.get_fields():
                if f not in used_fields:
                    used_fields[f.name] = f

        for f in cls._meta.local_fields:
            clash = used_fields.get(f.name) or used_fields.get(f.attname) or None
            # Note that we may detect clash between user-defined non-unique
            # field "id" and automatically added unique field "id", both
            # defined at the same model. This special case is considered in
            # _check_id_field and here we ignore it.
            id_conflict = f.name == "id" and clash and clash.name == "id" and clash.model == cls
            if clash and not id_conflict:
                errors.append(
                    checks.Error(
                        "The field '%s' clashes with the field '%s' "
                        "from model '%s'." % (
                            f.name, clash.name, clash.model._meta
                        ),
                        obj=f,
                        id='models.E006',
                    )
                )
            used_fields[f.name] = f
            used_fields[f.attname] = f

        return errors

    @classmethod
    def _check_column_name_clashes(cls):
        """
        Check that no two local fields map to the same database column
        (models.E007).
        """
        # Store a list of column names which have already been used by other fields.
        used_column_names = []
        errors = []

        for f in cls._meta.local_fields:
            _, column_name = f.get_attname_column()

            # Ensure the column name is not already in use.
            if column_name and column_name in used_column_names:
                errors.append(
                    checks.Error(
                        "Field '%s' has column name '%s' that is used by "
                        "another field."
% (f.name, column_name), hint="Specify a 'db_column' for the field.", obj=cls, id='models.E007' ) ) else: used_column_names.append(column_name) return errors @classmethod def _check_index_together(cls): """ Check the value of "index_together" option. """ if not isinstance(cls._meta.index_together, (tuple, list)): return [ checks.Error( "'index_together' must be a list or tuple.", obj=cls, id='models.E008', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together): return [ checks.Error( "All 'index_together' elements must be lists or tuples.", obj=cls, id='models.E009', ) ] else: errors = [] for fields in cls._meta.index_together: errors.extend(cls._check_local_fields(fields, "index_together")) return errors @classmethod def _check_unique_together(cls): """ Check the value of "unique_together" option. """ if not isinstance(cls._meta.unique_together, (tuple, list)): return [ checks.Error( "'unique_together' must be a list or tuple.", obj=cls, id='models.E010', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together): return [ checks.Error( "All 'unique_together' elements must be lists or tuples.", obj=cls, id='models.E011', ) ] else: errors = [] for fields in cls._meta.unique_together: errors.extend(cls._check_local_fields(fields, "unique_together")) return errors @classmethod def _check_local_fields(cls, fields, option): from django.db import models # In order to avoid hitting the relation tree prematurely, we use our # own fields_map instead of using get_field() forward_fields_map = { field.name: field for field in cls._meta._get_fields(reverse=False) } errors = [] for field_name in fields: try: field = forward_fields_map[field_name] except KeyError: errors.append( checks.Error( "'%s' refers to the non-existent field '%s'." 
% ( option, field_name, ), obj=cls, id='models.E012', ) ) else: if isinstance(field.remote_field, models.ManyToManyRel): errors.append( checks.Error( "'%s' refers to a ManyToManyField '%s', but " "ManyToManyFields are not permitted in '%s'." % ( option, field_name, option, ), obj=cls, id='models.E013', ) ) elif field not in cls._meta.local_fields: errors.append( checks.Error( "'%s' refers to field '%s' which is not local to model '%s'." % (option, field_name, cls._meta.object_name), hint="This issue may be caused by multi-table inheritance.", obj=cls, id='models.E016', ) ) return errors @classmethod def _check_ordering(cls): """ Check "ordering" option -- is it a list of strings and do all fields exist? """ if cls._meta._ordering_clash: return [ checks.Error( "'ordering' and 'order_with_respect_to' cannot be used together.", obj=cls, id='models.E021', ), ] if cls._meta.order_with_respect_to or not cls._meta.ordering: return [] if not isinstance(cls._meta.ordering, (list, tuple)): return [ checks.Error( "'ordering' must be a tuple or list (even if you want to order by only one field).", obj=cls, id='models.E014', ) ] errors = [] fields = cls._meta.ordering # Skip '?' fields. fields = (f for f in fields if f != '?') # Convert "-field" to "field". fields = ((f[1:] if f.startswith('-') else f) for f in fields) # Skip ordering in the format field1__field2 (FIXME: checking # this format would be nice, but it's a little fiddly). fields = (f for f in fields if '__' not in f) # Skip ordering on pk. This is always a valid order_by field # but is an alias and therefore won't be found by opts.get_field. fields = {f for f in fields if f != 'pk'} # Check for invalid or non-existent fields in ordering. invalid_fields = [] # Any field name that is not present in field_names does not exist. # Also, ordering by m2m fields is not allowed. 
opts = cls._meta valid_fields = set(chain.from_iterable( (f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),) for f in chain(opts.fields, opts.related_objects) )) invalid_fields.extend(fields - valid_fields) for invalid_field in invalid_fields: errors.append( checks.Error( "'ordering' refers to the non-existent field '%s'." % invalid_field, obj=cls, id='models.E015', ) ) return errors @classmethod def _check_long_column_names(cls): """ Check that any auto-generated column names are shorter than the limits for each database in which the model will be created. """ errors = [] allowed_len = None db_alias = None # Find the minimum max allowed length among all specified db_aliases. for db in settings.DATABASES.keys(): # skip databases where the model won't be created if not router.allow_migrate_model(db, cls): continue connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is None or connection.features.truncates_names: continue else: if allowed_len is None: allowed_len = max_name_length db_alias = db elif max_name_length < allowed_len: allowed_len = max_name_length db_alias = db if allowed_len is None: return errors for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Check if auto-generated name for the field is too long # for the database. if f.db_column is None and column_name is not None and len(column_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (column_name, allowed_len, db_alias), hint="Set the column name manually using 'db_column'.", obj=cls, id='models.E018', ) ) for f in cls._meta.local_many_to_many: # Check if auto-generated name for the M2M field is too long # for the database. 
for m2m in f.remote_field.through._meta.local_fields: _, rel_name = m2m.get_attname_column() if m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len: errors.append( checks.Error( 'Autogenerated column name too long for M2M field ' '"%s". Maximum length is "%s" for database "%s".' % (rel_name, allowed_len, db_alias), hint=( "Use 'through' to create a separate model for " "M2M and then set column_name using 'db_column'." ), obj=cls, id='models.E019', ) ) return errors ############################################ # HELPER FUNCTIONS (CURRIED MODEL METHODS) # ############################################ # ORDERING METHODS ######################### def method_set_order(ordered_obj, self, id_list, using=None): if using is None: using = DEFAULT_DB_ALIAS order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) # FIXME: It would be nice if there was an "update many" version of update # for situations like this. with transaction.atomic(using=using, savepoint=False): for i, j in enumerate(id_list): ordered_obj.objects.filter(pk=j, **filter_args).update(_order=i) def method_get_order(ordered_obj, self): order_wrt = ordered_obj._meta.order_with_respect_to filter_args = order_wrt.get_forward_related_filter(self) pk_name = ordered_obj._meta.pk.name return ordered_obj.objects.filter(**filter_args).values_list(pk_name, flat=True) def make_foreign_order_accessors(model, related_model): setattr( related_model, 'get_%s_order' % model.__name__.lower(), curry(method_get_order, model) ) setattr( related_model, 'set_%s_order' % model.__name__.lower(), curry(method_set_order, model) ) ######## # MISC # ######## def model_unpickle(model_id): """ Used to unpickle Model subclasses with deferred fields. """ if isinstance(model_id, tuple): model = apps.get_model(*model_id) else: # Backwards compat - the model was cached directly in earlier versions. 
        model = model_id
    # `model` is now a model class in either branch; allocate a bare instance
    # without running __init__ (the pickle machinery presumably restores the
    # instance state afterwards -- standard __reduce__ flow, confirm upstream).
    return model.__new__(model)
model_unpickle.__safe_for_unpickle__ = True


def unpickle_inner_exception(klass, exception_name):
    """
    Recreate an exception class defined as an attribute of another class
    (e.g. SomeModel.DoesNotExist) during unpickling.
    """
    # Get the exception class from the class it is attached to:
    exception = getattr(klass, exception_name)
    # Allocate without calling __init__; state is restored by the unpickler.
    return exception.__new__(exception)
bsd-3-clause
Azure/azure-sdk-for-python
sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/operations/_web_application_firewall_policies_operations.py
1
20908
# coding=utf-8 # -------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for license information. # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is regenerated. # -------------------------------------------------------------------------- from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error from azure.core.paging import ItemPaged from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpRequest, HttpResponse from azure.core.polling import LROPoller, NoPolling, PollingMethod from azure.mgmt.core.exceptions import ARMErrorFormat from azure.mgmt.core.polling.arm_polling import ARMPolling from .. import models as _models if TYPE_CHECKING: # pylint: disable=unused-import,ungrouped-imports from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union T = TypeVar('T') ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] class WebApplicationFirewallPoliciesOperations(object): """WebApplicationFirewallPoliciesOperations operations. You should not instantiate this class directly. Instead, you should create a Client instance that instantiates it for you and attaches it as an attribute. :ivar models: Alias to model classes used in this operation group. :type models: ~azure.mgmt.network.v2021_02_01.models :param client: Client for service requests. :param config: Configuration of service client. :param serializer: An object model serializer. :param deserializer: An object model deserializer. 
""" models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config def list( self, resource_group_name, # type: str **kwargs # type: Any ): # type: (...) -> Iterable["_models.WebApplicationFirewallPolicyListResult"] """Lists all of the protection policies within a resource group. :param resource_group_name: The name of the resource group. :type resource_group_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WebApplicationFirewallPolicyListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_02_01.models.WebApplicationFirewallPolicyListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicyListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-02-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: 
Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('WebApplicationFirewallPolicyListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies'} # type: ignore def list_all( self, **kwargs # type: Any ): # type: (...) -> Iterable["_models.WebApplicationFirewallPolicyListResult"] """Gets all the WAF policies in a subscription. 
:keyword callable cls: A custom type or function that will be passed the direct response :return: An iterator like instance of either WebApplicationFirewallPolicyListResult or the result of cls(response) :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_02_01.models.WebApplicationFirewallPolicyListResult] :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicyListResult"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-02-01" accept = "application/json" def prepare_request(next_link=None): # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') if not next_link: # Construct URL url = self.list_all.metadata['url'] # type: ignore path_format_arguments = { 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') request = self._client.get(url, query_parameters, header_parameters) else: url = next_link query_parameters = {} # type: Dict[str, Any] request = self._client.get(url, query_parameters, header_parameters) return request def extract_data(pipeline_response): deserialized = self._deserialize('WebApplicationFirewallPolicyListResult', pipeline_response) list_of_elem = deserialized.value if cls: list_of_elem = cls(list_of_elem) return deserialized.next_link or None, iter(list_of_elem) def get_next(next_link=None): request = prepare_request(next_link) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in 
[200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) return pipeline_response return ItemPaged( get_next, extract_data ) list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies'} # type: ignore def get( self, resource_group_name, # type: str policy_name, # type: str **kwargs # type: Any ): # type: (...) -> "_models.WebApplicationFirewallPolicy" """Retrieve protection policy with specified name within a resource group. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param policy_name: The name of the policy. :type policy_name: str :keyword callable cls: A custom type or function that will be passed the direct response :return: WebApplicationFirewallPolicy, or the result of cls(response) :rtype: ~azure.mgmt.network.v2021_02_01.models.WebApplicationFirewallPolicy :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicy"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-02-01" accept = "application/json" # Construct URL url = self.get.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = 
{} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.get(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'} # type: ignore def create_or_update( self, resource_group_name, # type: str policy_name, # type: str parameters, # type: "_models.WebApplicationFirewallPolicy" **kwargs # type: Any ): # type: (...) -> "_models.WebApplicationFirewallPolicy" """Creates or update policy with specified rule set name within a resource group. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param policy_name: The name of the policy. :type policy_name: str :param parameters: Policy to be created. 
:type parameters: ~azure.mgmt.network.v2021_02_01.models.WebApplicationFirewallPolicy :keyword callable cls: A custom type or function that will be passed the direct response :return: WebApplicationFirewallPolicy, or the result of cls(response) :rtype: ~azure.mgmt.network.v2021_02_01.models.WebApplicationFirewallPolicy :raises: ~azure.core.exceptions.HttpResponseError """ cls = kwargs.pop('cls', None) # type: ClsType["_models.WebApplicationFirewallPolicy"] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-02-01" content_type = kwargs.pop("content_type", "application/json") accept = "application/json" # Construct URL url = self.create_or_update.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str') header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') body_content_kwargs = {} # type: Dict[str, Any] body_content = self._serialize.body(parameters, 'WebApplicationFirewallPolicy') body_content_kwargs['content'] = body_content request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if 
response.status_code not in [200, 201]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if response.status_code == 200: deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response) if response.status_code == 201: deserialized = self._deserialize('WebApplicationFirewallPolicy', pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'} # type: ignore def _delete_initial( self, resource_group_name, # type: str policy_name, # type: str **kwargs # type: Any ): # type: (...) -> None cls = kwargs.pop('cls', None) # type: ClsType[None] error_map = { 401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError } error_map.update(kwargs.pop('error_map', {})) api_version = "2021-02-01" accept = "application/json" # Construct URL url = self._delete_initial.metadata['url'] # type: ignore path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } url = self._client.format_url(url, **path_format_arguments) # Construct parameters query_parameters = {} # type: Dict[str, Any] query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str') # Construct headers header_parameters = {} # type: Dict[str, Any] header_parameters['Accept'] = self._serialize.header("accept", accept, 'str') request = self._client.delete(url, query_parameters, header_parameters) pipeline_response = self._client._pipeline.run(request, 
stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200, 202, 204]: map_error(status_code=response.status_code, response=response, error_map=error_map) raise HttpResponseError(response=response, error_format=ARMErrorFormat) if cls: return cls(pipeline_response, None, {}) _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'} # type: ignore def begin_delete( self, resource_group_name, # type: str policy_name, # type: str **kwargs # type: Any ): # type: (...) -> LROPoller[None] """Deletes Policy. :param resource_group_name: The name of the resource group. :type resource_group_name: str :param policy_name: The name of the policy. :type policy_name: str :keyword callable cls: A custom type or function that will be passed the direct response :keyword str continuation_token: A continuation token to restart a poller from a saved state. :keyword polling: Pass in True if you'd like the ARMPolling polling method, False for no polling, or your own initialized polling object for a personal polling strategy. :paramtype polling: bool or ~azure.core.polling.PollingMethod :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present. 
:return: An instance of LROPoller that returns either None or the result of cls(response) :rtype: ~azure.core.polling.LROPoller[None] :raises ~azure.core.exceptions.HttpResponseError: """ polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod] cls = kwargs.pop('cls', None) # type: ClsType[None] lro_delay = kwargs.pop( 'polling_interval', self._config.polling_interval ) cont_token = kwargs.pop('continuation_token', None) # type: Optional[str] if cont_token is None: raw_result = self._delete_initial( resource_group_name=resource_group_name, policy_name=policy_name, cls=lambda x,y,z: x, **kwargs ) kwargs.pop('error_map', None) kwargs.pop('content_type', None) def get_long_running_output(pipeline_response): if cls: return cls(pipeline_response, None, {}) path_format_arguments = { 'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'), 'policyName': self._serialize.url("policy_name", policy_name, 'str', max_length=128, min_length=0), 'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'), } if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs) elif polling is False: polling_method = NoPolling() else: polling_method = polling if cont_token: return LROPoller.from_continuation_token( polling_method=polling_method, continuation_token=cont_token, client=self._client, deserialization_callback=get_long_running_output ) else: return LROPoller(self._client, raw_result, get_long_running_output, polling_method) begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/ApplicationGatewayWebApplicationFirewallPolicies/{policyName}'} # type: ignore
mit
5y/kivy
kivy/uix/settings.py
4
40187
'''Settings ======== .. versionadded:: 1.0.7 This module is a complete and extensible framework for building a Settings interface in your application. By default the interface uses a :class:`SettingsWithSpinner`, which consists of a :class:`~kivy.uix.spinner.Spinner` (top) to switch between individual settings panels (bottom). See :ref:`differentlayouts` for some alternatives. .. image:: images/settingswithspinner_kivy.jpg :align: center :class:`SettingsPanel` represents a group of configurable options. The :data:`SettingsPanel.title` property is used by :class:`Settings` when a panel is added - it determines the name of the sidebar button. SettingsPanel controls a :class:`~kivy.config.ConfigParser` instance. The panel can be automatically constructed from a JSON definition file: you describe the settings you want and corresponding sections/keys in the ConfigParser instance... and you're done! Settings are also integrated with the :class:`~kivy.app.App` class. Use :func:`Settings.add_kivy_panel` to configure the Kivy core settings in a panel. .. _settings_json: Create panel from JSON ---------------------- To create a panel from a JSON-file, you need two things: * a :class:`~kivy.config.ConfigParser` instance with default values * a JSON file .. warning:: The :class:`kivy.config.ConfigParser` is required. You cannot use the default ConfigParser from Python libraries. You must create and handle the :class:`~kivy.config.ConfigParser` object. SettingsPanel will read the values from the associated ConfigParser instance. Make sure you have default values for all sections/keys in your JSON file! The JSON file contains structured information to describe the available settings. Here is an example:: [ { "type": "title", "title": "Windows" }, { "type": "bool", "title": "Fullscreen", "desc": "Set the window in windowed or fullscreen", "section": "graphics", "key": "fullscreen", "true": "auto" } ] Each element in the root list represents a setting that the user can configure. 
Only the "type" key is mandatory: an instance of the associated class will be created and used for the setting - other keys are assigned to corresponding properties of that class. ============== ================================================= Type Associated class -------------- ------------------------------------------------- title :class:`SettingTitle` bool :class:`SettingBoolean` numeric :class:`SettingNumeric` options :class:`SettingOptions` string :class:`SettingString` path :class:`SettingPath` (new from 1.1.0) ============== ================================================= In the JSON example above, the first element is of type "title". It will create a new instance of :class:`SettingTitle` and apply the rest of the key/value pairs to the properties of that class, i.e., "title": "Windows" sets the :data:`SettingTitle.title` property to "Windows". To load the JSON example to a :class:`Settings` instance, use the :meth:`Settings.add_json_panel` method. It will automatically instantiate :class:`SettingsPanel` and add it to :class:`Settings`:: from kivy.config import ConfigParser config = ConfigParser() config.read('myconfig.ini') s = Settings() s.add_json_panel('My custom panel', config, 'settings_custom.json') s.add_json_panel('Another panel', config, 'settings_test2.json') # then use the s as a widget... .. _differentlayouts: Different panel layouts ----------------------- A kivy :class:`~kivy.app.App` can automatically create and display a :class:`Settings` instance. See the :attr:`~kivy.app.App.settings_cls` documentation for details on how to choose which settings class to display. Several pre-built settings widgets are available. All except :class:`SettingsWithNoMenu` include close buttons triggering the on_close event. - :class:`Settings`: Displays settings with a sidebar at the left to switch between json panels. This is the default behaviour. - :class:`SettingsWithSidebar`: A trivial subclass of :class:`Settings`. 
- :class:`SettingsWithSpinner`: Displays settings with a spinner at the top,
  which can be used to switch between json panels. Uses
  :class:`InterfaceWithSpinner` as the
  :data:`~Settings.interface_cls`.
- :class:`SettingsWithTabbedPanel`: Displays json panels as individual tabs in
  a :class:`~kivy.uix.tabbedpanel.TabbedPanel`. Uses
  :class:`InterfaceWithTabbedPanel` as the
  :data:`~Settings.interface_cls`.
- :class:`SettingsWithNoMenu`: Displays a single json panel, with no way to
  switch to other panels and no close button. This makes it impossible for
  the user to exit unless :meth:`~kivy.app.App.close_settings` is overridden
  with a different close trigger! Uses
  :class:`InterfaceWithNoMenu` as the
  :data:`~Settings.interface_cls`.

You can construct your own settings panels with any layout you choose by
setting :data:`Settings.interface_cls`. This should be a widget that
displays a json settings panel with some way to switch between panels. An
instance will be automatically created by :class:`Settings`.

Interface widgets may be anything you like, but *must* have a method
add_panel that receives newly created json settings panels for the interface
to display. See the documentation for :class:`InterfaceWithSidebar` for more
information. They may optionally dispatch an on_close event, for instance if
a close button is clicked, which is used by :class:`Settings` to trigger its
own on_close event.
''' __all__ = ('Settings', 'SettingsPanel', 'SettingItem', 'SettingString', 'SettingPath', 'SettingBoolean', 'SettingNumeric', 'SettingOptions', 'SettingsWithSidebar', 'SettingsWithSpinner', 'SettingsWithTabbedPanel', 'SettingsWithNoMenu', 'InterfaceWithSidebar', 'ContentPanel') import json import os from kivy.metrics import dp from kivy.config import ConfigParser from kivy.animation import Animation from kivy.compat import string_types, text_type from kivy.uix.boxlayout import BoxLayout from kivy.uix.tabbedpanel import TabbedPanelHeader from kivy.uix.button import Button from kivy.uix.filechooser import FileChooserListView from kivy.uix.scrollview import ScrollView from kivy.uix.floatlayout import FloatLayout from kivy.uix.gridlayout import GridLayout from kivy.uix.label import Label from kivy.uix.popup import Popup from kivy.uix.textinput import TextInput from kivy.uix.togglebutton import ToggleButton from kivy.uix.widget import Widget from kivy.properties import ObjectProperty, StringProperty, ListProperty, \ BooleanProperty, NumericProperty, DictProperty class SettingSpacer(Widget): # Internal class, not documented. pass class SettingItem(FloatLayout): '''Base class for individual settings (within a panel). This class cannot be used directly; it is used for implementing the other setting classes. It builds a row with title/description (left) and setting control (right). Look at :class:`SettingBoolean`, :class:`SettingNumeric` and :class:`SettingOptions` for usage example. :Events: `on_release` Fired when the item is touched then released ''' title = StringProperty('<No title set>') '''Title of the setting, default to '<No title set>'. :data:`title` is a :class:`~kivy.properties.StringProperty`, default to '<No title set>'. ''' desc = StringProperty(None, allownone=True) '''Description of the setting, rendered on the line below title. :data:`desc` is a :class:`~kivy.properties.StringProperty`, default to None. 
''' disabled = BooleanProperty(False) '''Indicate if this setting is disabled. If True, all touches on the setting item will be discarded. :data:`disabled` is a :class:`~kivy.properties.BooleanProperty`, default to False. ''' section = StringProperty(None) '''Section of the token inside the :class:`~kivy.config.ConfigParser` instance. :data:`section` is a :class:`~kivy.properties.StringProperty`, default to None. ''' key = StringProperty(None) '''Key of the token inside the :data:`section` in the :class:`~kivy.config.ConfigParser` instance. :data:`key` is a :class:`~kivy.properties.StringProperty`, default to None. ''' value = ObjectProperty(None) '''Value of the token, according to the :class:`~kivy.config.ConfigParser` instance. Any change to the value will trigger a :meth:`Settings.on_config_change` event. :data:`value` is a :class:`~kivy.properties.ObjectProperty`, default to None. ''' panel = ObjectProperty(None) '''(internal) Reference to the SettingsPanel with this setting. You don't need to use it. :data:`panel` is a :class:`~kivy.properties.ObjectProperty`, default to None ''' content = ObjectProperty(None) '''(internal) Reference to the widget that contains the real setting. As soon as the content object is set, any further call to add_widget will call the content.add_widget. This is automatically set. :data:`content` is a :class:`~kivy.properties.ObjectProperty`, default to None. ''' selected_alpha = NumericProperty(0) '''(internal) Float value from 0 to 1, used to animate the background when the user touches the item. :data:`selected_alpha` is a :class:`~kivy.properties.NumericProperty`, default to 0. 
''' __events__ = ('on_release', ) def __init__(self, **kwargs): super(SettingItem, self).__init__(**kwargs) self.value = self.panel.get_value(self.section, self.key) def add_widget(self, *largs): if self.content is None: return super(SettingItem, self).add_widget(*largs) return self.content.add_widget(*largs) def on_touch_down(self, touch): if not self.collide_point(*touch.pos): return if self.disabled: return touch.grab(self) self.selected_alpha = 1 return super(SettingItem, self).on_touch_down(touch) def on_touch_up(self, touch): if touch.grab_current is self: touch.ungrab(self) self.dispatch('on_release') Animation(selected_alpha=0, d=.25, t='out_quad').start(self) return True return super(SettingItem, self).on_touch_up(touch) def on_release(self): pass def on_value(self, instance, value): if not self.section or not self.key: return # get current value in config panel = self.panel if not isinstance(value, string_types): value = str(value) panel.set_value(self.section, self.key, value) class SettingBoolean(SettingItem): '''Implementation of a boolean setting on top of :class:`SettingItem`. It is visualized with a :class:`~kivy.uix.switch.Switch` widget. By default, 0 and 1 are used for values, you can change them by setting :data:`values`. ''' values = ListProperty(['0', '1']) '''Values used to represent the state of the setting. If you use "yes" and "no" in your ConfigParser instance:: SettingBoolean(..., values=['no', 'yes']) .. warning:: You need a minimum of two values, the index 0 will be used as False, and index 1 as True :data:`values` is a :class:`~kivy.properties.ListProperty`, default to ['0', '1'] ''' class SettingString(SettingItem): '''Implementation of a string setting on top of :class:`SettingItem`. It is visualized with a :class:`~kivy.uix.label.Label` widget that, when clicked, will open a :class:`~kivy.uix.popup.Popup` with a :class:`~kivy.uix.textinput.Textinput` so the user can enter a custom value. 
''' popup = ObjectProperty(None, allownone=True) '''(internal) Used to store the current popup when it's shown :data:`popup` is a :class:`~kivy.properties.ObjectProperty`, default to None. ''' textinput = ObjectProperty(None) '''(internal) Used to store the current textinput from the popup, and to listen for changes. :data:`popup` is a :class:`~kivy.properties.ObjectProperty`, default to None. ''' def on_panel(self, instance, value): if value is None: return self.bind(on_release=self._create_popup) def _dismiss(self, *largs): if self.textinput: self.textinput.focus = False if self.popup: self.popup.dismiss() self.popup = None def _validate(self, instance): self._dismiss() value = self.textinput.text.strip() self.value = value def _create_popup(self, instance): # create popup layout content = BoxLayout(orientation='vertical', spacing='5dp') self.popup = popup = Popup(title=self.title, content=content, size_hint=(None, None), size=('400dp', '250dp')) # create the textinput used for numeric input self.textinput = textinput = TextInput(text=self.value, font_size='24sp', multiline=False, size_hint_y=None, height='42sp') textinput.bind(on_text_validate=self._validate) self.textinput = textinput # construct the content, widget are used as a spacer content.add_widget(Widget()) content.add_widget(textinput) content.add_widget(Widget()) content.add_widget(SettingSpacer()) # 2 buttons are created for accept or cancel the current value btnlayout = BoxLayout(size_hint_y=None, height='50dp', spacing='5dp') btn = Button(text='Ok') btn.bind(on_release=self._validate) btnlayout.add_widget(btn) btn = Button(text='Cancel') btn.bind(on_release=self._dismiss) btnlayout.add_widget(btn) content.add_widget(btnlayout) # all done, open the popup ! popup.open() class SettingPath(SettingItem): '''Implementation of a Path setting on top of :class:`SettingItem`. 
It is visualized with a :class:`~kivy.uix.label.Label` widget that, when clicked, will open a :class:`~kivy.uix.popup.Popup` with a :class:`~kivy.uix.filechooser.FileChooserListView` so the user can enter a custom value. .. versionadded:: 1.1.0 ''' popup = ObjectProperty(None, allownone=True) '''(internal) Used to store the current popup when it is shown. :data:`popup` is a :class:`~kivy.properties.ObjectProperty`, default to None. ''' textinput = ObjectProperty(None) '''(internal) Used to store the current textinput from the popup, and to listen for changes. :data:`popup` is a :class:`~kivy.properties.ObjectProperty`, default to None. ''' def on_panel(self, instance, value): if value is None: return self.bind(on_release=self._create_popup) def _dismiss(self, *largs): if self.textinput: self.textinput.focus = False if self.popup: self.popup.dismiss() self.popup = None def _validate(self, instance): self._dismiss() value = self.textinput.selection if not value: return self.value = os.path.realpath(value[0]) def _create_popup(self, instance): # create popup layout content = BoxLayout(orientation='vertical', spacing=5) self.popup = popup = Popup(title=self.title, content=content, size_hint=(None, None), size=(400, 400)) # create the filechooser self.textinput = textinput = FileChooserListView( path=self.value, size_hint=(1, 1), dirselect=True) textinput.bind(on_path=self._validate) self.textinput = textinput # construct the content content.add_widget(textinput) content.add_widget(SettingSpacer()) # 2 buttons are created for accept or cancel the current value btnlayout = BoxLayout(size_hint_y=None, height='50dp', spacing='5dp') btn = Button(text='Ok') btn.bind(on_release=self._validate) btnlayout.add_widget(btn) btn = Button(text='Cancel') btn.bind(on_release=self._dismiss) btnlayout.add_widget(btn) content.add_widget(btnlayout) # all done, open the popup ! 
popup.open() class SettingNumeric(SettingString): '''Implementation of a numeric setting on top of :class:`SettingString`. It is visualized with a :class:`~kivy.uix.label.Label` widget that, when clicked, will open a :class:`~kivy.uix.popup.Popup` with a :class:`~kivy.uix.textinput.Textinput` so the user can enter a custom value. ''' def _validate(self, instance): # we know the type just by checking if there is a '.' in the original # value is_float = '.' in str(self.value) self._dismiss() try: if is_float: self.value = text_type(float(self.textinput.text)) else: self.value = text_type(int(self.textinput.text)) except ValueError: return class SettingOptions(SettingItem): '''Implementation of an option list on top of :class:`SettingItem`. It is visualized with a :class:`~kivy.uix.label.Label` widget that, when clicked, will open a :class:`~kivy.uix.popup.Popup` with a list of options from which the user can select. ''' options = ListProperty([]) '''List of all availables options. This must be a list of "string" items. Otherwise, it will crash. :) :data:`options` is a :class:`~kivy.properties.ListProperty`, default to []. ''' popup = ObjectProperty(None, allownone=True) '''(internal) Used to store the current popup when it is shown. :data:`popup` is a :class:`~kivy.properties.ObjectProperty`, default to None. 
''' def on_panel(self, instance, value): if value is None: return self.bind(on_release=self._create_popup) def _set_option(self, instance): self.value = instance.text self.popup.dismiss() def _create_popup(self, instance): # create the popup content = BoxLayout(orientation='vertical', spacing='5dp') self.popup = popup = Popup(content=content, title=self.title, size_hint=(None, None), size=('400dp', '400dp')) popup.height = len(self.options) * dp(55) + dp(150) # add all the options content.add_widget(Widget(size_hint_y=None, height=1)) uid = str(self.uid) for option in self.options: state = 'down' if option == self.value else 'normal' btn = ToggleButton(text=option, state=state, group=uid) btn.bind(on_release=self._set_option) content.add_widget(btn) # finally, add a cancel button to return on the previous panel content.add_widget(SettingSpacer()) btn = Button(text='Cancel', size_hint_y=None, height=dp(50)) btn.bind(on_release=popup.dismiss) content.add_widget(btn) # and open the popup ! popup.open() class SettingTitle(Label): '''A simple title label, used to organize the settings in sections. ''' title = Label.text class SettingsPanel(GridLayout): '''This class is used to contruct panel settings, for use with a :class:`Settings` instance or subclass. ''' title = StringProperty('Default title') '''Title of the panel. The title will be reused by the :class:`Settings` in the sidebar. ''' config = ObjectProperty(None, allownone=True) '''A :class:`kivy.config.ConfigParser` instance. See module documentation for more information. ''' settings = ObjectProperty(None) '''A :class:`Settings` instance that will be used to fire the `on_config_change` event. 
''' def __init__(self, **kwargs): kwargs.setdefault('cols', 1) super(SettingsPanel, self).__init__(**kwargs) def on_config(self, instance, value): if value is None: return if not isinstance(value, ConfigParser): raise Exception('Invalid config object, you must use a' 'kivy.config.ConfigParser, not another one !') def get_value(self, section, key): '''Return the value of the section/key from the :data:`config` ConfigParser instance. This function is used by :class:`SettingItem` to get the value for a given section/key. If you don't want to use a ConfigParser instance, you might want to adapt this function. ''' config = self.config if not config: return return config.get(section, key) def set_value(self, section, key, value): current = self.get_value(section, key) if current == value: return config = self.config if config: config.set(section, key, value) config.write() settings = self.settings if settings: settings.dispatch('on_config_change', config, section, key, value) class InterfaceWithSidebar(BoxLayout): '''The default Settings interface class. It displays a sidebar menu with names of available settings panels, which may be used to switch which one is currently displayed. See :meth:`~InterfaceWithSidebar.add_panel` for information on the method you must implement if creating your own interface. This class also dispatches an event 'on_close', which is triggered when the sidebar menu's close button is released. If creating your own interface widget, it should also dispatch such an event, which will automatically be caught by :class:`Settings` and used to trigger its own on_close event. ''' menu = ObjectProperty() '''(internal) A reference to the sidebar menu widget. :data:`menu` is an :class:`~kivy.properties.ObjectProperty` defaulting to None. ''' content = ObjectProperty() '''(internal) A reference to the panel display widget (a :class:`ContentPanel`). :data:`menu` is an :class:`~kivy.properties.ObjectProperty` defaulting to None. 
    '''

    __events__ = ('on_close', )

    def __init__(self, *args, **kwargs):
        super(InterfaceWithSidebar, self).__init__(*args, **kwargs)
        # Relay the sidebar's close button to our own on_close event.
        self.menu.close_button.bind(
            on_release=lambda j: self.dispatch('on_close'))

    def add_panel(self, panel, name, uid):
        '''This method is used by Settings to add new panels for possible
        display. Any replacement for ContentPanel *must* implement this
        method.

        :param panel: A :class:`SettingsPanel`. It should be stored, and the
                      interface should provide a way to switch between panels.
        :param name: The name of the panel, as a string. It may be used to
                     represent the panel, but isn't necessarily unique.
        :param uid: A unique int identifying the panel. It should be used to
                    identify and switch between panels.

        '''
        # Register the panel in both the sidebar menu and the content area.
        self.menu.add_item(name, uid)
        self.content.add_panel(panel, name, uid)

    def on_close(self, *args):
        # Default (empty) handler for the on_close event.
        pass


class InterfaceWithSpinner(BoxLayout):
    '''A settings interface that displays a spinner at the top for
    switching between panels. The workings of this class are considered
    internal and are not documented. See :meth:`InterfaceWithSidebar` for
    information on implementing your own interface class.

    '''

    __events__ = ('on_close', )

    menu = ObjectProperty()
    '''(internal) A reference to the spinner menu widget.

    :data:`menu` is an :class:`~kivy.properties.ObjectProperty` defaulting
    to None.
    '''

    content = ObjectProperty()
    '''(internal) A reference to the panel display widget (a
    :class:`ContentPanel`).

    :data:`content` is an :class:`~kivy.properties.ObjectProperty` defaulting
    to None.
    '''

    def __init__(self, *args, **kwargs):
        super(InterfaceWithSpinner, self).__init__(*args, **kwargs)
        # Relay the menu's close button to our own on_close event.
        self.menu.close_button.bind(
            on_release=lambda j: self.dispatch('on_close'))

    def add_panel(self, panel, name, uid):
        '''This method is used by Settings to add new panels for possible
        display. Any replacement for ContentPanel *must* implement this
        method.

        :param panel: A :class:`SettingsPanel`. It should be stored, and the
                      interface should provide a way to switch between panels.
:param name: The name of the panel, as a string. It may be used to represent the panel, but may not be unique. :param uid: A unique int identifying the panel. It should be used to identify and switch between panels. ''' self.content.add_panel(panel, name, uid) self.menu.add_item(name, uid) def on_close(self, *args): pass class ContentPanel(ScrollView): '''A class for displaying settings panels. It displays a single settings panel at a time, taking up the full size and shape of the ContentPanel. It is used by :class:`InterfaceWithSidebar` and :class:`InterfaceWithSpinner` to display settings. ''' panels = DictProperty({}) '''(internal) Stores a dictionary relating settings panels to their uids. :data:`panels` is a :class:`~kivy.properties.DictProperty`, defaulting to {}. ''' container = ObjectProperty() '''(internal) A reference to the GridLayout that actually contains the settings panel. :data:`container` is an :class:`~kivy.properties.ObjectProperty`, defaulting to None. ''' current_panel = ObjectProperty(None) '''(internal) A reference to the current settings panel. :data:`current_panel` is an :class:`~kivy.properties.ObjectProperty`, defaulting to None. ''' current_uid = NumericProperty(0) '''(internal) A reference to the uid of the current settings panel. :data:`current_uid` is a :class:`~kivy.properties.NumericProperty`, defaulting to 0. ''' def add_panel(self, panel, name, uid): '''This method is used by Settings to add new panels for possible display. Any replacement for ContentPanel *must* implement this method. :param panel: A :class:`SettingsPanel`. It should be stored, and displayed when requested. :param name: The name of the panel, as a string. It may be used to represent the panel. :param uid: A unique int identifying the panel. It should be stored and used to identify panels when switching. ''' self.panels[uid] = panel if not self.current_uid: self.current_uid = uid def on_current_uid(self, *args): '''The uid of the currently displayed panel. 
Changing this will automatically change the displayed panel. :param uid: A panel uid. It should be used to retrieve and display a settings panel that has previously been added with :meth:`add_panel`. ''' uid = self.current_uid if uid in self.panels: if self.current_panel is not None: self.remove_widget(self.current_panel) new_panel = self.panels[uid] self.add_widget(new_panel) self.current_panel = new_panel return True return False # New uid doesn't exist def add_widget(self, widget): if self.container is None: super(ContentPanel, self).add_widget(widget) else: self.container.add_widget(widget) def remove_widget(self, widget): self.container.remove_widget(widget) class Settings(BoxLayout): '''Settings UI. Check module documentation for more information on how to use this class. :Events: `on_config_change`: ConfigParser instance, section, key, value Fired when section/key/value of a ConfigParser changes. `on_close` Fired by the default panel when the Close button is pressed. ''' interface = ObjectProperty(None) '''(internal) Reference to the widget that will contain, organise and display the panel configuration panel widgets. :data:`interface` is a :class:`~kivy.properties.ObjectProperty`, default to None. ''' interface_cls = ObjectProperty(InterfaceWithSidebar) '''The widget class that will be used to display the graphical interface for the settings panel. By default, it displays one settings panel at a time with a sidebar to switch between them. :data:`interface_cls` is a :class:`~kivy.properties.ObjectProperty`, default to :class`InterfaceWithSidebar`. 
''' __events__ = ('on_close', 'on_config_change') def __init__(self, *args): self._types = {} super(Settings, self).__init__(*args) self.add_interface() self.register_type('string', SettingString) self.register_type('bool', SettingBoolean) self.register_type('numeric', SettingNumeric) self.register_type('options', SettingOptions) self.register_type('title', SettingTitle) self.register_type('path', SettingPath) def on_touch_down(self, touch): if self.collide_point(*touch.pos): super(Settings, self).on_touch_down(touch) return True def register_type(self, tp, cls): '''Register a new type that can be used in the JSON definition. ''' self._types[tp] = cls def on_close(self, *args): pass def add_interface(self): '''(Internal) creates an instance of :attr:`Settings.interface_cls`, and sets it to :attr:`~Settings.interface`. When json panels are created, they will be added to this interface, which will display them to the user. ''' interface = self.interface_cls() self.interface = interface self.add_widget(interface) self.interface.bind(on_close=lambda j: self.dispatch('on_close')) def on_config_change(self, config, section, key, value): pass def add_json_panel(self, title, config, filename=None, data=None): '''Create and add a new :class:`SettingsPanel` using the configuration `config`, with the JSON definition `filename`. Check the :ref:`settings_json` section in the documentation for more information about JSON format, and the usage of this function. ''' panel = self.create_json_panel(title, config, filename, data) uid = panel.uid if self.interface is not None: self.interface.add_panel(panel, title, uid) def create_json_panel(self, title, config, filename=None, data=None): '''Create new :class:`SettingsPanel`. .. versionadded:: 1.5.0 Check the documentation of :meth:`add_json_panel` for more information. 
''' if filename is None and data is None: raise Exception('You must specify either the filename or data') if filename is not None: with open(filename, 'r') as fd: data = json.loads(fd.read()) else: data = json.loads(data) if type(data) != list: raise ValueError('The first element must be a list') panel = SettingsPanel(title=title, settings=self, config=config) for setting in data: # determine the type and the class to use if not 'type' in setting: raise ValueError('One setting are missing the "type" element') ttype = setting['type'] cls = self._types.get(ttype) if cls is None: raise ValueError('No class registered to handle the <%s> type' % setting['type']) # create a instance of the class, without the type attribute del setting['type'] str_settings = {} for key, item in setting.items(): str_settings[str(key)] = item instance = cls(panel=panel, **str_settings) # instance created, add to the panel panel.add_widget(instance) return panel def add_kivy_panel(self): '''Add a panel for configuring Kivy. This panel acts directly on the kivy configuration. Feel free to include or exclude it in your configuration. See :meth:`~kivy.app.App.use_kivy_settings` for information on enabling/disabling the automatic kivy panel. ''' from kivy import kivy_data_dir from kivy.config import Config from os.path import join self.add_json_panel('Kivy', Config, join(kivy_data_dir, 'settings_kivy.json')) class SettingsWithSidebar(Settings): '''A settings widget that displays settings panels with a sidebar to switch between them. This is the default behaviour of :class:`Settings`, and this widget is a trivial wrapper subclass. ''' class SettingsWithSpinner(Settings): '''A settings widget that displays one settings panel at a time with a spinner at the top to switch between them. 
''' def __init__(self, *args, **kwargs): self.interface_cls = InterfaceWithSpinner super(SettingsWithSpinner, self).__init__(*args, **kwargs) class SettingsWithTabbedPanel(Settings): '''A settings widget that displays settings panels as pages in a :class:`~kivy.uix.tabbedpanel.TabbedPanel`. ''' __events__ = ('on_close', ) def __init__(self, *args, **kwargs): self.interface_cls = InterfaceWithTabbedPanel super(SettingsWithTabbedPanel, self).__init__(*args, **kwargs) def on_close(self, *args): pass class SettingsWithNoMenu(Settings): '''A settings widget that displays a single settings panel, with *no* Close button. It will not accept more than one settings panel. It is intended for use in programs with few enough settings that a full panel switcher is not useful. .. warning:: This Settings panel does *not* provide a Close button, and so it is impossible to leave the settings screen unless you also add other behaviour or override :meth:`~kivy.app.App.display_settings` and :meth:`~kivy.app.App.close_settings`. ''' def __init__(self, *args, **kwargs): self.interface_cls = InterfaceWithNoMenu super(SettingsWithNoMenu, self).__init__(*args, **kwargs) class InterfaceWithNoMenu(ContentPanel): '''The interface widget used by :class:`SettingsWithNoMenu`. It stores and displays a single settings panel. This widget is considered internal and is not documented. See :class:`ContentPanel` for information on defining your own content widget. ''' def add_widget(self, widget): if self.container is not None and len(self.container.children) > 0: raise Exception('ContentNoMenu cannot accept more than one settings' 'panel') super(InterfaceWithNoMenu, self).add_widget(widget) class InterfaceWithTabbedPanel(FloatLayout): '''The content widget used by :class:`SettingsWithTabbedPanel`. It stores and displays settings panels in tabs of a TabbedPanel. This widget is considered internal and is not documented. 
See :class:`InterfaceWithSidebar` for information on defining your own interface widget. ''' tabbedpanel = ObjectProperty() close_button = ObjectProperty() __events__ = ('on_close', ) def __init__(self, *args, **kwargs): super(InterfaceWithTabbedPanel, self).__init__(*args, **kwargs) self.close_button.bind(on_release=lambda j: self.dispatch('on_close')) def add_panel(self, panel, name, uid): scrollview = ScrollView() scrollview.add_widget(panel) panelitem = TabbedPanelHeader(text=name, content=scrollview) self.tabbedpanel.add_widget(panelitem) def on_close(self, *args): pass class MenuSpinner(BoxLayout): '''The menu class used by :class:`SettingsWithSpinner`. It provides a sidebar with an entry for each settings panel. This widget is considered internal and is not documented. See :class:`MenuSidebar` for information on menus and creating your own menu class. ''' selected_uid = NumericProperty(0) close_button = ObjectProperty(0) spinner = ObjectProperty() panel_names = DictProperty({}) spinner_text = StringProperty() close_button = ObjectProperty() def add_item(self, name, uid): values = self.spinner.values if name in values: i = 2 while name + ' {}'.format(i) in values: i += 1 name = name + ' {}'.format(i) self.panel_names[name] = uid self.spinner.values.append(name) if not self.spinner.text: self.spinner.text = name def on_spinner_text(self, *args): text = self.spinner_text self.selected_uid = self.panel_names[text] class MenuSidebar(FloatLayout): '''The menu used by :class:`InterfaceWithSidebar`. It provides a sidebar with an entry for each settings panel, which the user may click to select. ''' selected_uid = NumericProperty(0) '''The uid of the currently selected panel. This may be used to switch between displayed panels, e.g. by binding it to the :data:`~ContentPanel.current_uid` of a :class:`ContentPanel`. :data:`selected_uid` is a :class`~kivy.properties.NumericProperty`, default to 0. 
''' buttons_layout = ObjectProperty(None) '''(internal) Reference to the GridLayout that contains individual settings panel menu buttons. :data:`buttons_layout` is an :class:`~kivy.properties.ObjectProperty`, default to None. ''' close_button = ObjectProperty(None) '''(internal) Reference to the widget's Close button. :data:`buttons_layout` is an :class:`~kivy.properties.ObjectProperty`, default to None. ''' def add_item(self, name, uid): '''This method is used to add new panels to the menu. :param name: The name (a string) of the panel. It should be used to represent the panel in the menu. :param uid: The name (an int) of the panel. It should be used internally to represent the panel, and used to set self.selected_uid when the panel is changed. ''' label = SettingSidebarLabel(text=name, uid=uid, menu=self) if len(self.buttons_layout.children) == 0: label.selected = True if self.buttons_layout is not None: self.buttons_layout.add_widget(label) def on_selected_uid(self, *args): '''(internal) unselects any currently selected menu buttons, unless they represent the current panel. ''' for button in self.buttons_layout.children: if button.uid != self.selected_uid: button.selected = False class SettingSidebarLabel(Label): # Internal class, not documented. selected = BooleanProperty(False) uid = NumericProperty(0) menu = ObjectProperty(None) def on_touch_down(self, touch): if not self.collide_point(*touch.pos): return self.selected = True self.menu.selected_uid = self.uid if __name__ == '__main__': from kivy.app import App class SettingsApp(App): def build(self): s = Settings() s.add_kivy_panel() s.bind(on_close=self.stop) return s SettingsApp().run()
mit
chouseknecht/ansible
lib/ansible/modules/storage/netapp/_na_cdot_volume.py
59
15187
#!/usr/bin/python # (c) 2017, NetApp, Inc # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['deprecated'], 'supported_by': 'community'} DOCUMENTATION = ''' module: na_cdot_volume short_description: Manage NetApp cDOT volumes extends_documentation_fragment: - netapp.ontap version_added: '2.3' author: Sumit Kumar (@timuster) <sumit4@netapp.com> deprecated: removed_in: '2.11' why: Updated modules released with increased functionality alternative: Use M(na_ontap_volume) instead. description: - Create or destroy volumes on NetApp cDOT options: state: description: - Whether the specified volume should exist or not. required: true choices: ['present', 'absent'] name: description: - The name of the volume to manage. required: true infinite: description: - Set True if the volume is an Infinite Volume. type: bool default: 'no' online: description: - Whether the specified volume is online, or not. type: bool default: 'yes' aggregate_name: description: - The name of the aggregate the flexvol should exist on. Required when C(state=present). size: description: - The size of the volume in (size_unit). Required when C(state=present). size_unit: description: - The unit used to interpret the size parameter. choices: ['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb'] default: 'gb' vserver: description: - Name of the vserver to use. required: true junction_path: description: - Junction path where to mount the volume required: false version_added: '2.6' export_policy: description: - Export policy to set for the specified junction path. required: false default: default version_added: '2.6' snapshot_policy: description: - Snapshot policy to set for the specified volume. 
required: false default: default version_added: '2.6' ''' EXAMPLES = """ - name: Create FlexVol na_cdot_volume: state: present name: ansibleVolume infinite: False aggregate_name: aggr1 size: 20 size_unit: mb vserver: ansibleVServer hostname: "{{ netapp_hostname }}" username: "{{ netapp_username }}" password: "{{ netapp_password }}" junction_path: /ansibleVolume export_policy: all_nfs_networks snapshot_policy: daily - name: Make FlexVol offline na_cdot_volume: state: present name: ansibleVolume infinite: False online: False vserver: ansibleVServer hostname: "{{ netapp_hostname }}" username: "{{ netapp_username }}" password: "{{ netapp_password }}" """ RETURN = """ """ import traceback from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native import ansible.module_utils.netapp as netapp_utils HAS_NETAPP_LIB = netapp_utils.has_netapp_lib() class NetAppCDOTVolume(object): def __init__(self): self._size_unit_map = dict( bytes=1, b=1, kb=1024, mb=1024 ** 2, gb=1024 ** 3, tb=1024 ** 4, pb=1024 ** 5, eb=1024 ** 6, zb=1024 ** 7, yb=1024 ** 8 ) self.argument_spec = netapp_utils.ontap_sf_host_argument_spec() self.argument_spec.update(dict( state=dict(required=True, choices=['present', 'absent']), name=dict(required=True, type='str'), is_infinite=dict(required=False, type='bool', default=False, aliases=['infinite']), is_online=dict(required=False, type='bool', default=True, aliases=['online']), size=dict(type='int'), size_unit=dict(default='gb', choices=['bytes', 'b', 'kb', 'mb', 'gb', 'tb', 'pb', 'eb', 'zb', 'yb'], type='str'), aggregate_name=dict(type='str'), vserver=dict(required=True, type='str', default=None), junction_path=dict(required=False, type='str', default=None), export_policy=dict(required=False, type='str', default='default'), snapshot_policy=dict(required=False, type='str', default='default'), )) self.module = AnsibleModule( argument_spec=self.argument_spec, required_if=[ ('state', 'present', ['aggregate_name', 
'size']) ], supports_check_mode=True ) p = self.module.params # set up state variables self.state = p['state'] self.name = p['name'] self.is_infinite = p['is_infinite'] self.is_online = p['is_online'] self.size_unit = p['size_unit'] self.vserver = p['vserver'] self.junction_path = p['junction_path'] self.export_policy = p['export_policy'] self.snapshot_policy = p['snapshot_policy'] if p['size'] is not None: self.size = p['size'] * self._size_unit_map[self.size_unit] else: self.size = None self.aggregate_name = p['aggregate_name'] if HAS_NETAPP_LIB is False: self.module.fail_json(msg="the python NetApp-Lib module is required") else: self.server = netapp_utils.setup_ontap_zapi(module=self.module, vserver=self.vserver) def get_volume(self): """ Return details about the volume :param: name : Name of the volume :return: Details about the volume. None if not found. :rtype: dict """ volume_info = netapp_utils.zapi.NaElement('volume-get-iter') volume_attributes = netapp_utils.zapi.NaElement('volume-attributes') volume_id_attributes = netapp_utils.zapi.NaElement('volume-id-attributes') volume_id_attributes.add_new_child('name', self.name) volume_attributes.add_child_elem(volume_id_attributes) query = netapp_utils.zapi.NaElement('query') query.add_child_elem(volume_attributes) volume_info.add_child_elem(query) result = self.server.invoke_successfully(volume_info, True) return_value = None if result.get_child_by_name('num-records') and \ int(result.get_child_content('num-records')) >= 1: volume_attributes = result.get_child_by_name( 'attributes-list').get_child_by_name( 'volume-attributes') # Get volume's current size volume_space_attributes = volume_attributes.get_child_by_name( 'volume-space-attributes') current_size = volume_space_attributes.get_child_content('size') # Get volume's state (online/offline) volume_state_attributes = volume_attributes.get_child_by_name( 'volume-state-attributes') current_state = volume_state_attributes.get_child_content('state') is_online = 
None if current_state == "online": is_online = True elif current_state == "offline": is_online = False return_value = { 'name': self.name, 'size': current_size, 'is_online': is_online, } return return_value def create_volume(self): create_parameters = {'volume': self.name, 'containing-aggr-name': self.aggregate_name, 'size': str(self.size), } if self.junction_path: create_parameters['junction-path'] = str(self.junction_path) if self.export_policy != 'default': create_parameters['export-policy'] = str(self.export_policy) if self.snapshot_policy != 'default': create_parameters['snapshot-policy'] = str(self.snapshot_policy) volume_create = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-create', **create_parameters) try: self.server.invoke_successfully(volume_create, enable_tunneling=True) except netapp_utils.zapi.NaApiError as e: self.module.fail_json(msg='Error provisioning volume %s of size %s: %s' % (self.name, self.size, to_native(e)), exception=traceback.format_exc()) def delete_volume(self): if self.is_infinite: volume_delete = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-destroy-async', **{'volume-name': self.name}) else: volume_delete = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-destroy', **{'name': self.name, 'unmount-and-offline': 'true'}) try: self.server.invoke_successfully(volume_delete, enable_tunneling=True) except netapp_utils.zapi.NaApiError as e: self.module.fail_json(msg='Error deleting volume %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def rename_volume(self): """ Rename the volume. Note: 'is_infinite' needs to be set to True in order to rename an Infinite Volume. 
""" if self.is_infinite: volume_rename = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-rename-async', **{'volume-name': self.name, 'new-volume-name': str( self.name)}) else: volume_rename = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-rename', **{'volume': self.name, 'new-volume-name': str( self.name)}) try: self.server.invoke_successfully(volume_rename, enable_tunneling=True) except netapp_utils.zapi.NaApiError as e: self.module.fail_json(msg='Error renaming volume %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def resize_volume(self): """ Re-size the volume. Note: 'is_infinite' needs to be set to True in order to rename an Infinite Volume. """ if self.is_infinite: volume_resize = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-size-async', **{'volume-name': self.name, 'new-size': str( self.size)}) else: volume_resize = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-size', **{'volume': self.name, 'new-size': str( self.size)}) try: self.server.invoke_successfully(volume_resize, enable_tunneling=True) except netapp_utils.zapi.NaApiError as e: self.module.fail_json(msg='Error re-sizing volume %s: %s' % (self.name, to_native(e)), exception=traceback.format_exc()) def change_volume_state(self): """ Change volume's state (offline/online). Note: 'is_infinite' needs to be set to True in order to change the state of an Infinite Volume. """ state_requested = None if self.is_online: # Requested state is 'online'. state_requested = "online" if self.is_infinite: volume_change_state = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-online-async', **{'volume-name': self.name}) else: volume_change_state = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-online', **{'name': self.name}) else: # Requested state is 'offline'. 
state_requested = "offline" if self.is_infinite: volume_change_state = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-offline-async', **{'volume-name': self.name}) else: volume_change_state = netapp_utils.zapi.NaElement.create_node_with_children( 'volume-offline', **{'name': self.name}) try: self.server.invoke_successfully(volume_change_state, enable_tunneling=True) except netapp_utils.zapi.NaApiError as e: self.module.fail_json(msg='Error changing the state of volume %s to %s: %s' % (self.name, state_requested, to_native(e)), exception=traceback.format_exc()) def apply(self): changed = False volume_exists = False rename_volume = False resize_volume = False volume_detail = self.get_volume() if volume_detail: volume_exists = True if self.state == 'absent': changed = True elif self.state == 'present': if str(volume_detail['size']) != str(self.size): resize_volume = True changed = True if (volume_detail['is_online'] is not None) and (volume_detail['is_online'] != self.is_online): changed = True if self.is_online is False: # Volume is online, but requested state is offline pass else: # Volume is offline but requested state is online pass else: if self.state == 'present': changed = True if changed: if self.module.check_mode: pass else: if self.state == 'present': if not volume_exists: self.create_volume() else: if resize_volume: self.resize_volume() if volume_detail['is_online'] is not \ None and volume_detail['is_online'] != \ self.is_online: self.change_volume_state() # Ensure re-naming is the last change made. if rename_volume: self.rename_volume() elif self.state == 'absent': self.delete_volume() self.module.exit_json(changed=changed) def main(): v = NetAppCDOTVolume() v.apply() if __name__ == '__main__': main()
gpl-3.0
elfi-dev/elfi
elfi/methods/inference/samplers.py
1
31451
"""This module contains sampling based inference methods.""" __all__ = ['Rejection', 'SMC', 'AdaptiveDistanceSMC', 'AdaptiveThresholdSMC'] import logging from math import ceil import numpy as np import elfi.visualization.interactive as visin from elfi.loader import get_sub_seed from elfi.methods.density_ratio_estimation import (DensityRatioEstimation, calculate_densratio_basis_sigma) from elfi.methods.inference.parameter_inference import ParameterInference from elfi.methods.results import Sample, SmcSample from elfi.methods.utils import (GMDistribution, arr2d_to_batch, weighted_sample_quantile, weighted_var) from elfi.model.elfi_model import AdaptiveDistance from elfi.model.extensions import ModelPrior from elfi.utils import is_array logger = logging.getLogger(__name__) class Sampler(ParameterInference): def sample(self, n_samples, *args, **kwargs): """Sample from the approximate posterior. See the other arguments from the `set_objective` method. Parameters ---------- n_samples : int Number of samples to generate from the (approximate) posterior *args **kwargs Returns ------- result : Sample """ bar = kwargs.pop('bar', True) self.bar = bar return self.infer(n_samples, *args, bar=bar, **kwargs) def _extract_result_kwargs(self): kwargs = super(Sampler, self)._extract_result_kwargs() for state_key in ['threshold', 'accept_rate']: if state_key in self.state: kwargs[state_key] = self.state[state_key] if hasattr(self, 'discrepancy_name'): kwargs['discrepancy_name'] = self.discrepancy_name return kwargs class Rejection(Sampler): """Parallel ABC rejection sampler. For a description of the rejection sampler and a general introduction to ABC, see e.g. Lintusaari et al. 2016. References ---------- Lintusaari J, Gutmann M U, Dutta R, Kaski S, Corander J (2016). Fundamentals and Recent Developments in Approximate Bayesian Computation. Systematic Biology. http://dx.doi.org/10.1093/sysbio/syw077. 
""" def __init__(self, model, discrepancy_name=None, output_names=None, **kwargs): """Initialize the Rejection sampler. Parameters ---------- model : ElfiModel or NodeReference discrepancy_name : str, NodeReference, optional Only needed if model is an ElfiModel output_names : list, optional Additional outputs from the model to be included in the inference result, e.g. corresponding summaries to the acquired samples kwargs: See ParameterInference """ model, discrepancy_name = self._resolve_model(model, discrepancy_name) output_names = [discrepancy_name] + model.parameter_names + (output_names or []) self.adaptive = isinstance(model[discrepancy_name], AdaptiveDistance) if self.adaptive: model[discrepancy_name].init_adaptation_round() # Summaries are needed as adaptation data self.sums = [sumstat.name for sumstat in model[discrepancy_name].parents] for k in self.sums: if k not in output_names: output_names.append(k) super(Rejection, self).__init__(model, output_names, **kwargs) self.discrepancy_name = discrepancy_name def set_objective(self, n_samples, threshold=None, quantile=None, n_sim=None): """Set objective for inference. Parameters ---------- n_samples : int number of samples to generate threshold : float Acceptance threshold quantile : float In between (0,1). Define the threshold as the p-quantile of all the simulations. n_sim = n_samples/quantile. n_sim : int Total number of simulations. The threshold will be the n_samples-th smallest discrepancy among n_sim simulations. 
""" if quantile is None and threshold is None and n_sim is None: quantile = .01 self.state = dict(samples=None, threshold=np.Inf, n_sim=0, accept_rate=1, n_batches=0) if quantile: n_sim = ceil(n_samples / quantile) # Set initial n_batches estimate if n_sim: n_batches = ceil(n_sim / self.batch_size) else: n_batches = self.max_parallel_batches self.objective = dict(n_samples=n_samples, threshold=threshold, n_batches=n_batches) # Reset the inference self.batches.reset() def update(self, batch, batch_index): """Update the inference state with a new batch. Parameters ---------- batch : dict dict with `self.outputs` as keys and the corresponding outputs for the batch as values batch_index : int """ super(Rejection, self).update(batch, batch_index) if self.state['samples'] is None: # Lazy initialization of the outputs dict self._init_samples_lazy(batch) self._merge_batch(batch) self._update_state_meta() self._update_objective_n_batches() def extract_result(self): """Extract the result from the current state. Returns ------- result : Sample """ if self.state['samples'] is None: raise ValueError('Nothing to extract') if self.adaptive: self._update_distances() # Take out the correct number of samples outputs = dict() for k, v in self.state['samples'].items(): outputs[k] = v[:self.objective['n_samples']] return Sample(outputs=outputs, **self._extract_result_kwargs()) def _init_samples_lazy(self, batch): """Initialize the outputs dict based on the received batch.""" samples = {} e_noarr = "Node {} output must be in a numpy array of length {} (batch_size)." e_len = "Node {} output has array length {}. It should be equal to the batch size {}." 
for node in self.output_names: # Check the requested outputs if node not in batch: raise KeyError( "Did not receive outputs for node {}".format(node)) nbatch = batch[node] if not is_array(nbatch): raise ValueError(e_noarr.format(node, self.batch_size)) elif len(nbatch) != self.batch_size: raise ValueError(e_len.format( node, len(nbatch), self.batch_size)) # Prepare samples shape = (self.objective['n_samples'] + self.batch_size, ) + nbatch.shape[1:] dtype = nbatch.dtype if node == self.discrepancy_name: # Initialize the distances to inf samples[node] = np.ones(shape, dtype=dtype) * np.inf else: samples[node] = np.empty(shape, dtype=dtype) self.state['samples'] = samples def _merge_batch(self, batch): # TODO: add index vector so that you can recover the original order samples = self.state['samples'] # Add current batch to adaptation data if self.adaptive: observed_sums = [batch[s] for s in self.sums] self.model[self.discrepancy_name].add_data(*observed_sums) # Check acceptance condition if self.objective.get('threshold') is None: accepted = slice(None, None) num_accepted = self.batch_size else: accepted = batch[self.discrepancy_name] <= self.objective.get('threshold') accepted = np.all(np.atleast_2d(np.transpose(accepted)), axis=0) num_accepted = np.sum(accepted) # Put the acquired samples to the end if num_accepted > 0: for node, v in samples.items(): v[-num_accepted:] = batch[node][accepted] # Sort the smallest to the beginning # note: last (-1) distance measure is used when distance calculation is nested sort_distance = np.atleast_2d(np.transpose(samples[self.discrepancy_name]))[-1] sort_mask = np.argsort(sort_distance) for k, v in samples.items(): v[:] = v[sort_mask] def _update_state_meta(self): """Update `n_sim`, `threshold`, and `accept_rate`.""" o = self.objective s = self.state s['threshold'] = s['samples'][self.discrepancy_name][o['n_samples'] - 1] s['accept_rate'] = min(1, o['n_samples'] / s['n_sim']) def _update_objective_n_batches(self): # Only in the 
case that the threshold is used if self.objective.get('threshold') is None: return s = self.state t, n_samples = [self.objective.get(k) for k in ('threshold', 'n_samples')] # noinspection PyTypeChecker if s['samples']: accepted = s['samples'][self.discrepancy_name] <= t n_acceptable = np.sum(np.all(np.atleast_2d(np.transpose(accepted)), axis=0)) else: n_acceptable = 0 if n_acceptable == 0: # No acceptable samples found yet, increase n_batches of objective by one in # order to keep simulating n_batches = self.objective['n_batches'] + 1 else: accept_rate_t = n_acceptable / s['n_sim'] # Add some margin to estimated n_batches. One could also use confidence # bounds here margin = .2 * self.batch_size * int(n_acceptable < n_samples) n_batches = (n_samples / accept_rate_t + margin) / self.batch_size n_batches = ceil(n_batches) self.objective['n_batches'] = n_batches logger.debug('Estimated objective n_batches=%d' % self.objective['n_batches']) def _update_distances(self): # Update adaptive distance node self.model[self.discrepancy_name].update_distance() # Recalculate distances in current sample nums = self.objective['n_samples'] data = {s: self.state['samples'][s][:nums] for s in self.sums} ds = self.model[self.discrepancy_name].generate(with_values=data) # Sort based on new distance measure sort_distance = np.atleast_2d(np.transpose(ds))[-1] sort_mask = np.argsort(sort_distance) # Update state self.state['samples'][self.discrepancy_name] = sort_distance for k in self.state['samples'].keys(): if k != self.discrepancy_name: self.state['samples'][k][:nums] = self.state['samples'][k][sort_mask] self._update_state_meta() def plot_state(self, **options): """Plot the current state of the inference algorithm. This feature is still experimental and only supports 1d or 2d cases. 
""" displays = [] if options.get('interactive'): from IPython import display displays.append( display.HTML('<span>Threshold: {}</span>'.format(self.state['threshold']))) visin.plot_sample( self.state['samples'], nodes=self.parameter_names, n=self.objective['n_samples'], displays=displays, **options) class SMC(Sampler): """Sequential Monte Carlo ABC sampler.""" def __init__(self, model, discrepancy_name=None, output_names=None, **kwargs): """Initialize the SMC-ABC sampler. Parameters ---------- model : ElfiModel or NodeReference discrepancy_name : str, NodeReference, optional Only needed if model is an ElfiModel output_names : list, optional Additional outputs from the model to be included in the inference result, e.g. corresponding summaries to the acquired samples kwargs: See ParameterInference """ model, discrepancy_name = self._resolve_model(model, discrepancy_name) output_names = [discrepancy_name] + model.parameter_names + (output_names or []) super(SMC, self).__init__(model, output_names, **kwargs) self._prior = ModelPrior(self.model) self.discrepancy_name = discrepancy_name self.state['round'] = 0 self._populations = [] self._rejection = None self._round_random_state = None self._quantiles = None def set_objective(self, n_samples, thresholds=None, quantiles=None): """Set objective for ABC-SMC inference. 
        Parameters
        ----------
        n_samples : int
            Number of samples to generate
        thresholds : list, optional
            List of thresholds for ABC-SMC
        quantiles : list, optional
            List of selection quantiles used to determine sample thresholds

        """
        if thresholds is None and quantiles is None:
            raise ValueError("Either thresholds or quantiles is required to run ABC-SMC.")
        if thresholds is None:
            rounds = len(quantiles) - 1
        else:
            rounds = len(thresholds) - 1

        # Take previous iterations into account in case continued estimation
        self.state['round'] = len(self._populations)
        rounds = rounds + self.state['round']

        # Pad with None for already-completed rounds so indexing by round number works.
        if thresholds is None:
            thresholds = np.full((rounds+1), None)
            self._quantiles = np.concatenate((np.full((self.state['round']), None), quantiles))
        else:
            thresholds = np.concatenate((np.full((self.state['round']), None), thresholds))

        self.objective.update(
            dict(
                n_samples=n_samples,
                n_batches=self.max_parallel_batches,
                round=rounds,
                thresholds=thresholds))

        self._init_new_round()
        self._update_objective()

    def extract_result(self):
        """Extract the result from the current state.

        Returns
        -------
        SmcSample

        """
        # Extract information from the population
        pop = self._extract_population()
        self._populations.append(pop)

        return SmcSample(
            outputs=pop.outputs,
            populations=self._populations.copy(),
            weights=pop.weights,
            threshold=pop.threshold,
            **self._extract_result_kwargs())

    def update(self, batch, batch_index):
        """Update the inference state with a new batch.

        Parameters
        ----------
        batch : dict
            dict with `self.outputs` as keys and the corresponding outputs for the batch
            as values
        batch_index : int

        """
        super(SMC, self).update(batch, batch_index)
        # Delegate the batch to the inner rejection sampler of the current round.
        self._rejection.update(batch, batch_index)

        if self._rejection.finished:
            self.batches.cancel_pending()
            if self.bar:
                self.progress_bar.update_progressbar(self.progress_bar.scaling + 1,
                                                     self.progress_bar.scaling + 1)
            # Advance to the next population unless this was the final round.
            if self.state['round'] < self.objective['round']:
                self._populations.append(self._extract_population())
                self.state['round'] += 1
                self._init_new_round()

        self._update_objective()

    def prepare_new_batch(self, batch_index):
        """Prepare values for a new batch.

        Parameters
        ----------
        batch_index : int
            next batch_index to be submitted

        Returns
        -------
        batch : dict or None
            Keys should match to node names in the model. These values will override any
            default values or operations in those nodes.

        """
        if self.state['round'] == 0:
            # Use the actual prior
            return

        # Sample from the proposal, condition on actual prior
        params = GMDistribution.rvs(*self._gm_params, size=self.batch_size,
                                    prior_logpdf=self._prior.logpdf,
                                    random_state=self._round_random_state)

        batch = arr2d_to_batch(params, self.parameter_names)
        return batch

    def _init_new_round(self):
        # Build a fresh rejection sampler for this round and give it its
        # acceptance criterion (quantile for round 0, threshold otherwise).
        self._set_rejection_round(self.state['round'])
        if self.state['round'] == 0 and self._quantiles is not None:
            self._rejection.set_objective(
                self.objective['n_samples'], quantile=self._quantiles[0])
        else:
            if self._quantiles is not None:
                self._set_threshold()
            self._rejection.set_objective(
                self.objective['n_samples'], threshold=self.current_population_threshold)

    def _set_rejection_round(self, round):
        # NOTE(review): parameter `round` shadows the builtin of the same name.
        self._update_round_info(self.state['round'])

        # Get a subseed for this round for ensuring consistent results for the round
        seed = self.seed if round == 0 else get_sub_seed(self.seed, round)
        self._round_random_state = np.random.RandomState(seed)

        self._rejection = Rejection(
            self.model,
            discrepancy_name=self.discrepancy_name,
            output_names=self.output_names,
            batch_size=self.batch_size,
            seed=seed,
            max_parallel_batches=self.max_parallel_batches)

    def _update_round_info(self, round):
        # Reset the progress bar for the new round and log the round start.
        if self.bar:
            reinit_msg = 'ABC-SMC Round {0} / {1}'.format(
                round + 1, self.objective['round'] + 1)
            self.progress_bar.reinit_progressbar(
                scaling=(self.state['n_batches']), reinit_msg=reinit_msg)
        dashes = '-' * 16
        logger.info('%s Starting round %d %s' % (dashes, round, dashes))

    def _extract_population(self):
        # Turn the finished rejection run into a weighted population sample.
        sample = self._rejection.extract_result()
        # Append the sample object
        sample.method_name = "Rejection within SMC-ABC"
        means, w, cov = self._compute_weights_means_and_cov(sample)
        sample.means = means
        sample.weights = w
        sample.meta['cov'] = cov
        return sample

    def _compute_weights_means_and_cov(self, pop):
        """Compute importance weights, proposal means and the proposal covariance
        for the population `pop`."""
        params = np.column_stack(tuple([pop.outputs[p] for p in self.parameter_names]))

        if self._populations:
            # Importance weights: prior density over the Gaussian-mixture proposal.
            q_logpdf = GMDistribution.logpdf(params, *self._gm_params)
            p_logpdf = self._prior.logpdf(params)
            w = np.exp(p_logpdf - q_logpdf)
        else:
            # First population was drawn directly from the prior.
            w = np.ones(pop.n_samples)

        means = params.copy()

        if np.count_nonzero(w) == 0:
            raise RuntimeError("All sample weights are zero. If you are using a prior "
                               "with a bounded support, this may be caused by specifying "
                               "a too small sample size.")

        # New covariance
        # NOTE(review): the factor 2 looks like the standard SMC-ABC kernel
        # scaling of twice the weighted sample variance — confirm against reference.
        cov = 2 * np.diag(weighted_var(params, w))

        if not np.all(np.isfinite(cov)):
            # NOTE(review): the concatenated message lacks a space between
            # "zero." and "Falling" — candidate cosmetic fix.
            logger.warning("Could not estimate the sample covariance. This is often "
                           "caused by majority of the sample weights becoming zero."
                           "Falling back to using unit covariance.")
            cov = np.diag(np.ones(params.shape[1]))

        return means, w, cov

    def _update_objective(self):
        """Update the objective n_batches."""
        n_batches = sum([pop.n_batches for pop in self._populations])
        self.objective['n_batches'] = n_batches + \
            self._rejection.objective['n_batches']

    def _set_threshold(self):
        """Select the next round's threshold as a weighted quantile of the
        previous population's discrepancies."""
        previous_population = self._populations[self.state['round']-1]
        threshold = weighted_sample_quantile(
            x=previous_population.discrepancies,
            alpha=self._quantiles[self.state['round']],
            weights=previous_population.weights)
        logger.info('ABC-SMC: Selected threshold for next population %.3f' % (threshold))
        self.objective['thresholds'][self.state['round']] = threshold

    @property
    def _gm_params(self):
        # Parameters of the Gaussian-mixture proposal built from the latest population.
        sample = self._populations[-1]
        return sample.means, sample.cov, sample.weights

    @property
    def current_population_threshold(self):
        """Return the threshold for current population."""
        return self.objective['thresholds'][self.state['round']]


class AdaptiveDistanceSMC(SMC):
    """SMC-ABC sampler with adaptive threshold and distance function.

    Notes
    -----
    Algorithm 5 in Prangle (2017)

    References
    ----------
    Prangle D (2017). Adapting the ABC Distance Function.
    Bayesian Analysis 12(1):289-309, 2017.
    https://projecteuclid.org/euclid.ba/1460641065

    """

    def __init__(self, model, discrepancy_name=None, output_names=None, **kwargs):
        """Initialize the adaptive distance SMC-ABC sampler.

        Parameters
        ----------
        model : ElfiModel or NodeReference
        discrepancy_name : str, NodeReference, optional
            Only needed if model is an ElfiModel
        output_names : list, optional
            Additional outputs from the model to be included in the inference result,
            e.g. corresponding summaries to the acquired samples
        kwargs:
            See ParameterInference

        """
        model, discrepancy_name = self._resolve_model(model, discrepancy_name)
        if not isinstance(model[discrepancy_name], AdaptiveDistance):
            raise TypeError('This method requires an adaptive distance node.')

        # Initialise adaptive distance node
        model[discrepancy_name].init_state()

        # Add summaries in additional outputs as these are needed to update the distance node
        sums = [sumstat.name for sumstat in model[discrepancy_name].parents]
        if output_names is None:
            output_names = sums
        else:
            for k in sums:
                if k not in output_names:
                    output_names.append(k)
        super(AdaptiveDistanceSMC, self).__init__(model, discrepancy_name,
                                                  output_names=output_names, **kwargs)

    def set_objective(self, n_samples, rounds, quantile=0.5):
        """Set objective for adaptive distance ABC-SMC inference.

        Parameters
        ----------
        n_samples : int
            Number of samples to generate
        rounds : int
            Number of populations to sample
        quantile : float, optional
            Selection quantile used to determine sample thresholds

        """
        # Oversample by 1/quantile so that the final selection keeps n_samples.
        super(AdaptiveDistanceSMC, self).set_objective(ceil(n_samples/quantile),
                                                       quantiles=[1]*rounds)
        self.population_size = n_samples
        self.quantile = quantile

    def _extract_population(self):
        # Extract population and metadata based on rejection sample
        rejection_sample = self._rejection.extract_result()
        outputs = dict()
        for k in self.output_names:
            outputs[k] = rejection_sample.outputs[k][:self.population_size]
        meta = rejection_sample.meta
        meta['adaptive_distance_w'] = self.model[self.discrepancy_name].state['w'][-1]
        meta['threshold'] = max(outputs[self.discrepancy_name])
        meta['accept_rate'] = self.population_size/meta['n_sim']
        method_name = "Rejection within adaptive distance SMC-ABC"
        sample = Sample(method_name, outputs, self.parameter_names, **meta)
        # Append the sample object
        means, w, cov = self._compute_weights_means_and_cov(sample)
        sample.means = means
        sample.weights = w
        sample.meta['cov'] = cov
        return sample

    def _extract_result_kwargs(self):
        # Expose the per-population adaptive distance weights in the result.
        kwargs = super(AdaptiveDistanceSMC, self)._extract_result_kwargs()
        kwargs['adaptive_distance_w'] = [pop.adaptive_distance_w for pop in self._populations]
        return kwargs

    def _set_threshold(self):
        # NOTE(review): local `round` shadows the builtin of the same name.
        round = self.state['round']
        self.objective['thresholds'][round] = self._populations[round-1].threshold

    @property
    def current_population_threshold(self):
        """Return the threshold for current population."""
        # np.inf stands for the unconstrained first round.
        return [np.inf] + [pop.threshold for pop in self._populations]


class AdaptiveThresholdSMC(SMC):
    """ABC-SMC sampler with adaptive threshold selection.

    References
    ----------
    Simola U, Cisewski-Kehe J, Gutmann M U, Corander J (2021).
    Adaptive Approximate Bayesian Computation Tolerance Selection.
    Bayesian Analysis. https://doi.org/10.1214/20-BA1211

    """

    def __init__(self,
                 model,
                 discrepancy_name=None,
                 output_names=None,
                 initial_quantile=0.20,
                 q_threshold=0.99,
                 densratio_estimation=None,
                 **kwargs):
        """Initialize the adaptive threshold SMC-ABC sampler.

        Parameters
        ----------
        model : ElfiModel or NodeReference
        discrepancy_name : str, NodeReference, optional
            Only needed if model is an ElfiModel
        output_names : list, optional
            Additional outputs from the model to be included in the inference result,
            e.g. corresponding summaries to the acquired samples
        initial_quantile : float, optional
            Initial selection quantile for the first round of adaptive-ABC-SMC
        q_threshold : float, optional
            Termination criteria for adaptive-ABC-SMC
        densratio_estimation : DensityRatioEstimation, optional
            Density ratio estimation object defining parameters for KLIEP
        kwargs:
            See ParameterInference

        """
        model, discrepancy_name = self._resolve_model(model, discrepancy_name)
        output_names = [discrepancy_name] + model.parameter_names + (output_names or [])
        # NOTE(review): deliberately skips SMC.__init__ (calls the grandparent)
        # and re-initialises the SMC state below.
        super(SMC, self).__init__(model, output_names, **kwargs)

        self._prior = ModelPrior(self.model)
        self.discrepancy_name = discrepancy_name
        self.state['round'] = 0
        self._populations = []
        self._rejection = None
        self._round_random_state = None

        self.q_threshold = q_threshold
        self.initial_quantile = initial_quantile
        self.densratio = densratio_estimation or DensityRatioEstimation(n=100,
                                                                        epsilon=0.001,
                                                                        max_iter=200,
                                                                        abs_tol=0.01,
                                                                        fold=5,
                                                                        optimize=False)

    def set_objective(self, n_samples, max_iter=10):
        """Set objective for ABC-SMC inference.

        Parameters
        ----------
        n_samples : int
            Number of samples to generate
        max_iter : int, optional
            Maximum number of iterations

        """
        rounds = max_iter - 1

        # Take previous iterations into account in case continued estimation
        self.state['round'] = len(self._populations)
        rounds = rounds + self.state['round']

        # Initialise threshold selection and adaptive quantile
        thresholds = np.full((rounds+1), None)
        self._quantiles = np.full((rounds+1), None)
        self._quantiles[0] = self.initial_quantile

        self.objective.update(
            dict(
                n_samples=n_samples,
                n_batches=self.max_parallel_batches,
                round=rounds,
                thresholds=thresholds))

        self._init_new_round()
        self._update_objective()

    def update(self, batch, batch_index):
        """Update the inference state with a new batch.

        Parameters
        ----------
        batch : dict
            dict with `self.outputs` as keys and the corresponding outputs for the batch
            as values
        batch_index : int

        """
        super(SMC, self).update(batch, batch_index)
        self._rejection.update(batch, batch_index)

        if self._rejection.finished:
            self.batches.cancel_pending()
            if self.bar:
                self.progress_bar.update_progressbar(self.progress_bar.scaling + 1,
                                                     self.progress_bar.scaling + 1)
            self._new_population = self._extract_population()
            if self.state['round'] < self.objective['round']:
                self._set_adaptive_quantile()
                # Continue only while the adaptive quantile stays below the
                # termination threshold q_threshold.
                if self._quantiles[self.state['round']+1] < self.q_threshold:
                    self._populations.append(self._new_population)
                    self.state['round'] += 1
                    self._init_new_round()

        self._update_objective()

    def _set_adaptive_quantile(self):
        """Set adaptively the new threshold for current population."""
        logger.info("ABC-SMC: Adapting quantile threshold...")
        sample_data_current = self._resolve_sample(backwards_index=0)
        sample_data_previous = self._resolve_sample(backwards_index=-1)

        if self.densratio.optimize:
            sigma = list(10.0 ** np.arange(-1, 6))
        else:
            sigma = calculate_densratio_basis_sigma(sample_data_current['sigma_max'],
                                                    sample_data_previous['sigma_max'])

        self.densratio.fit(x=sample_data_current['samples'],
                           y=sample_data_previous['samples'],
                           weights_x=sample_data_current['weights'],
                           weights_y=sample_data_previous['weights'],
                           sigma=sigma)

        max_value = self.densratio.max_ratio()
        max_value = 1.0 if max_value < 1.0 else max_value
        # Quantile is the inverse of the maximum density ratio, floored at 0.05.
        self._quantiles[self.state['round']+1] = max(1 / max_value, 0.05)
        logger.info('ABC-SMC: Estimated maximum density ratio %.5f' % (1 / max_value))

    def _resolve_sample(self, backwards_index):
        """Get properties of the samples used in ratio estimation."""
        if self.state['round'] + backwards_index < 0:
            # No earlier population exists; fall back to a prior sample.
            return self._densityratio_initial_sample()
        elif backwards_index == 0:
            sample = self._new_population
        else:
            sample = self._populations[backwards_index]
        weights = sample.weights
        samples = sample.samples_array
        sample_sigma = np.sqrt(np.diag(sample.cov))
        sigma_max = np.min(sample_sigma)
        sample_data = dict(samples=samples, weights=weights, sigma_max=sigma_max)
        return sample_data

    def _densityratio_initial_sample(self):
        """Draw a uniformly weighted prior sample for density-ratio estimation."""
        n_samples = self._new_population.weights.shape[0]
        samples = self._prior.rvs(size=n_samples, random_state=self._round_random_state)
        weights = np.ones(n_samples)
        sample_cov = np.atleast_2d(np.cov(samples.reshape(n_samples, -1), rowvar=False))
        sigma_max = np.min(np.sqrt(np.diag(sample_cov)))
        return dict(samples=samples, weights=weights, sigma_max=sigma_max)
bsd-3-clause
espadrine/opera
chromium/src/third_party/WebKit/Source/core/scripts/license.py
5
3270
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# FIXME: We should either not use license blocks in generated files
# or we should read this from some central license file.


def license_for_generated_cpp():
    """Return the BSD license block, as a C-style comment string, for
    inclusion at the top of generated C++ source files."""
    return """/*
 * Copyright (C) 2013 Google Inc. All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions are
 * met:
 *
 * * Redistributions of source code must retain the above copyright
 * notice, this list of conditions and the following disclaimer.
 * * Redistributions in binary form must reproduce the above
 * copyright notice, this list of conditions and the following disclaimer
 * in the documentation and/or other materials provided with the
 * distribution.
 * * Neither the name of Google Inc. nor the names of its
 * contributors may be used to endorse or promote products derived from
 * this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
 * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
 * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
 * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
 * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
 * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
 * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */
"""
bsd-3-clause
abhiQmar/servo
tests/wpt/web-platform-tests/tools/pytest/_pytest/main.py
171
26357
""" core implementation of testing process: init, session, runtest loop. """ import imp import os import re import sys import _pytest import _pytest._code import py import pytest try: from collections import MutableMapping as MappingMixin except ImportError: from UserDict import DictMixin as MappingMixin from _pytest.runner import collect_one_node tracebackcutdir = py.path.local(_pytest.__file__).dirpath() # exitcodes for the command line EXIT_OK = 0 EXIT_TESTSFAILED = 1 EXIT_INTERRUPTED = 2 EXIT_INTERNALERROR = 3 EXIT_USAGEERROR = 4 EXIT_NOTESTSCOLLECTED = 5 name_re = re.compile("^[a-zA-Z_]\w*$") def pytest_addoption(parser): parser.addini("norecursedirs", "directory patterns to avoid for recursion", type="args", default=['.*', 'CVS', '_darcs', '{arch}', '*.egg']) parser.addini("testpaths", "directories to search for tests when no files or directories are given in the command line.", type="args", default=[]) #parser.addini("dirpatterns", # "patterns specifying possible locations of test files", # type="linelist", default=["**/test_*.txt", # "**/test_*.py", "**/*_test.py"] #) group = parser.getgroup("general", "running and selection options") group._addoption('-x', '--exitfirst', action="store_true", default=False, dest="exitfirst", help="exit instantly on first error or failed test."), group._addoption('--maxfail', metavar="num", action="store", type=int, dest="maxfail", default=0, help="exit after first num failures or errors.") group._addoption('--strict', action="store_true", help="run pytest in strict mode, warnings become errors.") group._addoption("-c", metavar="file", type=str, dest="inifilename", help="load configuration from `file` instead of trying to locate one of the implicit configuration files.") group = parser.getgroup("collect", "collection") group.addoption('--collectonly', '--collect-only', action="store_true", help="only collect tests, don't execute them."), group.addoption('--pyargs', action="store_true", help="try to interpret all arguments as 
python packages.") group.addoption("--ignore", action="append", metavar="path", help="ignore path during collection (multi-allowed).") # when changing this to --conf-cut-dir, config.py Conftest.setinitial # needs upgrading as well group.addoption('--confcutdir', dest="confcutdir", default=None, metavar="dir", help="only load conftest.py's relative to specified dir.") group.addoption('--noconftest', action="store_true", dest="noconftest", default=False, help="Don't load any conftest.py files.") group = parser.getgroup("debugconfig", "test session debugging and configuration") group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir", help="base temporary directory for this test run.") def pytest_namespace(): collect = dict(Item=Item, Collector=Collector, File=File, Session=Session) return dict(collect=collect) def pytest_configure(config): pytest.config = config # compatibiltiy if config.option.exitfirst: config.option.maxfail = 1 def wrap_session(config, doit): """Skeleton command line program""" session = Session(config) session.exitstatus = EXIT_OK initstate = 0 try: try: config._do_configure() initstate = 1 config.hook.pytest_sessionstart(session=session) initstate = 2 session.exitstatus = doit(config, session) or 0 except pytest.UsageError: raise except KeyboardInterrupt: excinfo = _pytest._code.ExceptionInfo() config.hook.pytest_keyboard_interrupt(excinfo=excinfo) session.exitstatus = EXIT_INTERRUPTED except: excinfo = _pytest._code.ExceptionInfo() config.notify_exception(excinfo, config.option) session.exitstatus = EXIT_INTERNALERROR if excinfo.errisinstance(SystemExit): sys.stderr.write("mainloop: caught Spurious SystemExit!\n") finally: excinfo = None # Explicitly break reference cycle. 
session.startdir.chdir() if initstate >= 2: config.hook.pytest_sessionfinish( session=session, exitstatus=session.exitstatus) config._ensure_unconfigure() return session.exitstatus def pytest_cmdline_main(config): return wrap_session(config, _main) def _main(config, session): """ default command line protocol for initialization, session, running tests and reporting. """ config.hook.pytest_collection(session=session) config.hook.pytest_runtestloop(session=session) if session.testsfailed: return EXIT_TESTSFAILED elif session.testscollected == 0: return EXIT_NOTESTSCOLLECTED def pytest_collection(session): return session.perform_collect() def pytest_runtestloop(session): if session.config.option.collectonly: return True def getnextitem(i): # this is a function to avoid python2 # keeping sys.exc_info set when calling into a test # python2 keeps sys.exc_info till the frame is left try: return session.items[i+1] except IndexError: return None for i, item in enumerate(session.items): nextitem = getnextitem(i) item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem) if session.shouldstop: raise session.Interrupted(session.shouldstop) return True def pytest_ignore_collect(path, config): p = path.dirpath() ignore_paths = config._getconftest_pathlist("collect_ignore", path=p) ignore_paths = ignore_paths or [] excludeopt = config.getoption("ignore") if excludeopt: ignore_paths.extend([py.path.local(x) for x in excludeopt]) return path in ignore_paths class FSHookProxy: def __init__(self, fspath, pm, remove_mods): self.fspath = fspath self.pm = pm self.remove_mods = remove_mods def __getattr__(self, name): x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods) self.__dict__[name] = x return x def compatproperty(name): def fget(self): # deprecated - use pytest.name return getattr(pytest, name) return property(fget) class NodeKeywords(MappingMixin): def __init__(self, node): self.node = node self.parent = node.parent self._markers = {node.name: True} 
def __getitem__(self, key): try: return self._markers[key] except KeyError: if self.parent is None: raise return self.parent.keywords[key] def __setitem__(self, key, value): self._markers[key] = value def __delitem__(self, key): raise ValueError("cannot delete key in keywords dict") def __iter__(self): seen = set(self._markers) if self.parent is not None: seen.update(self.parent.keywords) return iter(seen) def __len__(self): return len(self.__iter__()) def keys(self): return list(self) def __repr__(self): return "<NodeKeywords for node %s>" % (self.node, ) class Node(object): """ base class for Collector and Item the test collection tree. Collector subclasses have children, Items are terminal nodes.""" def __init__(self, name, parent=None, config=None, session=None): #: a unique name within the scope of the parent node self.name = name #: the parent collector node. self.parent = parent #: the pytest config object self.config = config or parent.config #: the session this node is part of self.session = session or parent.session #: filesystem path where this node was collected from (can be None) self.fspath = getattr(parent, 'fspath', None) #: keywords/markers collected from all scopes self.keywords = NodeKeywords(self) #: allow adding of extra keywords to use for matching self.extra_keyword_matches = set() # used for storing artificial fixturedefs for direct parametrization self._name2pseudofixturedef = {} @property def ihook(self): """ fspath sensitive hook proxy used to call pytest hooks""" return self.session.gethookproxy(self.fspath) Module = compatproperty("Module") Class = compatproperty("Class") Instance = compatproperty("Instance") Function = compatproperty("Function") File = compatproperty("File") Item = compatproperty("Item") def _getcustomclass(self, name): cls = getattr(self, name) if cls != getattr(pytest, name): py.log._apiwarn("2.0", "use of node.%s is deprecated, " "use pytest_pycollect_makeitem(...) 
to create custom " "collection nodes" % name) return cls def __repr__(self): return "<%s %r>" %(self.__class__.__name__, getattr(self, 'name', None)) def warn(self, code, message): """ generate a warning with the given code and message for this item. """ assert isinstance(code, str) fslocation = getattr(self, "location", None) if fslocation is None: fslocation = getattr(self, "fspath", None) else: fslocation = "%s:%s" % fslocation[:2] self.ihook.pytest_logwarning.call_historic(kwargs=dict( code=code, message=message, nodeid=self.nodeid, fslocation=fslocation)) # methods for ordering nodes @property def nodeid(self): """ a ::-separated string denoting its collection tree address. """ try: return self._nodeid except AttributeError: self._nodeid = x = self._makeid() return x def _makeid(self): return self.parent.nodeid + "::" + self.name def __hash__(self): return hash(self.nodeid) def setup(self): pass def teardown(self): pass def _memoizedcall(self, attrname, function): exattrname = "_ex_" + attrname failure = getattr(self, exattrname, None) if failure is not None: py.builtin._reraise(failure[0], failure[1], failure[2]) if hasattr(self, attrname): return getattr(self, attrname) try: res = function() except py.builtin._sysex: raise except: failure = sys.exc_info() setattr(self, exattrname, failure) raise setattr(self, attrname, res) return res def listchain(self): """ return list of all parent collectors up to self, starting from root of collection tree. """ chain = [] item = self while item is not None: chain.append(item) item = item.parent chain.reverse() return chain def add_marker(self, marker): """ dynamically add a marker object to the node. ``marker`` can be a string or pytest.mark.* instance. 
""" from _pytest.mark import MarkDecorator if isinstance(marker, py.builtin._basestring): marker = MarkDecorator(marker) elif not isinstance(marker, MarkDecorator): raise ValueError("is not a string or pytest.mark.* Marker") self.keywords[marker.name] = marker def get_marker(self, name): """ get a marker object from this node or None if the node doesn't have a marker with that name. """ val = self.keywords.get(name, None) if val is not None: from _pytest.mark import MarkInfo, MarkDecorator if isinstance(val, (MarkDecorator, MarkInfo)): return val def listextrakeywords(self): """ Return a set of all extra keywords in self and any parents.""" extra_keywords = set() item = self for item in self.listchain(): extra_keywords.update(item.extra_keyword_matches) return extra_keywords def listnames(self): return [x.name for x in self.listchain()] def addfinalizer(self, fin): """ register a function to be called when this node is finalized. This method can only be called when this node is active in a setup chain, for example during self.setup(). """ self.session._setupstate.addfinalizer(fin, self) def getparent(self, cls): """ get the next parent node (including ourself) which is an instance of the given class""" current = self while current and not isinstance(current, cls): current = current.parent return current def _prunetraceback(self, excinfo): pass def _repr_failure_py(self, excinfo, style=None): fm = self.session._fixturemanager if excinfo.errisinstance(fm.FixtureLookupError): return excinfo.value.formatrepr() tbfilter = True if self.config.option.fulltrace: style="long" else: self._prunetraceback(excinfo) tbfilter = False # prunetraceback already does it if style == "auto": style = "long" # XXX should excinfo.getrepr record all data and toterminal() process it? 
if style is None: if self.config.option.tbstyle == "short": style = "short" else: style = "long" return excinfo.getrepr(funcargs=True, showlocals=self.config.option.showlocals, style=style, tbfilter=tbfilter) repr_failure = _repr_failure_py class Collector(Node): """ Collector instances create children through collect() and thus iteratively build a tree. """ class CollectError(Exception): """ an error during collection, contains a custom message. """ def collect(self): """ returns a list of children (items and collectors) for this collection node. """ raise NotImplementedError("abstract") def repr_failure(self, excinfo): """ represent a collection failure. """ if excinfo.errisinstance(self.CollectError): exc = excinfo.value return str(exc.args[0]) return self._repr_failure_py(excinfo, style="short") def _memocollect(self): """ internal helper method to cache results of calling collect(). """ return self._memoizedcall('_collected', lambda: list(self.collect())) def _prunetraceback(self, excinfo): if hasattr(self, 'fspath'): traceback = excinfo.traceback ntraceback = traceback.cut(path=self.fspath) if ntraceback == traceback: ntraceback = ntraceback.cut(excludepath=tracebackcutdir) excinfo.traceback = ntraceback.filter() class FSCollector(Collector): def __init__(self, fspath, parent=None, config=None, session=None): fspath = py.path.local(fspath) # xxx only for test_resultlog.py? name = fspath.basename if parent is not None: rel = fspath.relto(parent.fspath) if rel: name = rel name = name.replace(os.sep, "/") super(FSCollector, self).__init__(name, parent, config, session) self.fspath = fspath def _makeid(self): relpath = self.fspath.relto(self.config.rootdir) if os.sep != "/": relpath = relpath.replace(os.sep, "/") return relpath class File(FSCollector): """ base class for collecting tests from a file. """ class Item(Node): """ a basic test invocation item. Note that for a single function there might be multiple test invocation items. 
""" nextitem = None def __init__(self, name, parent=None, config=None, session=None): super(Item, self).__init__(name, parent, config, session) self._report_sections = [] def add_report_section(self, when, key, content): if content: self._report_sections.append((when, key, content)) def reportinfo(self): return self.fspath, None, "" @property def location(self): try: return self._location except AttributeError: location = self.reportinfo() # bestrelpath is a quite slow function cache = self.config.__dict__.setdefault("_bestrelpathcache", {}) try: fspath = cache[location[0]] except KeyError: fspath = self.session.fspath.bestrelpath(location[0]) cache[location[0]] = fspath location = (fspath, location[1], str(location[2])) self._location = location return location class NoMatch(Exception): """ raised if matching cannot locate a matching names. """ class Interrupted(KeyboardInterrupt): """ signals an interrupted test run. """ __module__ = 'builtins' # for py3 class Session(FSCollector): Interrupted = Interrupted def __init__(self, config): FSCollector.__init__(self, config.rootdir, parent=None, config=config, session=self) self._fs2hookproxy = {} self.testsfailed = 0 self.testscollected = 0 self.shouldstop = False self.trace = config.trace.root.get("collection") self._norecursepatterns = config.getini("norecursedirs") self.startdir = py.path.local() self.config.pluginmanager.register(self, name="session") def _makeid(self): return "" @pytest.hookimpl(tryfirst=True) def pytest_collectstart(self): if self.shouldstop: raise self.Interrupted(self.shouldstop) @pytest.hookimpl(tryfirst=True) def pytest_runtest_logreport(self, report): if report.failed and not hasattr(report, 'wasxfail'): self.testsfailed += 1 maxfail = self.config.getvalue("maxfail") if maxfail and self.testsfailed >= maxfail: self.shouldstop = "stopping after %d failures" % ( self.testsfailed) pytest_collectreport = pytest_runtest_logreport def isinitpath(self, path): return path in self._initialpaths def 
gethookproxy(self, fspath): try: return self._fs2hookproxy[fspath] except KeyError: # check if we have the common case of running # hooks with all conftest.py filesall conftest.py pm = self.config.pluginmanager my_conftestmodules = pm._getconftestmodules(fspath) remove_mods = pm._conftest_plugins.difference(my_conftestmodules) if remove_mods: # one or more conftests are not in use at this fspath proxy = FSHookProxy(fspath, pm, remove_mods) else: # all plugis are active for this fspath proxy = self.config.hook self._fs2hookproxy[fspath] = proxy return proxy def perform_collect(self, args=None, genitems=True): hook = self.config.hook try: items = self._perform_collect(args, genitems) hook.pytest_collection_modifyitems(session=self, config=self.config, items=items) finally: hook.pytest_collection_finish(session=self) self.testscollected = len(items) return items def _perform_collect(self, args, genitems): if args is None: args = self.config.args self.trace("perform_collect", self, args) self.trace.root.indent += 1 self._notfound = [] self._initialpaths = set() self._initialparts = [] self.items = items = [] for arg in args: parts = self._parsearg(arg) self._initialparts.append(parts) self._initialpaths.add(parts[0]) rep = collect_one_node(self) self.ihook.pytest_collectreport(report=rep) self.trace.root.indent -= 1 if self._notfound: errors = [] for arg, exc in self._notfound: line = "(no name %r in any of %r)" % (arg, exc.args[0]) errors.append("not found: %s\n%s" % (arg, line)) #XXX: test this raise pytest.UsageError(*errors) if not genitems: return rep.result else: if rep.passed: for node in rep.result: self.items.extend(self.genitems(node)) return items def collect(self): for parts in self._initialparts: arg = "::".join(map(str, parts)) self.trace("processing argument", arg) self.trace.root.indent += 1 try: for x in self._collect(arg): yield x except NoMatch: # we are inside a make_report hook so # we cannot directly pass through the exception 
self._notfound.append((arg, sys.exc_info()[1])) self.trace.root.indent -= 1 def _collect(self, arg): names = self._parsearg(arg) path = names.pop(0) if path.check(dir=1): assert not names, "invalid arg %r" %(arg,) for path in path.visit(fil=lambda x: x.check(file=1), rec=self._recurse, bf=True, sort=True): for x in self._collectfile(path): yield x else: assert path.check(file=1) for x in self.matchnodes(self._collectfile(path), names): yield x def _collectfile(self, path): ihook = self.gethookproxy(path) if not self.isinitpath(path): if ihook.pytest_ignore_collect(path=path, config=self.config): return () return ihook.pytest_collect_file(path=path, parent=self) def _recurse(self, path): ihook = self.gethookproxy(path.dirpath()) if ihook.pytest_ignore_collect(path=path, config=self.config): return for pat in self._norecursepatterns: if path.check(fnmatch=pat): return False ihook = self.gethookproxy(path) ihook.pytest_collect_directory(path=path, parent=self) return True def _tryconvertpyarg(self, x): mod = None path = [os.path.abspath('.')] + sys.path for name in x.split('.'): # ignore anything that's not a proper name here # else something like --pyargs will mess up '.' # since imp.find_module will actually sometimes work for it # but it's supposed to be considered a filesystem path # not a package if name_re.match(name) is None: return x try: fd, mod, type_ = imp.find_module(name, path) except ImportError: return x else: if fd is not None: fd.close() if type_[2] != imp.PKG_DIRECTORY: path = [os.path.dirname(mod)] else: path = [mod] return mod def _parsearg(self, arg): """ return (fspath, names) tuple after checking the file exists. 
""" arg = str(arg) if self.config.option.pyargs: arg = self._tryconvertpyarg(arg) parts = str(arg).split("::") relpath = parts[0].replace("/", os.sep) path = self.config.invocation_dir.join(relpath, abs=True) if not path.check(): if self.config.option.pyargs: msg = "file or package not found: " else: msg = "file not found: " raise pytest.UsageError(msg + arg) parts[0] = path return parts def matchnodes(self, matching, names): self.trace("matchnodes", matching, names) self.trace.root.indent += 1 nodes = self._matchnodes(matching, names) num = len(nodes) self.trace("matchnodes finished -> ", num, "nodes") self.trace.root.indent -= 1 if num == 0: raise NoMatch(matching, names[:1]) return nodes def _matchnodes(self, matching, names): if not matching or not names: return matching name = names[0] assert name nextnames = names[1:] resultnodes = [] for node in matching: if isinstance(node, pytest.Item): if not names: resultnodes.append(node) continue assert isinstance(node, pytest.Collector) rep = collect_one_node(node) if rep.passed: has_matched = False for x in rep.result: # TODO: remove parametrized workaround once collection structure contains parametrization if x.name == name or x.name.split("[")[0] == name: resultnodes.extend(self.matchnodes([x], nextnames)) has_matched = True # XXX accept IDs that don't have "()" for class instances if not has_matched and len(rep.result) == 1 and x.name == "()": nextnames.insert(0, name) resultnodes.extend(self.matchnodes([x], nextnames)) node.ihook.pytest_collectreport(report=rep) return resultnodes def genitems(self, node): self.trace("genitems", node) if isinstance(node, pytest.Item): node.ihook.pytest_itemcollected(item=node) yield node else: assert isinstance(node, pytest.Collector) rep = collect_one_node(node) if rep.passed: for subnode in rep.result: for x in self.genitems(subnode): yield x node.ihook.pytest_collectreport(report=rep)
mpl-2.0
yufengg/tensorflow
tensorflow/python/layers/normalization.py
14
25270
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================= # pylint: disable=unused-import,g-bad-import-order """Contains the normalization layer classes and their functional aliases. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import six from six.moves import xrange # pylint: disable=redefined-builtin import numpy as np from tensorflow.python.framework import dtypes from tensorflow.python.framework import tensor_shape from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import nn from tensorflow.python.ops import math_ops from tensorflow.python.ops import init_ops from tensorflow.python.ops import standard_ops from tensorflow.python.ops import variable_scope as vs from tensorflow.python.training import moving_averages from tensorflow.python.framework import tensor_util from tensorflow.python.ops import variables from tensorflow.python.layers import base from tensorflow.python.layers import utils class BatchNormalization(base.Layer): """Batch Normalization layer from http://arxiv.org/abs/1502.03167. "Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift" Sergey Ioffe, Christian Szegedy Arguments: axis: Integer, the axis that should be normalized (typically the features axis). 
For instance, after a `Conv2D` layer with `data_format="channels_first"`, set `axis=1` in `BatchNormalization`. momentum: Momentum for the moving average. epsilon: Small float added to variance to avoid dividing by zero. center: If True, add offset of `beta` to normalized tensor. If False, `beta` is ignored. scale: If True, multiply by `gamma`. If False, `gamma` is not used. When the next layer is linear (also e.g. `nn.relu`), this can be disabled since the scaling can be done by the next layer. beta_initializer: Initializer for the beta weight. gamma_initializer: Initializer for the gamma weight. moving_mean_initializer: Initializer for the moving mean. moving_variance_initializer: Initializer for the moving variance. beta_regularizer: Optional regularizer for the beta weight. gamma_regularizer: Optional regularizer for the gamma weight. renorm: Whether to use Batch Renormalization (https://arxiv.org/abs/1702.03275). This adds extra variables during training. The inference is the same for either value of this parameter. renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to scalar `Tensors` used to clip the renorm correction. The correction `(r, d)` is used as `corrected_value = normalized_value * r + d`, with `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin, dmax are set to inf, 0, inf, respectively. renorm_momentum: Momentum used to update the moving means and standard deviations with renorm. Unlike `momentum`, this affects training and should be neither too small (which would add noise) nor too large (which would give stale estimates). Note that `momentum` is still applied to get the means and variances for inference. fused: if `True`, use a faster, fused implementation based on nn.fused_batch_norm. If `None`, use the fused implementation if possible. trainable: Boolean, if `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable). name: A string, the name of the layer. 
""" def __init__(self, axis=-1, momentum=0.99, epsilon=1e-3, center=True, scale=True, beta_initializer=init_ops.zeros_initializer(), gamma_initializer=init_ops.ones_initializer(), moving_mean_initializer=init_ops.zeros_initializer(), moving_variance_initializer=init_ops.ones_initializer(), beta_regularizer=None, gamma_regularizer=None, renorm=False, renorm_clipping=None, renorm_momentum=0.99, fused=False, trainable=True, name=None, **kwargs): super(BatchNormalization, self).__init__( name=name, trainable=trainable, **kwargs) self.axis = axis self.momentum = momentum self.epsilon = epsilon self.center = center self.scale = scale self.beta_initializer = beta_initializer self.gamma_initializer = gamma_initializer self.moving_mean_initializer = moving_mean_initializer self.moving_variance_initializer = moving_variance_initializer self.beta_regularizer = beta_regularizer self.gamma_regularizer = gamma_regularizer self.renorm = renorm self.fused = fused if self.fused and renorm: raise ValueError( 'Batch renorm is currently not supported with fused batch norm.') if self.fused and (beta_regularizer is not None or gamma_regularizer is not None): raise ValueError('Regularizers are not currently ' 'supported for fused batch norm.') if renorm: renorm_clipping = renorm_clipping or {} keys = ['rmax', 'rmin', 'dmax'] if set(renorm_clipping) - set(keys): raise ValueError('renorm_clipping %s contains keys not in %s' % (renorm_clipping, keys)) self.renorm_clipping = renorm_clipping self.renorm_momentum = renorm_momentum def build(self, input_shape): input_shape = tensor_shape.TensorShape(input_shape) if not input_shape.ndims: raise ValueError('Input has undefined rank:', input_shape) ndim = len(input_shape) # TODO(yaozhang): if input is not 4D, reshape it to 4D and reshape the # output back to its original shape accordingly. if self.fused and ndim != 4: raise ValueError( 'Only 4D inputs are currently supported with fused batch norm. 
' 'Consider reshaping the input to 4D and reshape the output back ' 'to its original shape. Got input rank: ', ndim) if self.axis < 0: axis = ndim + self.axis else: axis = self.axis if axis < 0 or axis >= ndim: raise ValueError('Value of `axis` argument ' + str(self.axis) + ' is out of range for input with rank ' + str(ndim)) if self.fused is None: # Currently fused batch norm doesn't support renorm and beta/gamma # regularizer; and only supports an input tensor of rank 4 and a channel # dimension on axis 1 and 3. self.fused = not self.renorm and ndim == 4 and axis in [ 1, 3 ] and self.beta_regularizer is None and self.gamma_regularizer is None if self.fused: if axis == 1: self._data_format = 'NCHW' elif axis == 3: self._data_format = 'NHWC' else: raise ValueError( 'Only axis 1 and 3 are currently supported dimensions for ' 'fused batch norm. Got `axis` dimension: ', axis) param_dim = input_shape[axis] if not param_dim.value: raise ValueError('Input has undefined `axis` dimension. Input shape: ', input_shape) self.input_spec = base.InputSpec(ndim=ndim, axes={self.axis: param_dim.value}) if self.center: self.beta = self.add_variable(name='beta', shape=(param_dim,), initializer=self.beta_initializer, regularizer=self.beta_regularizer, trainable=True) else: self.beta = None if self.fused: self._beta_const = array_ops.constant(0.0, shape=(param_dim,)) if self.scale: self.gamma = self.add_variable(name='gamma', shape=(param_dim,), initializer=self.gamma_initializer, regularizer=self.gamma_regularizer, trainable=True) else: self.gamma = None if self.fused: self._gamma_const = array_ops.constant(1.0, shape=(param_dim,)) # Disable variable partitioning when creating the moving mean and variance partitioner = self._scope.partitioner try: self._scope.set_partitioner(None) self.moving_mean = self.add_variable( name='moving_mean', shape=(param_dim,), initializer=self.moving_mean_initializer, trainable=False) self.moving_variance = self.add_variable( name='moving_variance', 
shape=(param_dim,), initializer=self.moving_variance_initializer, trainable=False) if self.renorm: # Create variables to maintain the moving mean and standard deviation. # These are used in training and thus are different from the moving # averages above. The renorm variables are colocated with moving_mean # and moving_variance. # NOTE: below, the outer `with device` block causes the current device # stack to be cleared. The nested ones use a `lambda` to set the desired # device and ignore any devices that may be set by the custom getter. def _renorm_variable(name, shape): var = self.add_variable(name=name, shape=shape, initializer=init_ops.zeros_initializer(), trainable=False) return var with ops.device(None): with ops.device(lambda _: self.moving_mean.device): self.renorm_mean = _renorm_variable('renorm_mean', (param_dim,)) self.renorm_mean_weight = _renorm_variable('renorm_mean_weight', ()) # We initialize renorm_stddev to 0, and maintain the (0-initialized) # renorm_stddev_weight. This allows us to (1) mix the average # stddev with the minibatch stddev early in training, and (2) compute # the unbiased average stddev by dividing renorm_stddev by the weight. 
with ops.device(lambda _: self.moving_variance.device): self.renorm_stddev = _renorm_variable('renorm_stddev', (param_dim,)) self.renorm_stddev_weight = _renorm_variable( 'renorm_stddev_weight', ()) finally: self._scope.set_partitioner(partitioner) self.built = True def _fused_batch_norm(self, inputs, training): """Returns the output of fused batch norm.""" beta = self.beta if self.center else self._beta_const gamma = self.gamma if self.scale else self._gamma_const def _fused_batch_norm_training(): return nn.fused_batch_norm( inputs, gamma, beta, epsilon=self.epsilon, data_format=self._data_format) def _fused_batch_norm_inference(): return nn.fused_batch_norm( inputs, gamma, beta, mean=self.moving_mean, variance=self.moving_variance, epsilon=self.epsilon, is_training=False, data_format=self._data_format) output, mean, variance = utils.smart_cond( training, _fused_batch_norm_training, _fused_batch_norm_inference) training_value = utils.constant_value(training) if training_value is not False: decay = _smart_select(training, lambda: self.momentum, lambda: 1.) mean_update = moving_averages.assign_moving_average( self.moving_mean, mean, decay, zero_debias=False) variance_update = moving_averages.assign_moving_average( self.moving_variance, variance, decay, zero_debias=False) self.add_update(mean_update, inputs=inputs) self.add_update(variance_update, inputs=inputs) return output def _renorm_correction_and_moments(self, mean, variance, training): """Returns the correction and update values for renorm.""" stddev = math_ops.sqrt(variance + self.epsilon) # Compute the average mean and standard deviation, as if they were # initialized with this batch's moments. mixed_renorm_mean = (self.renorm_mean + (1. - self.renorm_mean_weight) * mean) mixed_renorm_stddev = (self.renorm_stddev + (1. - self.renorm_stddev_weight) * stddev) # Compute the corrections for batch renorm. 
r = stddev / mixed_renorm_stddev d = (mean - mixed_renorm_mean) / mixed_renorm_stddev # Ensure the corrections use pre-update moving averages. with ops.control_dependencies([r, d]): mean = array_ops.identity(mean) stddev = array_ops.identity(stddev) rmin, rmax, dmax = [self.renorm_clipping.get(key) for key in ['rmin', 'rmax', 'dmax']] if rmin is not None: r = math_ops.maximum(r, rmin) if rmax is not None: r = math_ops.minimum(r, rmax) if dmax is not None: d = math_ops.maximum(d, -dmax) d = math_ops.minimum(d, dmax) # When not training, use r=1, d=0, and decay=1 meaning no updates. r = _smart_select(training, lambda: r, lambda: array_ops.ones_like(r)) d = _smart_select(training, lambda: d, lambda: array_ops.zeros_like(d)) decay = _smart_select(training, lambda: self.renorm_momentum, lambda: 1.) def _update_renorm_variable(var, weight, value): """Updates a moving average and weight, returns the unbiased value.""" # Update the variables without zero debiasing. The debiasing will be # accomplished by dividing the exponential moving average by the weight. # For example, after a single update, the moving average would be # (1-decay) * value. and the weight will be 1-decay, with their ratio # giving value. # Make sure the weight is not updated until before r and d computation. value = array_ops.identity(value) with ops.control_dependencies([value]): weight_value = array_ops.constant(1., dtype=weight.dtype) new_var = moving_averages.assign_moving_average( var, value, decay, zero_debias=False) new_weight = moving_averages.assign_moving_average( weight, weight_value, decay, zero_debias=False) return new_var / new_weight with ops.colocate_with(self.moving_mean): new_mean = _update_renorm_variable(self.renorm_mean, self.renorm_mean_weight, mean) with ops.colocate_with(self.moving_variance): new_stddev = _update_renorm_variable(self.renorm_stddev, self.renorm_stddev_weight, stddev) # Make sqrt(moving_variance + epsilon) = new_stddev. 
new_variance = math_ops.square(new_stddev) - self.epsilon return (r, d, new_mean, new_variance) def call(self, inputs, training=False): if self.fused: return self._fused_batch_norm(inputs, training=training) # First, compute the axes along which to reduce the mean / variance, # as well as the broadcast shape to be used for all parameters. input_shape = inputs.get_shape() ndim = len(input_shape) reduction_axes = list(range(len(input_shape))) del reduction_axes[self.axis] broadcast_shape = [1] * len(input_shape) broadcast_shape[self.axis] = input_shape[self.axis].value # Determines whether broadcasting is needed. needs_broadcasting = (sorted(reduction_axes) != list(range(ndim))[:-1]) scale, offset = self.gamma, self.beta # Determine a boolean value for `training`: could be True, False, or None. training_value = utils.constant_value(training) if training_value is not False: # Some of the computations here are not necessary when training==False # but not a constant. However, this makes the code simpler. mean, variance = nn.moments(inputs, reduction_axes) mean = _smart_select(training, lambda: mean, lambda: self.moving_mean) variance = _smart_select(training, lambda: variance, lambda: self.moving_variance) if self.renorm: r, d, new_mean, new_variance = self._renorm_correction_and_moments( mean, variance, training) # When training, the normalized values (say, x) will be transformed as # x * gamma + beta without renorm, and (x * r + d) * gamma + beta # = x * (r * gamma) + (d * gamma + beta) with renorm. scale = array_ops.stop_gradient(r, name='renorm_r') offset = array_ops.stop_gradient(d, name='renorm_d') if self.gamma is not None: scale *= self.gamma offset *= self.gamma if self.beta is not None: offset += self.beta else: new_mean, new_variance = mean, variance # Update moving averages when training, and prevent updates otherwise. decay = _smart_select(training, lambda: self.momentum, lambda: 1.) 
mean_update = moving_averages.assign_moving_average( self.moving_mean, new_mean, decay, zero_debias=False) variance_update = moving_averages.assign_moving_average( self.moving_variance, new_variance, decay, zero_debias=False) self.add_update(mean_update, inputs=inputs) self.add_update(variance_update, inputs=inputs) else: mean, variance = self.moving_mean, self.moving_variance def _broadcast(v): if needs_broadcasting and v is not None: # In this case we must explicitly broadcast all parameters. return array_ops.reshape(v, broadcast_shape) return v return nn.batch_normalization(inputs, _broadcast(mean), _broadcast(variance), _broadcast(offset), _broadcast(scale), self.epsilon) def batch_normalization(inputs, axis=-1, momentum=0.99, epsilon=1e-3, center=True, scale=True, beta_initializer=init_ops.zeros_initializer(), gamma_initializer=init_ops.ones_initializer(), moving_mean_initializer=init_ops.zeros_initializer(), moving_variance_initializer=init_ops.ones_initializer(), beta_regularizer=None, gamma_regularizer=None, training=False, trainable=True, name=None, reuse=None, renorm=False, renorm_clipping=None, renorm_momentum=0.99, fused=False): """Functional interface for the batch normalization layer. Reference: http://arxiv.org/abs/1502.03167 "Batch Normalization: Accelerating Deep Network Training by Reducing Internal Covariate Shift" Sergey Ioffe, Christian Szegedy Note: when training, the moving_mean and moving_variance need to be updated. By default the update ops are placed in `tf.GraphKeys.UPDATE_OPS`, so they need to be added as a dependency to the `train_op`. For example: ```python update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS) with tf.control_dependencies(update_ops): train_op = optimizer.minimize(loss) ``` Arguments: inputs: Tensor input. axis: Integer, the axis that should be normalized (typically the features axis). For instance, after a `Convolution2D` layer with `data_format="channels_first"`, set `axis=1` in `BatchNormalization`. 
momentum: Momentum for the moving average. epsilon: Small float added to variance to avoid dividing by zero. center: If True, add offset of `beta` to normalized tensor. If False, `beta` is ignored. scale: If True, multiply by `gamma`. If False, `gamma` is not used. When the next layer is linear (also e.g. `nn.relu`), this can be disabled since the scaling can be done by the next layer. beta_initializer: Initializer for the beta weight. gamma_initializer: Initializer for the gamma weight. moving_mean_initializer: Initializer for the moving mean. moving_variance_initializer: Initializer for the moving variance. beta_regularizer: Optional regularizer for the beta weight. gamma_regularizer: Optional regularizer for the gamma weight. training: Either a Python boolean, or a TensorFlow boolean scalar tensor (e.g. a placeholder). Whether to return the output in training mode (normalized with statistics of the current batch) or in inference mode (normalized with moving statistics). **NOTE**: make sure to set this parameter correctly, or else your training/inference will not work properly. trainable: Boolean, if `True` also add variables to the graph collection `GraphKeys.TRAINABLE_VARIABLES` (see tf.Variable). name: String, the name of the layer. reuse: Boolean, whether to reuse the weights of a previous layer by the same name. renorm: Whether to use Batch Renormalization (https://arxiv.org/abs/1702.03275). This adds extra variables during training. The inference is the same for either value of this parameter. renorm_clipping: A dictionary that may map keys 'rmax', 'rmin', 'dmax' to scalar `Tensors` used to clip the renorm correction. The correction `(r, d)` is used as `corrected_value = normalized_value * r + d`, with `r` clipped to [rmin, rmax], and `d` to [-dmax, dmax]. Missing rmax, rmin, dmax are set to inf, 0, inf, respectively. renorm_momentum: Momentum used to update the moving means and standard deviations with renorm. 
Unlike `momentum`, this affects training and should be neither too small (which would add noise) nor too large (which would give stale estimates). Note that `momentum` is still applied to get the means and variances for inference. fused: if `True`, use a faster, fused implementation based on nn.fused_batch_norm. If `None`, use the fused implementation if possible. Returns: Output tensor. """ layer = BatchNormalization( axis=axis, momentum=momentum, epsilon=epsilon, center=center, scale=scale, beta_initializer=beta_initializer, gamma_initializer=gamma_initializer, moving_mean_initializer=moving_mean_initializer, moving_variance_initializer=moving_variance_initializer, beta_regularizer=beta_regularizer, gamma_regularizer=gamma_regularizer, renorm=renorm, renorm_clipping=renorm_clipping, renorm_momentum=renorm_momentum, fused=fused, trainable=trainable, name=name, _reuse=reuse, _scope=name) return layer.apply(inputs, training=training) # Aliases BatchNorm = BatchNormalization batch_norm = batch_normalization # Helper function def _smart_select(pred, fn_then, fn_else): """Selects fn_then() or fn_else() based on the value of pred. The purpose of this function is the same as `utils.smart_cond`. However, at the moment there is a bug (b/36297356) that seems to kick in only when `smart_cond` delegates to `tf.cond`, which sometimes results in the training hanging when using parameter servers. This function will output the result of `fn_then` or `fn_else` if `pred` is known at graph construction time. Otherwise, it will use `tf.where` which will result in some redundant work (both branches will be computed but only one selected). However, the tensors involved will usually be small (means and variances in batchnorm), so the cost will be small and will not be incurred at all if `pred` is a constant. Args: pred: A boolean scalar `Tensor`. fn_then: A callable to use when pred==True. fn_else: A callable to use when pred==False. 
Returns: A `Tensor` whose value is fn_then() or fn_else() based on the value of pred. """ pred_value = utils.constant_value(pred) if pred_value: return fn_then() elif pred_value is False: return fn_else() t_then = array_ops.expand_dims(fn_then(), 0) t_else = array_ops.expand_dims(fn_else(), 0) pred = array_ops.reshape(pred, [1]) result = array_ops.where(pred, t_then, t_else) return array_ops.squeeze(result, [0])
apache-2.0
kevinastone/sentry
src/sentry/web/frontend/events.py
17
2355
""" sentry.web.frontend.events ~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ from __future__ import absolute_import import urlparse from django.core.context_processors import csrf from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect from django.views.decorators.csrf import csrf_protect from sentry.models import Event from sentry.replays import Replayer from sentry.utils.http import safe_urlencode from sentry.web.decorators import has_group_access, render_to_response from sentry.web.forms import ReplayForm @has_group_access @csrf_protect def replay_event(request, organization, project, group, event_id): try: event = Event.objects.get(group=group, id=event_id) except Event.DoesNotExist: return HttpResponseRedirect(reverse('sentry')) Event.objects.bind_nodes([event], 'data') interfaces = event.interfaces if 'sentry.interfaces.Http' not in interfaces: # TODO: show a proper error return HttpResponseRedirect(reverse('sentry')) # TODO(mattrobenolt): Add Cookie as a header http = interfaces['sentry.interfaces.Http'] if http.headers: headers = '\n'.join('%s: %s' % (k, v) for k, v in http.headers.iteritems() if k[0].upper() == k[0]) else: headers = '' if isinstance(http.data, dict): data = safe_urlencode(http.data) else: data = http.data initial = { 'url': urlparse.urldefrag(http.full_url)[0], 'method': http.method, 'headers': headers, 'data': data, } form = ReplayForm(request.POST or None, initial=initial) if form.is_valid(): result = Replayer( url=form.cleaned_data['url'], method=form.cleaned_data['method'], data=form.cleaned_data['data'], headers=form.cleaned_data['headers'], ).replay() else: result = None context = { 'organization': organization, 'team': project.team, 'project': project, 'group': event.group, 'event': event, 'form': form, 'result': result, } context.update(csrf(request)) return 
render_to_response('sentry/events/replay_request.html', context, request)
bsd-3-clause
hlzz/dotfiles
graphics/VTK-7.0.0/Filters/General/Testing/Python/clipWedge.py
2
5544
#!/usr/bin/env python import vtk from vtk.test import Testing from vtk.util.misc import vtkGetDataRoot VTK_DATA_ROOT = vtkGetDataRoot() def GetRGBColor(colorName): ''' Return the red, green and blue components for a color as doubles. ''' rgb = [0.0, 0.0, 0.0] # black vtk.vtkNamedColors().GetColorRGB(colorName, rgb) return rgb # define a Single Cube Scalars = vtk.vtkFloatArray() Scalars.InsertNextValue(1.0) Scalars.InsertNextValue(0.0) Scalars.InsertNextValue(0.0) Scalars.InsertNextValue(0.0) Scalars.InsertNextValue(0.0) Scalars.InsertNextValue(0.0) Points = vtk.vtkPoints() Points.InsertNextPoint(0, 0, 0) Points.InsertNextPoint(1, 0, 0) Points.InsertNextPoint(0, 0, 1) Points.InsertNextPoint(0, 1, 0) Points.InsertNextPoint(1, 1, 0) Points.InsertNextPoint(0, 1, 1) Ids = vtk.vtkIdList() Ids.InsertNextId(0) Ids.InsertNextId(1) Ids.InsertNextId(2) Ids.InsertNextId(3) Ids.InsertNextId(4) Ids.InsertNextId(5) grid = vtk.vtkUnstructuredGrid() grid.Allocate(10, 10) grid.InsertNextCell(13, Ids) grid.SetPoints(Points) grid.GetPointData().SetScalars(Scalars) # Clip the wedge clipper = vtk.vtkClipDataSet() clipper.SetInputData(grid) clipper.SetValue(0.5) # build tubes for the triangle edges # wedgeEdges = vtk.vtkExtractEdges() wedgeEdges.SetInputConnection(clipper.GetOutputPort()) wedgeEdgeTubes = vtk.vtkTubeFilter() wedgeEdgeTubes.SetInputConnection(wedgeEdges.GetOutputPort()) wedgeEdgeTubes.SetRadius(.005) wedgeEdgeTubes.SetNumberOfSides(6) wedgeEdgeMapper = vtk.vtkPolyDataMapper() wedgeEdgeMapper.SetInputConnection(wedgeEdgeTubes.GetOutputPort()) wedgeEdgeMapper.ScalarVisibilityOff() wedgeEdgeActor = vtk.vtkActor() wedgeEdgeActor.SetMapper(wedgeEdgeMapper) wedgeEdgeActor.GetProperty().SetDiffuseColor(GetRGBColor('lamp_black')) wedgeEdgeActor.GetProperty().SetSpecular(.4) wedgeEdgeActor.GetProperty().SetSpecularPower(10) # shrink the triangles so we can see each one aShrinker = vtk.vtkShrinkFilter() aShrinker.SetShrinkFactor(1) 
aShrinker.SetInputConnection(clipper.GetOutputPort()) aMapper = vtk.vtkDataSetMapper() aMapper.ScalarVisibilityOff() aMapper.SetInputConnection(aShrinker.GetOutputPort()) Wedges = vtk.vtkActor() Wedges.SetMapper(aMapper) Wedges.GetProperty().SetDiffuseColor(GetRGBColor('banana')) # build a model of the cube Edges = vtk.vtkExtractEdges() Edges.SetInputData(grid) Tubes = vtk.vtkTubeFilter() Tubes.SetInputConnection(Edges.GetOutputPort()) Tubes.SetRadius(.01) Tubes.SetNumberOfSides(6) TubeMapper = vtk.vtkPolyDataMapper() TubeMapper.SetInputConnection(Tubes.GetOutputPort()) TubeMapper.ScalarVisibilityOff() CubeEdges = vtk.vtkActor() CubeEdges.SetMapper(TubeMapper) CubeEdges.GetProperty().SetDiffuseColor(GetRGBColor('khaki')) CubeEdges.GetProperty().SetSpecular(.4) CubeEdges.GetProperty().SetSpecularPower(10) # build the vertices of the cube # Sphere = vtk.vtkSphereSource() Sphere.SetRadius(0.04) Sphere.SetPhiResolution(20) Sphere.SetThetaResolution(20) ThresholdIn = vtk.vtkThresholdPoints() ThresholdIn.SetInputData(grid) ThresholdIn.ThresholdByUpper(.5) Vertices = vtk.vtkGlyph3D() Vertices.SetInputConnection(ThresholdIn.GetOutputPort()) Vertices.SetSourceConnection(Sphere.GetOutputPort()) SphereMapper = vtk.vtkPolyDataMapper() SphereMapper.SetInputConnection(Vertices.GetOutputPort()) SphereMapper.ScalarVisibilityOff() CubeVertices = vtk.vtkActor() CubeVertices.SetMapper(SphereMapper) CubeVertices.GetProperty().SetDiffuseColor(GetRGBColor('tomato')) # define the text for the labels caseLabel = vtk.vtkVectorText() caseLabel.SetText("Case 1") aLabelTransform = vtk.vtkTransform() aLabelTransform.Identity() aLabelTransform.Translate(-.2, 0, 1.25) aLabelTransform.Scale(.05, .05, .05) labelTransform = vtk.vtkTransformPolyDataFilter() labelTransform.SetTransform(aLabelTransform) labelTransform.SetInputConnection(caseLabel.GetOutputPort()) labelMapper = vtk.vtkPolyDataMapper() labelMapper.SetInputConnection(labelTransform.GetOutputPort()) labelActor = vtk.vtkActor() 
labelActor.SetMapper(labelMapper) # define the base baseModel = vtk.vtkCubeSource() baseModel.SetXLength(1.5) baseModel.SetYLength(.01) baseModel.SetZLength(1.5) baseMapper = vtk.vtkPolyDataMapper() baseMapper.SetInputConnection(baseModel.GetOutputPort()) base = vtk.vtkActor() base.SetMapper(baseMapper) # Create the RenderWindow, Renderer and both Actors # ren1 = vtk.vtkRenderer() renWin = vtk.vtkRenderWindow() renWin.AddRenderer(ren1) iren = vtk.vtkRenderWindowInteractor() iren.SetRenderWindow(renWin) # position the base base.SetPosition(.5, -.09, .5) ren1.AddActor(wedgeEdgeActor) ren1.AddActor(base) ren1.AddActor(labelActor) ren1.AddActor(CubeEdges) ren1.AddActor(CubeVertices) ren1.AddActor(Wedges) ren1.SetBackground(GetRGBColor('slate_grey')) renWin.SetSize(400, 400) ren1.ResetCamera() ren1.GetActiveCamera().Dolly(1.2) ren1.GetActiveCamera().Azimuth(30) ren1.GetActiveCamera().Elevation(20) ren1.ResetCameraClippingRange() renWin.Render() iren.Initialize() def cases (id, mask): i = 0 while i < 6: m = mask[i] if m & id == 0: Scalars.SetValue(i, 0) pass else: Scalars.SetValue(i, 1) pass caseLabel.SetText("Case " + str(id) + "") i += 1 grid.Modified() renWin.Render() mask = [1, 2, 4, 8, 16, 32] cases(7, mask) # iren.Start()
bsd-3-clause
EliotBerriot/django
django/contrib/sessions/backends/db.py
227
3637
import logging from django.contrib.sessions.backends.base import CreateError, SessionBase from django.core.exceptions import SuspiciousOperation from django.db import IntegrityError, router, transaction from django.utils import timezone from django.utils.encoding import force_text from django.utils.functional import cached_property class SessionStore(SessionBase): """ Implements database session store. """ def __init__(self, session_key=None): super(SessionStore, self).__init__(session_key) @classmethod def get_model_class(cls): # Avoids a circular import and allows importing SessionStore when # django.contrib.sessions is not in INSTALLED_APPS. from django.contrib.sessions.models import Session return Session @cached_property def model(self): return self.get_model_class() def load(self): try: s = self.model.objects.get( session_key=self.session_key, expire_date__gt=timezone.now() ) return self.decode(s.session_data) except (self.model.DoesNotExist, SuspiciousOperation) as e: if isinstance(e, SuspiciousOperation): logger = logging.getLogger('django.security.%s' % e.__class__.__name__) logger.warning(force_text(e)) self._session_key = None return {} def exists(self, session_key): return self.model.objects.filter(session_key=session_key).exists() def create(self): while True: self._session_key = self._get_new_session_key() try: # Save immediately to ensure we have a unique entry in the # database. self.save(must_create=True) except CreateError: # Key wasn't unique. Try again. continue self.modified = True return def create_model_instance(self, data): """ Return a new instance of the session model object, which represents the current session state. Intended to be used for saving the session data to the database. """ return self.model( session_key=self._get_or_create_session_key(), session_data=self.encode(data), expire_date=self.get_expiry_date(), ) def save(self, must_create=False): """ Saves the current session data to the database. 
If 'must_create' is True, a database error will be raised if the saving operation doesn't create a *new* entry (as opposed to possibly updating an existing entry). """ if self.session_key is None: return self.create() data = self._get_session(no_load=must_create) obj = self.create_model_instance(data) using = router.db_for_write(self.model, instance=obj) try: with transaction.atomic(using=using): obj.save(force_insert=must_create, using=using) except IntegrityError: if must_create: raise CreateError raise def delete(self, session_key=None): if session_key is None: if self.session_key is None: return session_key = self.session_key try: self.model.objects.get(session_key=session_key).delete() except self.model.DoesNotExist: pass @classmethod def clear_expired(cls): cls.get_model_class().objects.filter(expire_date__lt=timezone.now()).delete()
bsd-3-clause
tcstewar/model-explorer
modex/ui/pytag.py
1
4483
""" PyTag Version: 2005-06-11 Author: Terry Stewart terry.stewart@gmail.com http://terrystewart.ca This software is released under the GNU General Public License. See http://www.gnu.org/copyleft/gpl.html for more details. This is a utility for easily generating valid XHTML from within a Python program. It is inspired by Nevow's <http://nevow.com> 'Stan' module, which is pretty much exactly the same as this one. I just wrote my own version because I felt like it. Here is a quick example of what you can do: from pytag import T print T.html[ T.head[T.title["This is the title"]], T.body[ T.h1["Heading"], T.hr, T.p(align='center')[ "Hello and welcome to my ", T.em["nifty"], " web page." ] ] ] As you can see, this is a rather odd (ab)use of the [] and () syntax. You create tags by saying T.tagname (using any tagname you want). You set the attributes of the tag by "calling" it. T.tag(name=value, othername=othervalue) You set the contents of the tag using the square brackets. T.tag['some stuff',T.othertag,'can go here'] You can do them both at the same time, so you can define an html link like this: T.a(href='page.html')['click here'] or T.a['click here'](href='page.html') You can even do this to simplify the syntax for common tags: from pytag import * print html[ head[title["This is the title"]], body[ h1["Heading"], hr, p(align='center')[ "Hello and welcome to my ", em["nifty"], " web page." ] ] ] If you add things more than once, they get appended together myList=T.ol for i in range(10): mylist[T.li['this is item #%d'%i]] print myList You can also use the Python list comletion syntax to do something like that print T.ol[[T.li['this is item #%d'%i for i in range(10)]]] (note the currently-required double-square brackets in that example. It is possible that that will become optional when list completions become more general in newer versions of Python) One other capability is to create template tags. 
These are just like normal tags, but when you add things to them they create a new instance of the tag, rather than accumulating. myP=T.p(align='center',size='+1') myP.lock() print T.body[ T.h1['Header'], myP['Here is paragraph 1'], myP['Here is paragraph 2'], ] For further documentation, see http://terrystewart.ca/pytag.html **** History - 2005-06-11: Initial Release """ import copy def _flatten(args): for arg in args: if type(arg) in (type(()),type([])): for x in arg: for f in _flatten(x): yield f else: yield arg class Tag: def __init__(self,name): self.name=name self.attr={} self.content=[] self.locked=False def lock(self): self.locked=True return self def __getitem__(self,content): if self.locked: self=copy.deepcopy(self) self.locked=False if type(content) not in (type(()),type([])): self.content.append(content) else: self.content.extend(_flatten(content)) return self def __call__(self,**attr): if self.locked: self=copy.deepcopy(self) self.locked=False for k,v in attr.items(): v=str(v) v='"%s"'%v self.attr[k]=v return self def __str__(self): attr='' if len(self.attr)>0: attr=' '+' '.join(['%s=%s'%kv for kv in self.attr.items()]) if len(self.content)==0: return '<%s%s />'%(self.name,attr) else: content=''.join([unicode(x) for x in self.content]) return '<%s%s>%s</%s>'%(self.name,attr,content,self.name) class _TagMaker: def __getattr__(self,key): return Tag(key) T=_TagMaker() _tagList="""html head title body p em strong ol li ul blockquote br center cite code dd dt dl h1 h2 h3 h4 h5 h6 hr img meta small big a form input option select optgroup textarea style sub sup tt table tr td th """.split() for t in _tagList: exec('%s=T.%s;%s.lock()'%(t,t,t)) def _pytagtest(): print p['Hello world'] print body[T.p['Hello']] print html[ head[T.title["This is the title"]], body[ h1["Heading"], p(align='center')[ "Hello and welcome to my ", em["nifty"], " web page." ], br, "Click ",a(href="page.html")['here']," to go somewhere else." 
] ] print ol[[li[x] for x in range(10)]] cp=p(align='center') cp.lock() print p[cp(a='b')['p 1'],cp['p 2']] if __name__=='__main__': _pytagtest()
gpl-3.0
CyanogenMod/android_external_chromium_org_third_party_skia
platform_tools/android/tests/makefile_writer_tests.py
67
7198
#!/usr/bin/python # Copyright 2014 Google Inc. # # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ Test makefile_writer.py """ import argparse import os import shutil import sys import tempfile import test_variables import unittest import utils sys.path.append(test_variables.GYP_GEN_DIR) import makefile_writer import tool_makefile_writer import vars_dict_lib MAKEFILE_NAME = test_variables.ANDROID_MK REBASELINE_MSG = ('If you\'ve modified makefile_writer.py, run ' '"makefile_writer_tests.py --rebaseline" to rebaseline') TOOL_DIR = 'tool' def generate_dummy_vars_dict(name): """Create a VarsDict and fill it with dummy entries. Args: name: string to be appended to each entry, if not None. Returns: A VarsDict with dummy entries. """ vars_dict = vars_dict_lib.VarsDict() for key in vars_dict.keys(): entry = key.lower() if name: entry += '_' + name vars_dict[key].add(entry) return vars_dict def generate_write_local_vars_params(): """Generator to compute params for write_local_vars tests. Each iteration yields a new tuple: (filename, append, name), specific to a way to call write_local_vars for the tests. Yields: filename: filename corresponding to the expectation file for this combination of params to write_local_vars. append: boolean to pass as append parameter to write_local_vars. name: string to pass as name parameter to write_local_vars. """ for append in [ True, False ]: for name in [ None, 'arm', 'foo' ]: filename = 'write_local_vars' if append: filename += '_append' else: filename += '_no_append' if name: filename += '_' + name else: filename += '_no_name' yield (filename, append, name) def generate_dummy_vars_dict_data(name, condition): """Create a dummy VarsDictData. Create a dummy VarsDictData, using the name for both the contained VarsDict and the VarsDictData Args: name: name used by both the returned VarsDictData and its contained VarsDict. condition: condition used by the returned VarsDictData. 
Returns: A VarsDictData with dummy values, using the passed in info. """ vars_dict = generate_dummy_vars_dict(name) return makefile_writer.VarsDictData(vars_dict=vars_dict, name=name, condition=condition) def generate_dummy_makefile(target_dir): """Create a dummy makefile to demonstrate how it works. Use dummy values unrelated to any gyp files. Its output should remain the same unless/until makefile_writer.write_android_mk changes. Args: target_dir: directory in which to write the resulting Android.mk """ common_vars_dict = generate_dummy_vars_dict(None) deviation_params = [('foo', 'COND'), ('bar', None)] deviations = [generate_dummy_vars_dict_data(name, condition) for (name, condition) in deviation_params] makefile_writer.write_android_mk(target_dir=target_dir, common=common_vars_dict, deviations_from_common=deviations) def generate_dummy_tool_makefile(target_dir): """Create a dummy makefile for a tool. Args: target_dir: directory in which to write the resulting Android.mk """ vars_dict = generate_dummy_vars_dict(None) tool_makefile_writer.write_tool_android_mk(target_dir=target_dir, var_dict=vars_dict, place_in_local_tmp=False) class MakefileWriterTest(unittest.TestCase): def test_write_group_empty(self): f = tempfile.TemporaryFile() assert f.tell() == 0 for empty in (None, []): for truth in (True, False): makefile_writer.write_group(f, 'name', empty, truth) self.assertEqual(f.tell(), 0) f.close() def test_write_group(self): animals = ('dog', 'cat', 'mouse', 'elephant') fd, filename = tempfile.mkstemp() with open(filename, 'w') as f: makefile_writer.write_group(f, 'animals', animals, False) os.close(fd) # Now confirm that it matches expectations utils.compare_to_expectation(filename, 'animals.txt', self.assertTrue) with open(filename, 'w') as f: makefile_writer.write_group(f, 'animals_append', animals, True) # Now confirm that it matches expectations utils.compare_to_expectation(filename, 'animals_append.txt', self.assertTrue) os.remove(filename) def 
test_write_local_vars(self): vars_dict = generate_dummy_vars_dict(None) # Compare various ways of calling write_local_vars to expectations. for (filename, append, name) in generate_write_local_vars_params(): fd, outfile = tempfile.mkstemp() with open(outfile, 'w') as f: makefile_writer.write_local_vars(f, vars_dict, append, name) os.close(fd) # Compare to the expected file. utils.compare_to_expectation(outfile, filename, self.assertTrue, REBASELINE_MSG) # KNOWN_TARGETS is always a key in the input VarsDict, but it should not # be written to the resulting file. # Note that this assumes none of our dummy entries is 'KNOWN_TARGETS'. known_targets_name = 'KNOWN_TARGETS' self.assertEqual(len(vars_dict[known_targets_name]), 1) with open(outfile, 'r') as f: self.assertNotIn(known_targets_name, f.read()) os.remove(outfile) def test_write_android_mk(self): outdir = tempfile.mkdtemp() generate_dummy_makefile(outdir) utils.compare_to_expectation(os.path.join(outdir, MAKEFILE_NAME), MAKEFILE_NAME, self.assertTrue, REBASELINE_MSG) shutil.rmtree(outdir) def test_tool_writer(self): outdir = tempfile.mkdtemp() tool_dir = os.path.join(outdir, TOOL_DIR) os.mkdir(tool_dir) generate_dummy_tool_makefile(tool_dir) utils.compare_to_expectation(os.path.join(tool_dir, MAKEFILE_NAME), os.path.join(TOOL_DIR, MAKEFILE_NAME), self.assertTrue, REBASELINE_MSG) def main(): loader = unittest.TestLoader() suite = loader.loadTestsFromTestCase(MakefileWriterTest) results = unittest.TextTestRunner(verbosity=2).run(suite) print repr(results) if not results.wasSuccessful(): raise Exception('failed one or more unittests') def rebaseline(): generate_dummy_makefile(utils.EXPECTATIONS_DIR) vars_dict = generate_dummy_vars_dict(None) for (filename, append, name) in generate_write_local_vars_params(): with open(os.path.join(utils.EXPECTATIONS_DIR, filename), 'w') as f: makefile_writer.write_local_vars(f, vars_dict, append, name) generate_dummy_tool_makefile(os.path.join(utils.EXPECTATIONS_DIR, TOOL_DIR)) if 
__name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('-r', '--rebaseline', help='Rebaseline expectations.', action='store_true') args = parser.parse_args() if args.rebaseline: rebaseline() else: main()
bsd-3-clause
svenstaro/uni-projekt
hardware/test/test_jumpunit.py
1
1624
from unittest import TestCase from myhdl import * from hardware.jumpunit import * class DutClass(): """Wrapper around DUT""" def __init__(self): self.code = Signal(intbv(0)[5:]) self.Z, self.N, self.C, self.V, self.out = [Signal(bool(0)) for _ in range(5)] def Gens(self, trace = False): self.args = [self.code, self.Z, self.N, self.C, self.V, self.out] return traceSignals(jumpunit, *self.args) if trace else jumpunit(*self.args) def genSim(verifyMethod, cl=DutClass, clkfreq=1, trace=False): """ Generates a Simulation Object """ dut_cl = cl() @instance def stimulus(): yield verifyMethod(dut_cl, dut) raise StopSimulation dut = dut_cl.Gens(trace=trace) return Simulation(dut, stimulus) class JumpUnitTest(TestCase): def testJumpUnit(self): def verify(cl, dut): assert isinstance(cl, DutClass) tests = [(0b00000, 0,0,0,0, 0), (0b00001, 0,0,0,0, 1), (0b00010, 0,0,0,0, 0), (0b00010, 0,0,0,1, 1), (0b00011, 0,0,0,0, 1), (0b00011, 0,0,0,1, 0), ] for t in tests: cl.code.next = t[0] cl.Z.next = t[1] cl.N.next = t[2] cl.C.next = t[3] cl.V.next = t[4] yield delay(1) self.assertEquals(t[5], cl.out, msg="%s != %s %s %s %s %s %s" % (cl.out, bin(t[0],width=5),t[1],t[2],t[3],t[4],t[5])) genSim(verify).run() # vim: set ft=python:
gpl-3.0
jq-shell/python-jqsh
jqsh/channel.py
1
8548
import contextlib import functools import jqsh.context import queue import threading class Terminator: """a special value used to signal the end of a channel""" def coerce_other(f): @functools.wraps(f) def wrapper(self, other): import jqsh.values return f(self, jqsh.values.from_native(other)) return wrapper class Channel: _globals = None _locals = None _format_strings = None _context = None input_terminated = False # has the terminator been pushed? terminated = False # has the terminator been popped? def __init__(self, *args, global_namespace=None, local_namespace=None, format_strings=None, terminated=False, empty_namespaces=None, context=None): self.input_lock = threading.Lock() self.output_lock = threading.Lock() # namespaces and context if empty_namespaces is None: empty_namespaces = terminated if empty_namespaces: if global_namespace is None: global_namespace = {} if local_namespace is None: local_namespace = {} if format_strings is None: format_strings = {} if context is None: context = jqsh.context.FilterContext() self.has_globals = threading.Event() self.has_locals = threading.Event() self.has_format_strings = threading.Event() self.has_context = threading.Event() if global_namespace is not None: self.global_namespace = global_namespace if local_namespace is not None: self.local_namespace = local_namespace if format_strings is not None: self.format_strings = format_strings if context is not None: self.context = context # values self.value_queue = queue.Queue() for value in args: self.push(value) if terminated: self.terminate() def __iter__(self): return self def __next__(self): """An alternative to calling pop. Raises StopIteration if the channel is terminated.""" return self.pop() def __truediv__(self, other): """Splits the channel into multiple channels: All values that have not yet been read from this channel, and any values that are added later, will be copied into the other channels. 
The original channel will appear to be terminated immediately, and the split channels will terminate when the original channel is actually terminated. The split channels are returned as a tuple. """ import jqsh.filter def spread_values(split_channels): while True: value = self.value_queue.get() if isinstance(value, Terminator): for chan in split_channels: chan.terminate() break self.store_value(value) for chan in split_channels: chan.push(value) try: other = int(other) except: return NotImplemented buffered_values = [] with self.output_lock: if self.terminated: return tuple([Channel(terminated=True)] * other) self.terminated = True self.value_queue.put(Terminator()) while True: value = self.value_queue.get() if isinstance(value, Terminator): break self.store_value(value) buffered_values.append(value) ret = [Channel(*buffered_values) for _ in range(other)] threading.Thread(target=spread_values, args=(ret,)).start() threading.Thread(target=self.push_namespaces, args=tuple(ret)).start() return tuple(ret) @property def global_namespace(self): self.has_globals.wait() return self._globals @global_namespace.setter def global_namespace(self, value): self._globals = value self.has_globals.set() @property def local_namespace(self): self.has_locals.wait() return self._locals @local_namespace.setter def local_namespace(self, value): self._locals = value self.has_locals.set() @property def format_strings(self): self.has_format_strings.wait() return self._format_strings @format_strings.setter def format_strings(self, value): self._format_strings = value self.has_format_strings.set() @property def context(self): self.has_context.wait() return self._context @context.setter def context(self, value): self._context = value self.has_context.set() def get_namespaces(self, from_channel, include_context=True): from_channel.push_namespaces(self, include_context=include_context) def namespaces(self): return self.global_namespace, self.local_namespace, self.format_strings def pop(self, 
wait=True): """Returns a value. Raises queue.Empty if no element is currently available, and StopIteration if the channel is terminated.""" with self.output_lock: if self.terminated: raise StopIteration('jqsh channel has terminated') ret = self.value_queue.get(block=wait) if isinstance(ret, Terminator): self.terminated = True raise StopIteration('jqsh channel has terminated') self.store_value(ret) return ret def pull(self, from_channel, terminate=True): """Move all values from from_channel to this one, blocking until from_channel terminates, then optionally terminate.""" if terminate: with self.input_lock: if self.input_terminated: raise RuntimeError('jqsh channel has terminated') self.input_terminated = True else: self.input_lock.acquire() while True: try: value = from_channel.pop() except StopIteration: break self.value_queue.put(value) if terminate: self.value_queue.put(Terminator()) else: self.input_lock.release() @coerce_other def push(self, value): with self.input_lock: if self.input_terminated: raise RuntimeError('jqsh channel has terminated') self.value_queue.put(value) def push_attribute(self, attribute_name, *output_channels): """Waits until the attribute is available, then passes it unchanged to the output channels. 
Used by Filter.run_raw and Channel.push_namespaces.""" attribute_value = getattr(self, attribute_name) for chan in output_channels: setattr(chan, attribute_name, attribute_value) def push_namespaces(self, *output_channels, include_context=True): threads = [] for attribute_name in ['global_namespace', 'local_namespace', 'format_strings'] + (['context'] if include_context else []): thread = threading.Thread(target=self.push_attribute, args=(attribute_name,) + output_channels) thread.start() threads.append(thread) for thread in threads: thread.join() def store_value(self, value): pass # subclass this if required, by default channels don't store values def terminate(self): with self.input_lock: self.input_terminated = True self.value_queue.put(Terminator()) def throw(self, exception): """Tries to append the exception onto the channel, failing silently if terminated, then defines all properties for which events are defined, and terminates.""" import jqsh.values if isinstance(exception, str) or isinstance(exception, jqsh.values.String): exception = jqsh.values.JQSHException(exception) with contextlib.suppress(RuntimeError): self.push(exception) if self._globals is None: self.global_namespace = {} if self._locals is None: self.local_namespace = {} if self._format_strings is None: self.format_strings = {} if self._context is None: self.context = jqsh.context.FilterContext() self.terminate()
mit
b0ttl3z/SickRage
sickbeard/notifiers/prowl.py
5
7356
# coding=utf-8
# Author: Nic Wolfe <nic@wolfeden.ca>
# URL: https://sickrage.github.io
#
# This file is part of SickRage.
#
# SickRage is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SickRage is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SickRage. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

from __future__ import unicode_literals

import ast
import socket
import time

from requests.compat import urlencode
from six.moves.http_client import HTTPException, HTTPSConnection

import sickbeard
from sickbeard import common, db, logger
from sickrage.helper.encoding import ss

try:
    # this only exists in 2.6
    from ssl import SSLError
except ImportError:
    # make a fake one since I don't know what it is supposed to be in 2.5
    class SSLError(Exception):
        pass


class Notifier(object):
    # Prowl push-notification backend.
    #
    # Each public notify_* hook checks its PROWL_NOTIFY_* setting, resolves
    # the recipient API keys (global setting plus per-show overrides stored in
    # the tv_shows table) and posts one Prowl event per recipient key.

    def test_notify(self, prowl_api, prowl_priority):
        # force=True bypasses the USE_PROWL master switch so the settings page
        # can verify credentials before the feature is enabled.
        return self._send_prowl(prowl_api, prowl_priority, event="Test", message="Testing Prowl settings from SickRage", force=True)

    def notify_snatch(self, ep_name):
        """Notify all recipients that an episode was snatched (if enabled)."""
        ep_name = ss(ep_name)
        if sickbeard.PROWL_NOTIFY_ONSNATCH:
            show = self._parse_episode(ep_name)
            recipients = self._generate_recipients(show)
            if not recipients:
                logger.log('Skipping prowl notify because there are no configured recipients', logger.DEBUG)
            else:
                for api in recipients:
                    self._send_prowl(prowl_api=api, prowl_priority=None,
                                     event=common.notifyStrings[common.NOTIFY_SNATCH],
                                     message=ep_name + " :: " + time.strftime(sickbeard.DATE_PRESET + " " + sickbeard.TIME_PRESET))

    def notify_download(self, ep_name):
        """Notify all recipients that an episode finished downloading (if enabled)."""
        ep_name = ss(ep_name)
        if sickbeard.PROWL_NOTIFY_ONDOWNLOAD:
            show = self._parse_episode(ep_name)
            recipients = self._generate_recipients(show)
            if not recipients:
                logger.log('Skipping prowl notify because there are no configured recipients', logger.DEBUG)
            else:
                for api in recipients:
                    self._send_prowl(prowl_api=api, prowl_priority=None,
                                     event=common.notifyStrings[common.NOTIFY_DOWNLOAD],
                                     message=ep_name + " :: " + time.strftime(sickbeard.DATE_PRESET + " " + sickbeard.TIME_PRESET))

    def notify_subtitle_download(self, ep_name, lang):
        """Notify all recipients that a subtitle (in `lang`) was downloaded (if enabled)."""
        ep_name = ss(ep_name)
        if sickbeard.PROWL_NOTIFY_ONSUBTITLEDOWNLOAD:
            show = self._parse_episode(ep_name)
            recipients = self._generate_recipients(show)
            if not recipients:
                logger.log('Skipping prowl notify because there are no configured recipients', logger.DEBUG)
            else:
                for api in recipients:
                    self._send_prowl(prowl_api=api, prowl_priority=None,
                                     event=common.notifyStrings[common.NOTIFY_SUBTITLE_DOWNLOAD],
                                     message=ep_name + " [" + lang + "] :: " + time.strftime(sickbeard.DATE_PRESET + " " + sickbeard.TIME_PRESET))

    def notify_git_update(self, new_version="??"):
        """Send a notification (to the global API keys) that an update is available."""
        if sickbeard.USE_PROWL:
            update_text = common.notifyStrings[common.NOTIFY_GIT_UPDATE_TEXT]
            title = common.notifyStrings[common.NOTIFY_GIT_UPDATE]
            # prowl_api=None makes _send_prowl fall back to the global key(s).
            self._send_prowl(prowl_api=None, prowl_priority=None,
                             event=title, message=update_text + new_version)

    def notify_login(self, ipaddress=""):
        """Send a notification that someone logged into the web UI from `ipaddress`."""
        if sickbeard.USE_PROWL:
            update_text = common.notifyStrings[common.NOTIFY_LOGIN_TEXT]
            title = common.notifyStrings[common.NOTIFY_LOGIN]
            self._send_prowl(prowl_api=None, prowl_priority=None,
                             event=title, message=update_text.format(ipaddress))

    @staticmethod
    def _generate_recipients(show=None):
        """Collect the set of Prowl API keys to notify.

        Combines the comma-separated global PROWL_API setting with any keys
        stored per show in tv_shows.notify_list.  `show` is the list of title
        fragments produced by _parse_episode (each is tried as a show name).
        Returns a set of unique, stripped API-key strings.
        """
        apis = []
        mydb = db.DBConnection(row_type='dict')

        # Grab the global recipient(s)
        if sickbeard.PROWL_API:
            for api in sickbeard.PROWL_API.split(','):
                if api.strip():
                    apis.append(api.strip())

        # Grab the per-show-notification recipients
        if show is not None:
            for value in show:
                for subs in mydb.select("SELECT notify_list FROM tv_shows WHERE show_name = ?", (value,)):
                    if subs['notify_list'] and subs['notify_list'][0] == '{':
                        # legacy format handling
                        # NOTE(review): assumes the stored dict always has a
                        # 'prowlAPIs' key -- raises KeyError otherwise; TODO
                        # confirm the writer side always includes it.
                        entries = dict(ast.literal_eval(subs['notify_list']))
                        for api in entries['prowlAPIs'].split(','):
                            if api.strip():
                                apis.append(api.strip())

        # De-duplicate; callers only iterate / truth-test the result.
        apis = set(apis)
        return apis

    @staticmethod
    def _send_prowl(prowl_api=None, prowl_priority=None, event=None, message=None, force=False):
        """POST one notification to the Prowl public API.

        Falls back to the global PROWL_API / PROWL_PRIORITY settings when the
        corresponding argument is None.  Returns True on HTTP 200, False on
        any failure (disabled feature, empty key, network error, bad auth).
        """
        if not sickbeard.USE_PROWL and not force:
            return False

        if prowl_api is None:
            prowl_api = sickbeard.PROWL_API
            if len(prowl_api) == 0:
                return False

        if prowl_priority is None:
            prowl_priority = sickbeard.PROWL_PRIORITY

        title = sickbeard.PROWL_MESSAGE_TITLE

        logger.log("PROWL: Sending notice with details: title=\"{0}\" event=\"{1}\", message=\"{2}\", priority={3}, api={4}".format(title, event, message, prowl_priority, prowl_api), logger.DEBUG)

        http_handler = HTTPSConnection("api.prowlapp.com")

        data = {'apikey': prowl_api,
                'application': title,
                'event': event,
                'description': message.encode('utf-8'),
                'priority': prowl_priority}

        try:
            http_handler.request("POST",
                                 "/publicapi/add",
                                 headers={'Content-type': "application/x-www-form-urlencoded"},
                                 body=urlencode(data))
        except (SSLError, HTTPException, socket.error):
            logger.log("Prowl notification failed.", logger.ERROR)
            return False
        response = http_handler.getresponse()
        request_status = response.status

        if request_status == 200:
            logger.log("Prowl notifications sent.", logger.INFO)
            return True
        elif request_status == 401:
            logger.log("Prowl auth failed: {0}".format(response.reason), logger.ERROR)
            return False
        else:
            logger.log("Prowl notification failed.", logger.ERROR)
            return False

    @staticmethod
    def _parse_episode(ep_name):
        """Split an episode string on ' - ' into candidate show-name fragments.

        Fragments are sorted longest-first so the most specific candidate is
        tried first when matching against tv_shows.show_name.
        """
        ep_name = ss(ep_name)

        sep = " - "
        titles = ep_name.split(sep)
        titles.sort(key=len, reverse=True)
        logger.log("TITLES: {0}".format(titles), logger.DEBUG)
        return titles
gpl-3.0
bocaaust/FreshLife
django_project/env/lib/python2.7/site-packages/setuptools/command/install_lib.py
431
3840
import os
import imp
from itertools import product, starmap
import distutils.command.install_lib as orig


class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def run(self):
        # Build, install, then byte-compile whatever was installed.
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed
        installations.
        """
        # Every parent package of every namespace package contributes
        # exclusion candidates (its __init__ sources/bytecode).
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        # Walk up the dotted name, yielding each ancestor package.
        while pkg_name:
            yield pkg_name
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        yield '__init__.pyc'
        yield '__init__.pyo'

        # PEP 3147 __pycache__ names only exist where imp.get_tag is available.
        if not hasattr(imp, 'get_tag'):
            return

        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        # This override only supports the default flag combination.
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            # Nothing to filter: defer to the stock distutils behaviour.
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output
        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        # pf is the unpack_directory filter: returning False skips the file.
        def pf(src, dst):
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        # Report outputs minus the excluded namespace-package files.
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
apache-2.0
crakensio/django_training
lib/python2.7/site-packages/django/contrib/gis/geos/prototypes/topology.py
221
2936
""" This module houses the GEOS ctypes prototype functions for the topological operations on geometries. """ __all__ = ['geos_boundary', 'geos_buffer', 'geos_centroid', 'geos_convexhull', 'geos_difference', 'geos_envelope', 'geos_intersection', 'geos_linemerge', 'geos_pointonsurface', 'geos_preservesimplify', 'geos_simplify', 'geos_symdifference', 'geos_union', 'geos_relate'] from ctypes import c_double, c_int from django.contrib.gis.geos.libgeos import geos_version_info, GEOM_PTR, GEOS_PREPARE from django.contrib.gis.geos.prototypes.errcheck import check_geom, check_minus_one, check_string from django.contrib.gis.geos.prototypes.geom import geos_char_p from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc def topology(func, *args, **kwargs): "For GEOS unary topology functions." argtypes = [GEOM_PTR] if args: argtypes += args func.argtypes = argtypes func.restype = kwargs.get('restype', GEOM_PTR) func.errcheck = kwargs.get('errcheck', check_geom) return func ### Topology Routines ### geos_boundary = topology(GEOSFunc('GEOSBoundary')) geos_buffer = topology(GEOSFunc('GEOSBuffer'), c_double, c_int) geos_centroid = topology(GEOSFunc('GEOSGetCentroid')) geos_convexhull = topology(GEOSFunc('GEOSConvexHull')) geos_difference = topology(GEOSFunc('GEOSDifference'), GEOM_PTR) geos_envelope = topology(GEOSFunc('GEOSEnvelope')) geos_intersection = topology(GEOSFunc('GEOSIntersection'), GEOM_PTR) geos_linemerge = topology(GEOSFunc('GEOSLineMerge')) geos_pointonsurface = topology(GEOSFunc('GEOSPointOnSurface')) geos_preservesimplify = topology(GEOSFunc('GEOSTopologyPreserveSimplify'), c_double) geos_simplify = topology(GEOSFunc('GEOSSimplify'), c_double) geos_symdifference = topology(GEOSFunc('GEOSSymDifference'), GEOM_PTR) geos_union = topology(GEOSFunc('GEOSUnion'), GEOM_PTR) # GEOSRelate returns a string, not a geometry. 
geos_relate = GEOSFunc('GEOSRelate') geos_relate.argtypes = [GEOM_PTR, GEOM_PTR] geos_relate.restype = geos_char_p geos_relate.errcheck = check_string # Routines only in GEOS 3.1+ if GEOS_PREPARE: geos_cascaded_union = GEOSFunc('GEOSUnionCascaded') geos_cascaded_union.argtypes = [GEOM_PTR] geos_cascaded_union.restype = GEOM_PTR __all__.append('geos_cascaded_union') # Linear referencing routines info = geos_version_info() if info['version'] >= '3.2.0': geos_project = topology(GEOSFunc('GEOSProject'), GEOM_PTR, restype=c_double, errcheck=check_minus_one) geos_interpolate = topology(GEOSFunc('GEOSInterpolate'), c_double) geos_project_normalized = topology(GEOSFunc('GEOSProjectNormalized'), GEOM_PTR, restype=c_double, errcheck=check_minus_one) geos_interpolate_normalized = topology(GEOSFunc('GEOSInterpolateNormalized'), c_double) __all__.extend(['geos_project', 'geos_interpolate', 'geos_project_normalized', 'geos_interpolate_normalized'])
cc0-1.0
iizukak/ecg-htm
src/model_params/model_params_common.py
1
5006
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc.  Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program.  If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------

# NuPIC CLA model configuration (presumably produced by swarming -- the
# many `None` encoder entries are disabled candidates; TODO confirm origin).
MODEL_PARAMS = {
    # No record aggregation: all interval fields are zero.
    'aggregationInfo': {'days': 0,
                        'fields': [(u'timestamp', 'first'),
                                   (u'wavelet_value', 'sum'),
                                   (u'raw_value', 'first')],
                        'hours': 0,
                        'microseconds': 0,
                        'milliseconds': 0,
                        'minutes': 0,
                        'months': 0,
                        'seconds': 0,
                        'weeks': 0,
                        'years': 0},
    'model': 'CLA',
    'modelParams': {
        'anomalyParams': {u'anomalyCacheRecords': None,
                          u'autoDetectThreshold': None,
                          u'autoDetectWaitRecords': None},
        # CLA classifier predicting 1 step ahead.
        'clParams': {'alpha': 0.001973125,
                     'clVerbosity': 0,
                     'regionName': 'CLAClassifierRegion',
                     'steps': '1'},
        'inferenceType': 'TemporalAnomaly',
        # Only timestamp_dayOfWeek and wavelet_value are encoded;
        # entries set to None are pass-through / disabled.
        'sensorParams': {'encoders': {u'raw_value': None,
                                      u'timestamp_dayOfWeek': {'dayOfWeek': (21, 1.09375),
                                                               'fieldname': 'timestamp',
                                                               'name': 'timestamp',
                                                               'type': 'DateEncoder'},
                                      u'timestamp_timeOfDay': None,
                                      u'timestamp_weekend': None,
                                      u'wavelet_value': {'clipInput': True,
                                                         'fieldname': 'wavelet_value',
                                                         'n': 109,
                                                         'name': 'wavelet_value',
                                                         'type': 'AdaptiveScalarEncoder',
                                                         'w': 21}},
                         'sensorAutoReset': None,
                         'verbosity': 0},
        # Spatial pooler configuration.
        'spEnable': True,
        'spParams': {'columnCount': 2048,
                     'globalInhibition': 1,
                     'inputWidth': 0,
                     'maxBoost': 2.0,
                     'numActiveColumnsPerInhArea': 40,
                     'potentialPct': 0.8,
                     'seed': 1956,
                     'spVerbosity': 0,
                     'spatialImp': 'cpp',
                     'synPermActiveInc': 0.05,
                     'synPermConnected': 0.1,
                     'synPermInactiveDec': 0.098130625},
        # Temporal pooler configuration.
        'tpEnable': True,
        'tpParams': {'activationThreshold': 12,
                     'cellsPerColumn': 32,
                     'columnCount': 2048,
                     'globalDecay': 0.0,
                     'initialPerm': 0.21,
                     'inputWidth': 2048,
                     'maxAge': 0,
                     'maxSegmentsPerCell': 128,
                     'maxSynapsesPerSegment': 32,
                     'minThreshold': 9,
                     'newSynapseCount': 20,
                     'outputType': 'normal',
                     'pamLength': 1,
                     'permanenceDec': 0.1,
                     'permanenceInc': 0.1,
                     'seed': 1960,
                     'temporalImp': 'cpp',
                     'verbosity': 0},
        'trainSPNetOnlyIfRequested': False},
    'predictAheadTime': None,
    'version': 1}
agpl-3.0
rameshvs/nipype
nipype/interfaces/spm/tests/test_utils.py
12
3427
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
import os
from nipype.testing import (assert_equal, assert_false,assert_raises,
                            assert_true, skipif, example_data)
from nipype.interfaces.spm import no_spm
import nipype.interfaces.spm.utils as spmu
from nipype.interfaces.base import isdefined
from nipype.utils.filemanip import split_filename, fname_presuffix
from nipype.interfaces.base import TraitError


def test_coreg():
    """CalcCoregAffine derives mat/invmat paths from moving/target names."""
    moving = example_data(infile = 'functional.nii')
    target = example_data(infile = 'T1.nii')
    # NOTE(review): this first `mat` is immediately shadowed below.
    mat = example_data(infile = 'trans.mat')
    coreg = spmu.CalcCoregAffine(matlab_cmd = 'mymatlab')
    coreg.inputs.target = target
    assert_equal(coreg.inputs.matlab_cmd, 'mymatlab')
    coreg.inputs.moving = moving
    assert_equal( isdefined(coreg.inputs.mat),False)
    pth, mov, _ = split_filename(moving)
    _, tgt, _ = split_filename(target)
    # Expected default output names: <moving>_to_<target>.mat and inverse_...
    mat = os.path.join(pth, '%s_to_%s.mat'%(mov,tgt))
    invmat = fname_presuffix(mat, prefix = 'inverse_')
    # Generating the matlab script fills in the default mat/invmat inputs.
    scrpt = coreg._make_matlab_command(None)
    assert_equal(coreg.inputs.mat, mat)
    assert_equal( coreg.inputs.invmat, invmat)


def test_apply_transform():
    """ApplyTransform's generated script applies the .mat to the volume header."""
    moving = example_data(infile = 'functional.nii')
    mat = example_data(infile = 'trans.mat')
    applymat = spmu.ApplyTransform(matlab_cmd = 'mymatlab')
    assert_equal( applymat.inputs.matlab_cmd, 'mymatlab' )
    applymat.inputs.in_file = moving
    applymat.inputs.mat = mat
    scrpt = applymat._make_matlab_command(None)
    expected = '[p n e v] = spm_fileparts(V.fname);'
    assert_equal( expected in scrpt, True)
    expected = 'V.mat = transform.M * V.mat;'
    assert_equal(expected in scrpt, True)


def test_reslice():
    """Reslice validates interp and emits the expected spm_reslice call."""
    moving = example_data(infile = 'functional.nii')
    space_defining = example_data(infile = 'T1.nii')
    reslice = spmu.Reslice(matlab_cmd = 'mymatlab_version')
    assert_equal( reslice.inputs.matlab_cmd, 'mymatlab_version')
    reslice.inputs.in_file = moving
    reslice.inputs.space_defining = space_defining
    # Default interpolation is nearest neighbour (0).
    assert_equal( reslice.inputs.interp, 0)
    # interp must be an integer in range -- strings and 10 are rejected.
    assert_raises(TraitError,reslice.inputs.trait_set,interp = 'nearest')
    assert_raises(TraitError, reslice.inputs.trait_set, interp = 10)
    reslice.inputs.interp = 1
    script = reslice._make_matlab_command(None)
    outfile = fname_presuffix(moving, prefix='r')
    assert_equal(reslice.inputs.out_file, outfile)
    # Whitespace-insensitive check of the generated flags block.
    expected = '\nflags.mean=0;\nflags.which=1;\nflags.mask=0;'
    assert_equal(expected in script.replace(' ',''), True)
    expected_interp = 'flags.interp = 1;\n'
    assert_equal(expected_interp in script, True)
    assert_equal('spm_reslice(invols, flags);' in script, True)


def test_dicom_import():
    """DicomImport exposes sane defaults and validates its enum/file traits."""
    dicom = example_data(infile = 'dicomdir/123456-1-1.dcm')
    di = spmu.DicomImport(matlab_cmd = 'mymatlab')
    assert_equal(di.inputs.matlab_cmd, 'mymatlab')
    assert_equal(di.inputs.output_dir_struct, 'flat')
    assert_equal(di.inputs.output_dir, './converted_dicom')
    assert_equal(di.inputs.format, 'nii')
    assert_equal(di.inputs.icedims, False)
    assert_raises(TraitError,di.inputs.trait_set,output_dir_struct = 'wrong')
    assert_raises(TraitError,di.inputs.trait_set,format = 'FAT')
    # Nonexistent input files are rejected by trait validation.
    assert_raises(TraitError,di.inputs.trait_set,in_files = ['does_sfd_not_32fn_exist.dcm'])
    di.inputs.in_files = [dicom]
    assert_equal(di.inputs.in_files, [dicom])
bsd-3-clause
ns950/calibre
src/calibre/ebooks/pdf/pdftohtml.py
2
6650
# -*- coding: utf-8 -*-

__license__ = 'GPL 3'
__copyright__ = '2008, Kovid Goyal <kovid at kovidgoyal.net>, ' \
                '2009, John Schember <john@nachtimwald.com>'
__docformat__ = 'restructuredtext en'

import errno, os, sys, subprocess, shutil, re
from functools import partial

from calibre.ebooks import ConversionError, DRMError
from calibre.ebooks.chardet import xml_to_unicode
from calibre.ptempfile import PersistentTemporaryFile
from calibre.constants import (isosx, iswindows, islinux, isbsd,
                               filesystem_encoding)
from calibre import CurrentDir
from calibre.utils.cleantext import clean_xml_chars

# Locate the pdftohtml binary; frozen/packaged builds ship their own copy.
PDFTOHTML = 'pdftohtml'
popen = subprocess.Popen
if isosx and hasattr(sys, 'frameworks_dir'):
    PDFTOHTML = os.path.join(getattr(sys, 'frameworks_dir'), PDFTOHTML)
if iswindows and hasattr(sys, 'frozen'):
    base = sys.extensions_location if hasattr(sys, 'new_app_layout') else os.path.dirname(sys.executable)
    PDFTOHTML = os.path.join(base, 'pdftohtml.exe')
    popen = partial(subprocess.Popen, creationflags=0x08)  # CREATE_NO_WINDOW=0x08 so that no ugly console is popped up
if (islinux or isbsd) and getattr(sys, 'frozen', False):
    PDFTOHTML = os.path.join(sys.executables_location, 'bin', 'pdftohtml')


def pdftohtml(output_dir, pdf_path, no_images, as_xml=False):
    '''
    Convert the pdf into html using the pdftohtml app.
    This will write the html as index.html into output_dir. It will also write
    all extracted images to the output_dir
    '''
    pdfsrc = os.path.join(output_dir, u'src.pdf')
    index = os.path.join(output_dir, u'index.'+('xml' if as_xml else 'html'))

    # Work on a local copy so the original PDF is never touched.
    with open(pdf_path, 'rb') as src, open(pdfsrc, 'wb') as dest:
        shutil.copyfileobj(src, dest)

    with CurrentDir(output_dir):
        # This is necessary as pdftohtml doesn't always (linux) respect
        # absolute paths. Also, it allows us to safely pass only bytestring
        # arguments to subprocess on widows
        # subprocess in python 2 cannot handle unicode arguments on windows
        # that cannot be encoded with mbcs. Ensure all args are
        # bytestrings.
        def a(x):
            return os.path.basename(x).encode('ascii')

        exe = PDFTOHTML.encode(filesystem_encoding) if isinstance(PDFTOHTML, unicode) else PDFTOHTML

        # NOTE(review): '-xml' below is a str among bytes args -- works on
        # py2 for ASCII, but inconsistent with the b'' convention here.
        cmd = [exe, b'-enc', b'UTF-8', b'-noframes', b'-p', b'-nomerge',
               b'-nodrm', b'-q', a(pdfsrc), a(index)]

        if isbsd:
            # The BSD build of pdftohtml lacks the -nodrm option.
            cmd.remove(b'-nodrm')
        if no_images:
            cmd.append(b'-i')
        if as_xml:
            cmd.append('-xml')

        logf = PersistentTemporaryFile(u'pdftohtml_log')
        try:
            p = popen(cmd, stderr=logf._fd, stdout=logf._fd,
                      stdin=subprocess.PIPE)
        except OSError as err:
            if err.errno == errno.ENOENT:
                raise ConversionError(
                    _('Could not find pdftohtml, check it is in your PATH'))
            else:
                raise
        # Retry wait() on EINTR (signal interrupted the syscall).
        while True:
            try:
                ret = p.wait()
                break
            except OSError as e:
                if e.errno == errno.EINTR:
                    continue
                else:
                    raise
        logf.flush()
        logf.close()
        out = open(logf.name, 'rb').read().strip()

        if ret != 0:
            raise ConversionError(b'pdftohtml failed with return code: %d\n%s' % (ret, out))
        if out:
            print "pdftohtml log:"
            print out
        # A missing/tiny index is the signature of a DRM-locked PDF.
        if not os.path.exists(index) or os.stat(index).st_size < 100:
            raise DRMError()

        if not as_xml:
            with open(index, 'r+b') as i:
                raw = i.read()
                raw = flip_images(raw)
                raw = '<!-- created by calibre\'s pdftohtml -->\n' + raw
                i.seek(0)
                i.truncate()
                # versions of pdftohtml >= 0.20 output self closing <br> tags, this
                # breaks the pdf heuristics regexps, so replace them
                raw = raw.replace(b'<br/>', b'<br>')
                raw = re.sub(br'<a\s+name=(\d+)', br'<a id="\1"', raw, flags=re.I)
                i.write(raw)

            # Second pass: dump page 1 as XML on stdout to extract the outline.
            cmd = [exe, b'-f', b'1', '-l', '1', b'-xml', b'-i', b'-enc',
                   b'UTF-8', b'-noframes', b'-p', b'-nomerge',
                   b'-nodrm', b'-q', b'-stdout', a(pdfsrc)]
            p = popen(cmd, stdout=subprocess.PIPE)
            raw = p.stdout.read().strip()
            if p.wait() == 0 and raw:
                parse_outline(raw, output_dir)

            if isbsd:
                # NOTE(review): this removal happens after the command already
                # ran, so it has no effect on the executed process -- looks
                # like dead code copied from the first invocation; confirm.
                cmd.remove(b'-nodrm')

        # Best effort cleanup of the working copy.
        try:
            os.remove(pdfsrc)
        except:
            pass


def parse_outline(raw, output_dir):
    """Build toc.ncx in output_dir from pdftohtml's XML outline (if rich enough)."""
    from lxml import etree
    from calibre.ebooks.oeb.parse_utils import RECOVER_PARSER
    raw = clean_xml_chars(xml_to_unicode(raw, strip_encoding_pats=True, assume_utf8=True)[0])
    outline = etree.fromstring(raw, parser=RECOVER_PARSER).xpath('(//outline)[1]')
    if outline:
        from calibre.ebooks.oeb.polish.toc import TOC, create_ncx
        outline = outline[0]
        toc = TOC()
        count = [0]

        # Recursively mirror nested <outline> elements into the TOC tree.
        def process_node(node, toc):
            for child in node.iterdescendants('*'):
                if child.tag == 'outline':
                    parent = toc.children[-1] if toc.children else toc
                    process_node(child, parent)
                else:
                    page = child.get('page', '1')
                    toc.add(child.text, 'index.html', page)
                    count[0] += 1
        process_node(outline, toc)
        # Only keep a TOC that actually has entries (more than 2).
        if count[0] > 2:
            root = create_ncx(toc, (lambda x:x), 'pdftohtml', 'en', 'pdftohtml')
            with open(os.path.join(output_dir, 'toc.ncx'), 'wb') as f:
                f.write(etree.tostring(root, pretty_print=True, with_tail=False,
                                       encoding='utf-8', xml_declaration=True))


def flip_image(img, flip):
    """Mirror the image file in place: 'x' flips horizontally, 'y' vertically."""
    from calibre.utils.magick import Image
    im = Image()
    im.open(img)
    if b'x' in flip:
        im.flip(True)
    if b'y' in flip:
        im.flip()
    im.save(img)


def flip_images(raw):
    """Apply class="...flip" transforms to referenced images and strip STYLE blocks."""
    for match in re.finditer(b'<IMG[^>]+/?>', raw, flags=re.I):
        img = match.group()
        m = re.search(br'class="(x|y|xy)flip"', img)
        if m is None:
            continue
        flip = m.group(1)
        src = re.search(br'src="([^"]+)"', img)
        if src is None:
            continue
        img = src.group(1)
        if not os.path.exists(img):
            continue
        flip_image(img, flip)
    raw = re.sub(br'<STYLE.+?</STYLE>\s*', b'', raw, flags=re.I|re.DOTALL)
    return raw
gpl-3.0
asis92/kernel-lp-lg-d802
tools/perf/scripts/python/netdev-times.py
11271
15048
# Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.

import os
import sys

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
    '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

from perf_trace_context import *
from Core import *
from Util import *

all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
                 # and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
                  # skb_copy_datagram_iovec

buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
                       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count

tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count

tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count

tx_free_list = []; # list of packets which is freed

# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;

# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5

# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
    return (dst - src) / 1000000.0

# Display a process of transmitting a packet
def print_transmit(hunk):
    # Honour the dev= filter, if given.
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
        nsecs_secs(hunk['queue_t']),
        nsecs_nsecs(hunk['queue_t'])/1000,
        diff_msec(hunk['queue_t'], hunk['xmit_t']),
        diff_msec(hunk['xmit_t'], hunk['free_t']))

# Format for displaying rx packet processing
PF_IRQ_ENTRY= "  irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY="  softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= "  napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= "         |"
PF_WJOINT= "         |            |"
PF_NET_RECV= "         |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= "         |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= "         |      skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= "         |      kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= "         |      consume_skb(+%.3fmsec)"

# Display a process of received packets and interrputs associated with
# a NET_RX softirq
def print_receive(hunk):
    show_hunk = 0
    irq_list = hunk['irq_list']
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be showed
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return

    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
            irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                    irq_event['skbaddr'])
                print PF_JOINT
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                event['len'])
            if 'comm' in event.keys():
                # The skb was copied to a user process.
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                    event['pid'], event['comm'])
            elif 'handle' in event.keys():
                # The skb was freed in-kernel (dropped or consumed).
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t,
                        event['comm_t']),
                        event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t,
                            event['comm_t'])
            print PF_JOINT

# Parse command line options (tx / rx / dev= / debug).
def trace_begin():
    global show_tx
    global show_rx
    global dev
    global debug

    for i in range(len(sys.argv)):
        if i == 0:
            continue
        arg = sys.argv[i]
        if arg == 'tx':
            show_tx = 1
        elif arg =='rx':
            show_rx = 1
        elif arg.find('dev=',0, 4) >= 0:
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1

    # Neither tx nor rx requested: show both.
    if show_tx == 0  and show_rx == 0:
        show_tx = 1
        show_rx = 1

# Replay all buffered events in time order, dispatch them to the
# handle_* correlators, then print the collected rx/tx hunks.
def trace_end():
    # order all events in time
    all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
                        b[EINFO_IDX_TIME]))
    # process all events
    for i in range(len(all_event_list)):
        event_info = all_event_list[i]
        name = event_info[EINFO_IDX_NAME]
        if name == 'irq__softirq_exit':
            handle_irq_softirq_exit(event_info)
        elif name == 'irq__softirq_entry':
            handle_irq_softirq_entry(event_info)
        elif name == 'irq__softirq_raise':
            handle_irq_softirq_raise(event_info)
        elif name == 'irq__irq_handler_entry':
            handle_irq_handler_entry(event_info)
        elif name == 'irq__irq_handler_exit':
            handle_irq_handler_exit(event_info)
        elif name == 'napi__napi_poll':
            handle_napi_poll(event_info)
        elif name == 'net__netif_receive_skb':
            handle_netif_receive_skb(event_info)
        elif name == 'net__netif_rx':
            handle_netif_rx(event_info)
        elif name == 'skb__skb_copy_datagram_iovec':
            handle_skb_copy_datagram_iovec(event_info)
        elif name == 'net__net_dev_queue':
            handle_net_dev_queue(event_info)
        elif name == 'net__net_dev_xmit':
            handle_net_dev_xmit(event_info)
        elif name == 'skb__kfree_skb':
            handle_kfree_skb(event_info)
        elif name == 'skb__consume_skb':
            handle_consume_skb(event_info)

    # display receive hunks
    if show_rx:
        for i in range(len(receive_hunk_list)):
            print_receive(receive_hunk_list[i])

    # display transmit hunks
    if show_tx:
        print "   dev    len      Qdisc        " \
            "       netdevice             free"
        for i in range(len(tx_free_list)):
            print_transmit(tx_free_list[i])

    if debug:
        print "debug buffer status"
        print "----------------------------"
        print "xmit Qdisc:remain:%d overflow:%d" % \
            (len(tx_queue_list), of_count_tx_queue_list)
        print "xmit netdevice:remain:%d overflow:%d" % \
            (len(tx_xmit_list), of_count_tx_xmit_list)
        print "receive:remain:%d overflow:%d" % \
            (len(rx_skb_list), of_count_rx_skb_list)

# called from perf, when it finds a correspoinding event
# These callbacks only buffer events into all_event_list; the real
# correlation work happens in trace_end after the global time sort.
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
                           irq, irq_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  irq, irq_name)
    all_event_list.append(event_info)

def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)

def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  napi, dev_name)
    all_event_list.append(event_info)

def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                           skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                  skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
                       skbaddr, skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
                      skbaddr, skblen, rc, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc ,dev_name)
    all_event_list.append(event_info)

def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, protocol, location)
    all_event_list.append(event_info)

def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr)
    all_event_list.append(event_info)

def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
                                 skbaddr, skblen):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen)
    all_event_list.append(event_info)

# Push a hardware-irq record on this cpu's stack.
def handle_irq_handler_entry(event_info):
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    if cpu not in irq_dic.keys():
        irq_dic[cpu] = []
    irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
    irq_dic[cpu].append(irq_record)

# Close the top irq record; keep it only if it raised NET_RX work.
def handle_irq_handler_exit(event_info):
    (name, context, cpu, time, pid, comm, irq, ret) = event_info
    if cpu not in irq_dic.keys():
        return
    irq_record = irq_dic[cpu].pop()
    if irq != irq_record['irq']:
        return
    irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include NET_RX softirq, drop.
    if 'event_list' in irq_record.keys():
        irq_dic[cpu].append(irq_record)

def handle_irq_softirq_raise(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

def handle_irq_softirq_entry(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    # Start collecting receive events for this softirq pass.
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}

# End of a NET_RX softirq: merge the pending irq records and the
# receive events gathered during the softirq into one "hunk".
def handle_irq_softirq_exit(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    if irq_list == [] or event_list == 0:
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information realted to a NET_RX softirq
    receive_hunk_list.append(rec_data)

def handle_napi_poll(event_info):
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        event_list = net_rx_dic[cpu]['event_list']
        rec_data = {'event_name':'napi_poll',
                    'dev':dev_name, 'event_t':time}
        event_list.append(rec_data)

# netif_rx happens in hard-irq context: attach it to the current irq record.
def handle_netif_rx(event_info):
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
        'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

def handle_netif_receive_skb(event_info):
    global of_count_rx_skb_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        # Also remember the skb so a later copy/free event can be matched;
        # the list is bounded by buffer_budget (oldest entries dropped).
        rx_skb_list.insert(0, rec_data)
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1

def handle_net_dev_queue(event_info):
    global of_count_tx_queue_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    tx_queue_list.insert(0, skb)
    if len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1

# Move a queued skb to the xmit list once the device accepted it.
def handle_net_dev_xmit(event_info):
    global of_count_tx_xmit_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return

# kfree_skb can close out a tx skb (drop), a transmitted skb, or tag an
# rx skb as dropped -- whichever list matches the address first.
def handle_kfree_skb(event_info):
    (name, context, cpu, time, pid, comm,
        skbaddr, protocol, location) = event_info
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                             'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return

def handle_consume_skb(event_info):
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return

# An rx skb was copied to userspace: record who received it.
def handle_skb_copy_datagram_iovec(event_info):
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                             'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
gpl-2.0
mlavin/django
django/contrib/gis/gdal/prototypes/srs.py
88
3540
"""
ctypes function prototypes for GDAL's OGR Spatial Reference (OSR) C API.

Each module-level name below wraps one OSR/OCT entry point with argument
types, return handling, and error checking supplied by the generator
helpers in ``prototypes.generation``.
"""
from ctypes import POINTER, c_char_p, c_int, c_void_p

from django.contrib.gis.gdal.libgdal import lgdal, std_call
from django.contrib.gis.gdal.prototypes.generation import (
    const_string_output, double_output, int_output, srs_output,
    string_output, void_output,
)


# Shortcut generation for routines with known parameters.
def srs_double(f):
    """
    Create a function prototype for the OSR routines that take
    the OSRSpatialReference object and return a double value.
    """
    # The POINTER(c_int) is OSR's out-parameter error flag; errcheck=True
    # makes the wrapper raise on failure instead of returning silently.
    return double_output(f, [c_void_p, POINTER(c_int)], errcheck=True)


def units_func(f):
    """
    Create a ctypes function prototype for OSR units functions, e.g.,
    OSRGetAngularUnits, OSRGetLinearUnits.
    """
    # strarg=True: the c_char_p out-parameter receives the unit name.
    return double_output(f, [c_void_p, POINTER(c_char_p)], strarg=True)

# Creation & destruction.
clone_srs = srs_output(std_call('OSRClone'), [c_void_p])
new_srs = srs_output(std_call('OSRNewSpatialReference'), [c_char_p])
release_srs = void_output(lgdal.OSRRelease, [c_void_p], errcheck=False)
destroy_srs = void_output(std_call('OSRDestroySpatialReference'), [c_void_p], errcheck=False)
srs_validate = void_output(lgdal.OSRValidate, [c_void_p])

# Getting the semi_major, semi_minor, and flattening functions.
semi_major = srs_double(lgdal.OSRGetSemiMajor)
semi_minor = srs_double(lgdal.OSRGetSemiMinor)
invflattening = srs_double(lgdal.OSRGetInvFlattening)

# WKT, PROJ, EPSG, XML importation routines.
from_wkt = void_output(lgdal.OSRImportFromWkt, [c_void_p, POINTER(c_char_p)])
from_proj = void_output(lgdal.OSRImportFromProj4, [c_void_p, c_char_p])
from_epsg = void_output(std_call('OSRImportFromEPSG'), [c_void_p, c_int])
from_xml = void_output(lgdal.OSRImportFromXML, [c_void_p, c_char_p])
from_user_input = void_output(std_call('OSRSetFromUserInput'), [c_void_p, c_char_p])

# Morphing to/from ESRI WKT.
morph_to_esri = void_output(lgdal.OSRMorphToESRI, [c_void_p])
morph_from_esri = void_output(lgdal.OSRMorphFromESRI, [c_void_p])

# Identifying the EPSG
identify_epsg = void_output(lgdal.OSRAutoIdentifyEPSG, [c_void_p])

# Getting the angular_units, linear_units functions
linear_units = units_func(lgdal.OSRGetLinearUnits)
angular_units = units_func(lgdal.OSRGetAngularUnits)

# For exporting to WKT, PROJ.4, "Pretty" WKT, and XML.
to_wkt = string_output(std_call('OSRExportToWkt'), [c_void_p, POINTER(c_char_p)], decoding='utf-8')
to_proj = string_output(std_call('OSRExportToProj4'), [c_void_p, POINTER(c_char_p)], decoding='ascii')
to_pretty_wkt = string_output(
    std_call('OSRExportToPrettyWkt'),
    [c_void_p, POINTER(c_char_p), c_int], offset=-2,
    decoding='utf-8'
)

# Memory leak fixed in GDAL 1.5; still exists in 1.4.
to_xml = string_output(lgdal.OSRExportToXML, [c_void_p, POINTER(c_char_p), c_char_p], offset=-2, decoding='utf-8')

# String attribute retrieval routines.
get_attr_value = const_string_output(std_call('OSRGetAttrValue'), [c_void_p, c_char_p, c_int], decoding='utf-8')
get_auth_name = const_string_output(lgdal.OSRGetAuthorityName, [c_void_p, c_char_p], decoding='ascii')
get_auth_code = const_string_output(lgdal.OSRGetAuthorityCode, [c_void_p, c_char_p], decoding='ascii')

# SRS Properties
isgeographic = int_output(lgdal.OSRIsGeographic, [c_void_p])
islocal = int_output(lgdal.OSRIsLocal, [c_void_p])
isprojected = int_output(lgdal.OSRIsProjected, [c_void_p])

# Coordinate transformation
new_ct = srs_output(std_call('OCTNewCoordinateTransformation'), [c_void_p, c_void_p])
destroy_ct = void_output(std_call('OCTDestroyCoordinateTransformation'), [c_void_p], errcheck=False)
bsd-3-clause
wittyameta/elasticsearch
dev-tools/create_bwc_repo_with_ancient_indices.py
160
2738
import create_bwc_index
import logging
import os
import shutil
import subprocess
import sys
import tempfile

def fetch_version(version):
  """Download the given Elasticsearch release via get-bwc-version.py
  (located next to this script); raise if the download fails."""
  logging.info('fetching ES version %s' % version)
  if subprocess.call([sys.executable, os.path.join(os.path.split(sys.argv[0])[0], 'get-bwc-version.py'), version]) != 0:
    raise RuntimeError('failed to download ES version %s' % version)

def main():
  '''
  Creates a back compat index (.zip) using v0.20 and then creates a snapshot of it using v1.1
  '''
  logging.basicConfig(format='[%(levelname)s] [%(asctime)s] %(message)s', level=logging.INFO,
                      datefmt='%Y-%m-%d %I:%M:%S %p')
  logging.getLogger('elasticsearch').setLevel(logging.ERROR)
  logging.getLogger('urllib3').setLevel(logging.WARN)
  tmp_dir = tempfile.mkdtemp()
  try:
    data_dir = os.path.join(tmp_dir, 'data')
    logging.info('Temp data dir: %s' % data_dir)

    first_version = '0.20.6'
    second_version = '1.1.2'
    index_name = 'index-%s-and-%s' % (first_version, second_version)

    # Download old ES releases if necessary:
    release_dir = os.path.join('backwards', 'elasticsearch-%s' % first_version)
    if not os.path.exists(release_dir):
      fetch_version(first_version)

    node = create_bwc_index.start_node(first_version, release_dir, data_dir, cluster_name=index_name)
    client = create_bwc_index.create_client()

    # Creates the index & indexes docs w/ first_version:
    create_bwc_index.generate_index(client, first_version, index_name)

    # Make sure we write segments:
    # NOTE(review): the 'ok' field assumes the pre-2.x flush response
    # format — valid for the 0.20/1.1 clients used here.
    flush_result = client.indices.flush(index=index_name)
    if not flush_result['ok']:
      raise RuntimeError('flush failed: %s' % str(flush_result))

    create_bwc_index.shutdown_node(node)
    print('%s server output:\n%s' % (first_version, node.stdout.read().decode('utf-8')))
    # Clear 'node' so the finally block doesn't shut it down twice.
    node = None

    release_dir = os.path.join('backwards', 'elasticsearch-%s' % second_version)
    if not os.path.exists(release_dir):
      fetch_version(second_version)

    # Now use second_version to snapshot the index:
    node = create_bwc_index.start_node(second_version, release_dir, data_dir, cluster_name=index_name)
    client = create_bwc_index.create_client()

    repo_dir = os.path.join(tmp_dir, 'repo')
    create_bwc_index.snapshot_index(client, second_version, repo_dir)
    create_bwc_index.shutdown_node(node)
    print('%s server output:\n%s' % (second_version, node.stdout.read().decode('utf-8')))

    create_bwc_index.compress(tmp_dir, "src/test/resources/indices/bwc", 'unsupportedrepo-%s.zip' % first_version, 'repo')

    node = None
  finally:
    # Best-effort cleanup: stop any node still running, then remove the
    # whole temp tree (data dir and repo dir included).
    if node is not None:
      create_bwc_index.shutdown_node(node)
    shutil.rmtree(tmp_dir)

if __name__ == '__main__':
  main()
apache-2.0
hawkeyexp/plugin.video.netflix
packages/mysql-connector-python/mysql/connector/django/client.py
1
1789
# MySQL Connector/Python - MySQL driver written in Python. import django import subprocess from django.db.backends.base.client import BaseDatabaseClient class DatabaseClient(BaseDatabaseClient): executable_name = 'mysql' @classmethod def settings_to_cmd_args(cls, settings_dict): args = [cls.executable_name] db = settings_dict['OPTIONS'].get('database', settings_dict['NAME']) user = settings_dict['OPTIONS'].get('user', settings_dict['USER']) passwd = settings_dict['OPTIONS'].get('password', settings_dict['PASSWORD']) host = settings_dict['OPTIONS'].get('host', settings_dict['HOST']) port = settings_dict['OPTIONS'].get('port', settings_dict['PORT']) defaults_file = settings_dict['OPTIONS'].get('read_default_file') # --defaults-file should always be the first option if defaults_file: args.append("--defaults-file={0}".format(defaults_file)) # We force SQL_MODE to TRADITIONAL args.append("--init-command=SET @@session.SQL_MODE=TRADITIONAL") if user: args.append("--user={0}".format(user)) if passwd: args.append("--password={0}".format(passwd)) if host: if '/' in host: args.append("--socket={0}".format(host)) else: args.append("--host={0}".format(host)) if port: args.append("--port={0}".format(port)) if db: args.append("--database={0}".format(db)) return args def runshell(self): args = DatabaseClient.settings_to_cmd_args( self.connection.settings_dict) subprocess.call(args)
mit
ZhuangER/robot_path_planning
gui/pyqtgraph/pgcollections.py
48
15608
# -*- coding: utf-8 -*-
"""
advancedTypes.py - Basic data structures not included with python
Copyright 2010  Luke Campagnola
Distributed under MIT/X11 license. See license.txt for more infomation.

Includes:
  - OrderedDict - Dictionary which preserves the order of its elements
  - BiDict, ReverseDict - Bi-directional dictionaries
  - ThreadsafeDict, ThreadsafeList - Self-mutexed data structures
"""

import threading, sys, copy, collections
#from debug import *

try:
    from collections import OrderedDict
except ImportError:
    # fallback: try to use the ordereddict backport when using python 2.6
    from ordereddict import OrderedDict


class ReverseDict(dict):
    """extends dict so that reverse lookups are possible by requesting the
    key as a list of length 1:

       d = ReverseDict({'x': 1, 'y': 2})
       d['x']    # 1
       d[[2]]    # 'y'
    """
    def __init__(self, data=None):
        if data is None:
            data = {}
        # Secondary value->key map used for the list-wrapped lookups.
        # NOTE(review): it is only maintained by __setitem__, not by
        # del/pop/update — confirm that matches intended usage.
        self.reverse = {}
        for k in data:
            self.reverse[data[k]] = k
        dict.__init__(self, data)

    def __getitem__(self, item):
        # A list argument signals a reverse (value -> key) lookup.
        if type(item) is list:
            return self.reverse[item[0]]
        else:
            return dict.__getitem__(self, item)

    def __setitem__(self, item, value):
        self.reverse[value] = item
        dict.__setitem__(self, item, value)

    def __deepcopy__(self, memo):
        raise Exception("deepcopy not implemented")


class BiDict(dict):
    """extends dict so that reverse lookups are possible by adding each
    reverse combination to the dict. This only works if all values and keys
    are unique."""
    def __init__(self, data=None):
        if data is None:
            data = {}
        dict.__init__(self)
        for k in data:
            # Stores both k->v and v->k via our own __setitem__.
            self[data[k]] = k

    def __setitem__(self, item, value):
        dict.__setitem__(self, item, value)
        dict.__setitem__(self, value, item)

    def __deepcopy__(self, memo):
        raise Exception("deepcopy not implemented")


class ThreadsafeDict(dict):
    """Extends dict so that getitem, setitem, and contains are all thread-safe.
    Also adds lock/unlock functions for extended exclusive operations
    Converts all sub-dicts and lists to threadsafe as well.
    """
    def __init__(self, *args, **kwargs):
        # Reentrant lock so a thread holding lock() can still use the
        # locked accessor methods.
        self.mutex = threading.RLock()
        dict.__init__(self, *args, **kwargs)
        for k in self:
            if type(self[k]) is dict:
                self[k] = ThreadsafeDict(self[k])

    def __getitem__(self, attr):
        self.lock()
        try:
            val = dict.__getitem__(self, attr)
        finally:
            self.unlock()
        return val

    def __setitem__(self, attr, val):
        if type(val) is dict:
            val = ThreadsafeDict(val)
        self.lock()
        try:
            dict.__setitem__(self, attr, val)
        finally:
            self.unlock()

    def __contains__(self, attr):
        self.lock()
        try:
            val = dict.__contains__(self, attr)
        finally:
            self.unlock()
        return val

    def __len__(self):
        self.lock()
        try:
            val = dict.__len__(self)
        finally:
            self.unlock()
        return val

    def clear(self):
        self.lock()
        try:
            dict.clear(self)
        finally:
            self.unlock()

    def lock(self):
        self.mutex.acquire()

    def unlock(self):
        self.mutex.release()

    def __deepcopy__(self, memo):
        raise Exception("deepcopy not implemented")


class ThreadsafeList(list):
    """Extends list so that getitem, setitem, and contains are all thread-safe.
    Also adds lock/unlock functions for extended exclusive operations
    Converts all sub-lists and dicts to threadsafe as well.
    """
    def __init__(self, *args, **kwargs):
        self.mutex = threading.RLock()
        list.__init__(self, *args, **kwargs)
        # NOTE(review): 'mkThreadsafe' is not defined anywhere in this
        # module (presumably 'makeThreadsafe' was intended), and iterating
        # list *values* then using them as indices looks wrong — this loop
        # would raise NameError on any non-empty input. Confirm before use.
        for k in self:
            self[k] = mkThreadsafe(self[k])

    def __getitem__(self, attr):
        self.lock()
        try:
            val = list.__getitem__(self, attr)
        finally:
            self.unlock()
        return val

    def __setitem__(self, attr, val):
        val = makeThreadsafe(val)
        self.lock()
        try:
            list.__setitem__(self, attr, val)
        finally:
            self.unlock()

    def __contains__(self, attr):
        self.lock()
        try:
            val = list.__contains__(self, attr)
        finally:
            self.unlock()
        return val

    def __len__(self):
        self.lock()
        try:
            val = list.__len__(self)
        finally:
            self.unlock()
        return val

    def lock(self):
        self.mutex.acquire()

    def unlock(self):
        self.mutex.release()

    def __deepcopy__(self, memo):
        raise Exception("deepcopy not implemented")


def makeThreadsafe(obj):
    """Wrap dicts/lists in their thread-safe equivalents; pass immutable
    scalars through unchanged; refuse anything else."""
    if type(obj) is dict:
        return ThreadsafeDict(obj)
    elif type(obj) is list:
        return ThreadsafeList(obj)
    elif type(obj) in [str, int, float, bool, tuple]:
        return obj
    else:
        raise Exception("Not sure how to make object of type %s thread-safe" % str(type(obj)))


class Locker(object):
    """RAII-style helper: acquires the lock on construction and releases it
    when the object is garbage-collected."""
    def __init__(self, lock):
        self.lock = lock
        self.lock.acquire()
    def __del__(self):
        try:
            self.lock.release()
        except:
            pass


class CaselessDict(OrderedDict):
    """Case-insensitive dict. Values can be set and retrieved using keys of any case.
    Note that when iterating, the original case is returned for each key."""
    def __init__(self, *args):
        OrderedDict.__init__(self, {}) ## requirement for the empty {} here seems to be a python bug?
        # keyMap: lowercase key -> original-case key, used for all lookups.
        self.keyMap = OrderedDict([(k.lower(), k) for k in OrderedDict.keys(self)])
        if len(args) == 0:
            return
        elif len(args) == 1 and isinstance(args[0], dict):
            for k in args[0]:
                self[k] = args[0][k]
        else:
            raise Exception("CaselessDict may only be instantiated with a single dict.")

    #def keys(self):
        #return self.keyMap.values()

    def __setitem__(self, key, val):
        kl = key.lower()
        if kl in self.keyMap:
            # Key already exists (in some case): update under original case.
            OrderedDict.__setitem__(self, self.keyMap[kl], val)
        else:
            OrderedDict.__setitem__(self, key, val)
            self.keyMap[kl] = key

    def __getitem__(self, key):
        kl = key.lower()
        if kl not in self.keyMap:
            raise KeyError(key)
        return OrderedDict.__getitem__(self, self.keyMap[kl])

    def __contains__(self, key):
        return key.lower() in self.keyMap

    def update(self, d):
        # NOTE(review): d.iteritems() is Python-2-only; this module appears
        # to predate py3 support — fails with AttributeError on py3.
        for k, v in d.iteritems():
            self[k] = v

    def copy(self):
        return CaselessDict(OrderedDict.copy(self))

    def __delitem__(self, key):
        kl = key.lower()
        if kl not in self.keyMap:
            raise KeyError(key)
        OrderedDict.__delitem__(self, self.keyMap[kl])
        del self.keyMap[kl]

    def __deepcopy__(self, memo):
        raise Exception("deepcopy not implemented")

    def clear(self):
        OrderedDict.clear(self)
        self.keyMap.clear()


class ProtectedDict(dict):
    """
    A class allowing read-only 'view' of a dict.
    The object can be treated like a normal dict, but will never modify the original dict it points to.
    Any values accessed from the dict will also be read-only.
    """
    def __init__(self, data):
        self._data_ = data

    ## List of methods to directly wrap from _data_
    # NOTE(review): '_cmp_' looks like a typo for '__cmp__' (py2 rich-compare
    # fallback); as written it wraps a nonexistent attribute name.
    wrapMethods = ['_cmp_', '__contains__', '__eq__', '__format__', '__ge__', '__gt__', '__le__', '__len__', '__lt__', '__ne__', '__reduce__', '__reduce_ex__', '__repr__', '__str__', 'count', 'has_key', 'iterkeys', 'keys', ]

    ## List of methods which wrap from _data_ but return protected results
    protectMethods = ['__getitem__', '__iter__', 'get', 'items', 'values']

    ## List of methods to disable
    disableMethods = ['__delitem__', '__setitem__', 'clear', 'pop', 'popitem', 'setdefault', 'update']

    ## Template methods
    def wrapMethod(methodName):
        return lambda self, *a, **k: getattr(self._data_, methodName)(*a, **k)

    def protectMethod(methodName):
        return lambda self, *a, **k: protect(getattr(self._data_, methodName)(*a, **k))

    def error(self, *args, **kargs):
        raise Exception("Can not modify read-only list.")

    ## Directly (and explicitly) wrap some methods from _data_
    ## Many of these methods can not be intercepted using __getattribute__, so they
    ## must be implemented explicitly
    # Note: assigning into locals() works here only because this runs inside
    # the class body, whose local namespace becomes the class dict.
    for methodName in wrapMethods:
        locals()[methodName] = wrapMethod(methodName)

    ## Wrap some methods from _data_ with the results converted to protected objects
    for methodName in protectMethods:
        locals()[methodName] = protectMethod(methodName)

    ## Disable any methods that could change data in the list
    for methodName in disableMethods:
        locals()[methodName] = error

    ## Add a few extra methods.
    def copy(self):
        raise Exception("It is not safe to copy protected dicts! (instead try deepcopy, but be careful.)")

    def itervalues(self):
        for v in self._data_.itervalues():
            yield protect(v)

    def iteritems(self):
        for k, v in self._data_.iteritems():
            yield (k, protect(v))

    def deepcopy(self):
        return copy.deepcopy(self._data_)

    def __deepcopy__(self, memo):
        return copy.deepcopy(self._data_, memo)


class ProtectedList(collections.Sequence):
    """
    A class allowing read-only 'view' of a list or dict.
    The object can be treated like a normal list, but will never modify the original list it points to.
    Any values accessed from the list will also be read-only.

    Note: It would be nice if we could inherit from list or tuple so that isinstance checks would work.
          However, doing this causes tuple(obj) to return unprotected results (importantly, this means
          unpacking into function arguments will also fail)
    """
    def __init__(self, data):
        self._data_ = data
        #self.__mro__ = (ProtectedList, object)

    ## List of methods to directly wrap from _data_
    wrapMethods = ['__contains__', '__eq__', '__format__', '__ge__', '__gt__', '__le__', '__len__', '__lt__', '__ne__', '__reduce__', '__reduce_ex__', '__repr__', '__str__', 'count', 'index']

    ## List of methods which wrap from _data_ but return protected results
    protectMethods = ['__getitem__', '__getslice__', '__mul__', '__reversed__', '__rmul__']

    ## List of methods to disable
    disableMethods = ['__delitem__', '__delslice__', '__iadd__', '__imul__', '__setitem__', '__setslice__', 'append', 'extend', 'insert', 'pop', 'remove', 'reverse', 'sort']

    ## Template methods
    def wrapMethod(methodName):
        return lambda self, *a, **k: getattr(self._data_, methodName)(*a, **k)

    def protectMethod(methodName):
        return lambda self, *a, **k: protect(getattr(self._data_, methodName)(*a, **k))

    def error(self, *args, **kargs):
        raise Exception("Can not modify read-only list.")

    ## Directly (and explicitly) wrap some methods from _data_
    ## Many of these methods can not be intercepted using __getattribute__, so they
    ## must be implemented explicitly
    for methodName in wrapMethods:
        locals()[methodName] = wrapMethod(methodName)

    ## Wrap some methods from _data_ with the results converted to protected objects
    for methodName in protectMethods:
        locals()[methodName] = protectMethod(methodName)

    ## Disable any methods that could change data in the list
    for methodName in disableMethods:
        locals()[methodName] = error

    ## Add a few extra methods.
    def __iter__(self):
        for item in self._data_:
            yield protect(item)

    def __add__(self, op):
        # Concatenation returns a *protected* copy; accepts either another
        # protected list or a plain list.
        if isinstance(op, ProtectedList):
            return protect(self._data_.__add__(op._data_))
        elif isinstance(op, list):
            return protect(self._data_.__add__(op))
        else:
            raise TypeError("Argument must be a list.")

    def __radd__(self, op):
        if isinstance(op, ProtectedList):
            return protect(op._data_.__add__(self._data_))
        elif isinstance(op, list):
            return protect(op.__add__(self._data_))
        else:
            raise TypeError("Argument must be a list.")

    def deepcopy(self):
        return copy.deepcopy(self._data_)

    def __deepcopy__(self, memo):
        return copy.deepcopy(self._data_, memo)

    def poop(self):
        raise Exception("This is a list. It does not poop.")


class ProtectedTuple(collections.Sequence):
    """
    A class allowing read-only 'view' of a tuple.
    The object can be treated like a normal tuple, but its contents will be returned as protected objects.

    Note: It would be nice if we could inherit from list or tuple so that isinstance checks would work.
          However, doing this causes tuple(obj) to return unprotected results (importantly, this means
          unpacking into function arguments will also fail)
    """
    def __init__(self, data):
        self._data_ = data

    ## List of methods to directly wrap from _data_
    wrapMethods = ['__contains__', '__eq__', '__format__', '__ge__', '__getnewargs__', '__gt__', '__hash__', '__le__', '__len__', '__lt__', '__ne__', '__reduce__', '__reduce_ex__', '__repr__', '__str__', 'count', 'index']

    ## List of methods which wrap from _data_ but return protected results
    protectMethods = ['__getitem__', '__getslice__', '__iter__', '__add__', '__mul__', '__reversed__', '__rmul__']

    ## Template methods
    def wrapMethod(methodName):
        return lambda self, *a, **k: getattr(self._data_, methodName)(*a, **k)

    def protectMethod(methodName):
        return lambda self, *a, **k: protect(getattr(self._data_, methodName)(*a, **k))

    ## Directly (and explicitly) wrap some methods from _data_
    ## Many of these methods can not be intercepted using __getattribute__, so they
    ## must be implemented explicitly
    for methodName in wrapMethods:
        locals()[methodName] = wrapMethod(methodName)

    ## Wrap some methods from _data_ with the results converted to protected objects
    for methodName in protectMethods:
        locals()[methodName] = protectMethod(methodName)

    ## Add a few extra methods.
    def deepcopy(self):
        return copy.deepcopy(self._data_)

    def __deepcopy__(self, memo):
        return copy.deepcopy(self._data_, memo)


def protect(obj):
    """Return a read-only view for dict/list/tuple; other objects are
    returned unchanged (assumed immutable or out of scope)."""
    if isinstance(obj, dict):
        return ProtectedDict(obj)
    elif isinstance(obj, list):
        return ProtectedList(obj)
    elif isinstance(obj, tuple):
        return ProtectedTuple(obj)
    else:
        return obj


if __name__ == '__main__':
    # Minimal smoke-test exercising protect() on nested containers.
    d = {'x': 1, 'y': [1,2], 'z': ({'a': 2, 'b': [3,4], 'c': (5,6)}, 1, 2)}
    dp = protect(d)
    l = [1, 'x', ['a', 'b'], ('c', 'd'), {'x': 1, 'y': 2}]
    lp = protect(l)
    t = (1, 'x', ['a', 'b'], ('c', 'd'), {'x': 1, 'y': 2})
    tp = protect(t)
mit
nitzmahone/ansible-modules-extras
system/iptables.py
41
20990
#!/usr/bin/python # -*- coding: utf-8 -*- # # (c) 2015, Linus Unnebäck <linus@folkdatorn.se> # # This file is part of Ansible # # This module is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This software is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this software. If not, see <http://www.gnu.org/licenses/>. BINS = dict( ipv4='iptables', ipv6='ip6tables', ) DOCUMENTATION = ''' --- module: iptables short_description: Modify the systems iptables requirements: [] version_added: "2.0" author: Linus Unnebäck (@LinusU) <linus@folkdatorn.se> description: - Iptables is used to set up, maintain, and inspect the tables of IP packet filter rules in the Linux kernel. This module does not handle the saving and/or loading of rules, but rather only manipulates the current rules that are present in memory. This is the same as the behaviour of the "iptables" and "ip6tables" command which this module uses internally. notes: - This module just deals with individual rules. If you need advanced chaining of rules the recommended way is to template the iptables restore file. options: table: description: - This option specifies the packet matching table which the command should operate on. If the kernel is configured with automatic module loading, an attempt will be made to load the appropriate module for that table if it is not already there. required: false default: filter choices: [ "filter", "nat", "mangle", "raw", "security" ] state: description: - Whether the rule should be absent or present. 
required: false default: present choices: [ "present", "absent" ] action: version_added: "2.2" description: - Whether the rule should be appended at the bottom or inserted at the top. If the rule already exists the chain won't be modified. required: false default: append choices: [ "append", "insert" ] ip_version: description: - Which version of the IP protocol this rule should apply to. required: false default: ipv4 choices: [ "ipv4", "ipv6" ] chain: description: - "Chain to operate on. This option can either be the name of a user defined chain or any of the builtin chains: 'INPUT', 'FORWARD', 'OUTPUT', 'PREROUTING', 'POSTROUTING', 'SECMARK', 'CONNSECMARK'." required: false protocol: description: - The protocol of the rule or of the packet to check. The specified protocol can be one of tcp, udp, udplite, icmp, esp, ah, sctp or the special keyword "all", or it can be a numeric value, representing one of these protocols or a different one. A protocol name from /etc/protocols is also allowed. A "!" argument before the protocol inverts the test. The number zero is equivalent to all. "all" will match with all protocols and is taken as default when this option is omitted. required: false default: null source: description: - Source specification. Address can be either a network name, a hostname, a network IP address (with /mask), or a plain IP address. Hostnames will be resolved once only, before the rule is submitted to the kernel. Please note that specifying any name to be resolved with a remote query such as DNS is a really bad idea. The mask can be either a network mask or a plain number, specifying the number of 1's at the left side of the network mask. Thus, a mask of 24 is equivalent to 255.255.255.0. A "!" argument before the address specification inverts the sense of the address. required: false default: null destination: description: - Destination specification. 
Address can be either a network name, a hostname, a network IP address (with /mask), or a plain IP address. Hostnames will be resolved once only, before the rule is submitted to the kernel. Please note that specifying any name to be resolved with a remote query such as DNS is a really bad idea. The mask can be either a network mask or a plain number, specifying the number of 1's at the left side of the network mask. Thus, a mask of 24 is equivalent to 255.255.255.0. A "!" argument before the address specification inverts the sense of the address. required: false default: null match: description: - Specifies a match to use, that is, an extension module that tests for a specific property. The set of matches make up the condition under which a target is invoked. Matches are evaluated first to last if specified as an array and work in short-circuit fashion, i.e. if one extension yields false, evaluation will stop. required: false default: [] jump: description: - This specifies the target of the rule; i.e., what to do if the packet matches it. The target can be a user-defined chain (other than the one this rule is in), one of the special builtin targets which decide the fate of the packet immediately, or an extension (see EXTENSIONS below). If this option is omitted in a rule (and the goto paramater is not used), then matching the rule will have no effect on the packet's fate, but the counters on the rule will be incremented. required: false default: null goto: description: - This specifies that the processing should continue in a user specified chain. Unlike the jump argument return will not continue processing in this chain but instead in the chain that called us via jump. required: false default: null in_interface: description: - Name of an interface via which a packet was received (only for packets entering the INPUT, FORWARD and PREROUTING chains). When the "!" argument is used before the interface name, the sense is inverted. 
If the interface name ends in a "+", then any interface which begins with this name will match. If this option is omitted, any interface name will match. required: false default: null out_interface: description: - Name of an interface via which a packet is going to be sent (for packets entering the FORWARD, OUTPUT and POSTROUTING chains). When the "!" argument is used before the interface name, the sense is inverted. If the interface name ends in a "+", then any interface which begins with this name will match. If this option is omitted, any interface name will match. required: false default: null fragment: description: - This means that the rule only refers to second and further fragments of fragmented packets. Since there is no way to tell the source or destination ports of such a packet (or ICMP type), such a packet will not match any rules which specify them. When the "!" argument precedes fragment argument, the rule will only match head fragments, or unfragmented packets. required: false default: null set_counters: description: - This enables the administrator to initialize the packet and byte counters of a rule (during INSERT, APPEND, REPLACE operations). required: false default: null source_port: description: - "Source port or port range specification. This can either be a service name or a port number. An inclusive range can also be specified, using the format first:last. If the first port is omitted, '0' is assumed; if the last is omitted, '65535' is assumed. If the first port is greater than the second one they will be swapped." required: false default: null destination_port: description: - "Destination port or port range specification. This can either be a service name or a port number. An inclusive range can also be specified, using the format first:last. If the first port is omitted, '0' is assumed; if the last is omitted, '65535' is assumed. If the first port is greater than the second one they will be swapped." 
required: false default: null to_ports: description: - "This specifies a destination port or range of ports to use: without this, the destination port is never altered. This is only valid if the rule also specifies one of the following protocols: tcp, udp, dccp or sctp." required: false default: null to_destination: version_added: "2.1" description: - "This specifies a destination address to use with DNAT: without this, the destination address is never altered." required: false default: null to_source: version_added: "2.2" description: - "This specifies a source address to use with SNAT: without this, the source address is never altered." required: false default: null set_dscp_mark: version_added: "2.1" description: - "This allows specifying a DSCP mark to be added to packets. It takes either an integer or hex value. Mutually exclusive with C(set_dscp_mark_class)." required: false default: null set_dscp_mark_class: version_added: "2.1" description: - "This allows specifying a predefined DiffServ class which will be translated to the corresponding DSCP mark. Mutually exclusive with C(set_dscp_mark)." required: false default: null comment: description: - "This specifies a comment that will be added to the rule" required: false default: null ctstate: description: - "ctstate is a list of the connection states to match in the conntrack module. Possible states are: 'INVALID', 'NEW', 'ESTABLISHED', 'RELATED', 'UNTRACKED', 'SNAT', 'DNAT'" required: false default: [] limit: description: - "Specifies the maximum average number of matches to allow per second. The number can specify units explicitly, using `/second', `/minute', `/hour' or `/day', or parts of them (so `5/second' is the same as `5/s')." required: false default: null limit_burst: version_added: "2.1" description: - "Specifies the maximum burst before the above limit kicks in." 
required: false default: null uid_owner: version_added: "2.1" description: - "Specifies the UID or username to use in match by owner rule." required: false reject_with: version_added: "2.1" description: - "Specifies the error packet type to return while rejecting." required: false icmp_type: version_added: "2.2" description: - "This allows specification of the ICMP type, which can be a numeric ICMP type, type/code pair, or one of the ICMP type names shown by the command 'iptables -p icmp -h'" required: false flush: version_added: "2.2" description: - "Flushes the specified table and chain of all rules. If no chain is specified then the entire table is purged. Ignores all other parameters." required: false policy: version_added: "2.2" description: - "Set the policy for the chain to the given target. Valid targets are ACCEPT, DROP, QUEUE, RETURN. Only built in chains can have policies. This parameter requires the chain parameter. Ignores all other parameters." ''' EXAMPLES = ''' # Block specific IP - iptables: chain=INPUT source=8.8.8.8 jump=DROP become: yes # Forward port 80 to 8600 - iptables: table=nat chain=PREROUTING in_interface=eth0 protocol=tcp match=tcp destination_port=80 jump=REDIRECT to_ports=8600 comment="Redirect web traffic to port 8600" become: yes # Allow related and established connections - iptables: chain=INPUT ctstate=ESTABLISHED,RELATED jump=ACCEPT become: yes # Tag all outbound tcp packets with DSCP mark 8 - iptables: chain=OUTPUT jump=DSCP table=mangle set_dscp_mark=8 protocol=tcp # Tag all outbound tcp packets with DSCP DiffServ class CS1 - iptables: chain=OUTPUT jump=DSCP table=mangle set_dscp_mark_class=CS1 protocol=tcp ''' def append_param(rule, param, flag, is_list): if is_list: for item in param: append_param(rule, item, flag, False) else: if param is not None: rule.extend([flag, param]) def append_csv(rule, param, flag): if param: rule.extend([flag, ','.join(param)]) def append_match(rule, param, match): if param: rule.extend(['-m', 
match]) def append_jump(rule, param, jump): if param: rule.extend(['-j', jump]) def construct_rule(params): rule = [] append_param(rule, params['protocol'], '-p', False) append_param(rule, params['source'], '-s', False) append_param(rule, params['destination'], '-d', False) append_param(rule, params['match'], '-m', True) append_param(rule, params['jump'], '-j', False) append_param(rule, params['to_destination'], '--to-destination', False) append_param(rule, params['to_source'], '--to-source', False) append_param(rule, params['goto'], '-g', False) append_param(rule, params['in_interface'], '-i', False) append_param(rule, params['out_interface'], '-o', False) append_param(rule, params['fragment'], '-f', False) append_param(rule, params['set_counters'], '-c', False) append_param(rule, params['source_port'], '--source-port', False) append_param(rule, params['destination_port'], '--destination-port', False) append_param(rule, params['to_ports'], '--to-ports', False) append_param(rule, params['set_dscp_mark'], '--set-dscp', False) append_param( rule, params['set_dscp_mark_class'], '--set-dscp-class', False) append_match(rule, params['comment'], 'comment') append_param(rule, params['comment'], '--comment', False) append_match(rule, params['ctstate'], 'state') append_csv(rule, params['ctstate'], '--state') append_match(rule, params['limit'] or params['limit_burst'], 'limit') append_param(rule, params['limit'], '--limit', False) append_param(rule, params['limit_burst'], '--limit-burst', False) append_match(rule, params['uid_owner'], 'owner') append_param(rule, params['uid_owner'], '--uid-owner', False) append_jump(rule, params['reject_with'], 'REJECT') append_param(rule, params['reject_with'], '--reject-with', False) append_param(rule, params['icmp_type'], '--icmp-type', False) return rule def push_arguments(iptables_path, action, params, make_rule=True): cmd = [iptables_path] cmd.extend(['-t', params['table']]) cmd.extend([action, params['chain']]) if make_rule: 
cmd.extend(construct_rule(params)) return cmd def check_present(iptables_path, module, params): cmd = push_arguments(iptables_path, '-C', params) rc, _, __ = module.run_command(cmd, check_rc=False) return (rc == 0) def append_rule(iptables_path, module, params): cmd = push_arguments(iptables_path, '-A', params) module.run_command(cmd, check_rc=True) def insert_rule(iptables_path, module, params): cmd = push_arguments(iptables_path, '-I', params) module.run_command(cmd, check_rc=True) def remove_rule(iptables_path, module, params): cmd = push_arguments(iptables_path, '-D', params) module.run_command(cmd, check_rc=True) def flush_table(iptables_path, module, params): cmd = push_arguments(iptables_path, '-F', params, make_rule=False) module.run_command(cmd, check_rc=True) def set_chain_policy(iptables_path, module, params): cmd = push_arguments(iptables_path, '-P', params, make_rule=False) cmd.append(params['policy']) module.run_command(cmd, check_rc=True) def main(): module = AnsibleModule( supports_check_mode=True, argument_spec=dict( table=dict( required=False, default='filter', choices=['filter', 'nat', 'mangle', 'raw', 'security']), state=dict( required=False, default='present', choices=['present', 'absent']), action=dict( required=False, default='append', type='str', choices=['append', 'insert']), ip_version=dict( required=False, default='ipv4', choices=['ipv4', 'ipv6']), chain=dict(required=False, default=None, type='str'), protocol=dict(required=False, default=None, type='str'), source=dict(required=False, default=None, type='str'), to_source=dict(required=False, default=None, type='str'), destination=dict(required=False, default=None, type='str'), to_destination=dict(required=False, default=None, type='str'), match=dict(required=False, default=[], type='list'), jump=dict(required=False, default=None, type='str'), goto=dict(required=False, default=None, type='str'), in_interface=dict(required=False, default=None, type='str'), out_interface=dict(required=False, 
default=None, type='str'), fragment=dict(required=False, default=None, type='str'), set_counters=dict(required=False, default=None, type='str'), source_port=dict(required=False, default=None, type='str'), destination_port=dict(required=False, default=None, type='str'), to_ports=dict(required=False, default=None, type='str'), set_dscp_mark=dict(required=False, default=None, type='str'), set_dscp_mark_class=dict(required=False, default=None, type='str'), comment=dict(required=False, default=None, type='str'), ctstate=dict(required=False, default=[], type='list'), limit=dict(required=False, default=None, type='str'), limit_burst=dict(required=False, default=None, type='str'), uid_owner=dict(required=False, default=None, type='str'), reject_with=dict(required=False, default=None, type='str'), icmp_type=dict(required=False, default=None, type='str'), flush=dict(required=False, default=False, type='bool'), policy=dict( required=False, default=None, type='str', choices=['ACCEPT', 'DROP', 'QUEUE', 'RETURN']), ), mutually_exclusive=( ['set_dscp_mark', 'set_dscp_mark_class'], ['flush', 'policy'], ), ) args = dict( changed=False, failed=False, ip_version=module.params['ip_version'], table=module.params['table'], chain=module.params['chain'], flush=module.params['flush'], rule=' '.join(construct_rule(module.params)), state=module.params['state'], ) ip_version = module.params['ip_version'] iptables_path = module.get_bin_path(BINS[ip_version], True) # Check if chain option is required if args['flush'] is False and args['chain'] is None: module.fail_json( msg="Either chain or flush parameter must be specified.") # Flush the table if args['flush'] is True: flush_table(iptables_path, module, module.params) module.exit_json(**args) # Set the policy if module.params['policy']: set_chain_policy(iptables_path, module, module.params) module.exit_json(**args) insert = (module.params['action'] == 'insert') rule_is_present = check_present(iptables_path, module, module.params) 
should_be_present = (args['state'] == 'present') # Check if target is up to date args['changed'] = (rule_is_present != should_be_present) # Check only; don't modify if module.check_mode: module.exit_json(changed=args['changed']) # Target is already up to date if args['changed'] is False: module.exit_json(**args) if should_be_present: if insert: insert_rule(iptables_path, module, module.params) else: append_rule(iptables_path, module, module.params) else: remove_rule(iptables_path, module, module.params) module.exit_json(**args) # import module snippets from ansible.module_utils.basic import * if __name__ == '__main__': main()
gpl-3.0
supergis/QGIS
python/ext-libs/pyspatialite/lib/dump.py
45
2375
# -*- coding: utf-8 -*-
# Mimic the sqlite3 console shell's .dump command
# Author: Paul Kippes <kippesp@gmail.com>


def _iterdump(connection):
    """
    Returns an iterator to the dump of the database in an SQL text format.

    Used to produce an SQL dump of the database.  Useful to save an in-memory
    database for later restoration.  This function should not be called
    directly but instead called from the Connection method, iterdump().
    """
    cu = connection.cursor()
    yield('BEGIN TRANSACTION;')

    # sqlite_master table contains the SQL CREATE statements for the database.
    # Dump the user tables first so the row INSERTs below have a target.
    table_schema_query = """
        SELECT name, type, sql
        FROM sqlite_master
            WHERE sql NOT NULL AND
            type == 'table'
        """
    # NOTE: the loop variable for the schema row type is deliberately not
    # called ``type`` -- the original shadowed the builtin.
    for table_name, _row_type, sql in cu.execute(table_schema_query).fetchall():
        if table_name == 'sqlite_sequence':
            # AUTOINCREMENT bookkeeping table: clear it, then let the generic
            # row dump below re-populate it.
            yield('DELETE FROM sqlite_sequence;')
        elif table_name == 'sqlite_stat1':
            # ANALYZE statistics are regenerated on restore, not copied.
            yield('ANALYZE sqlite_master;')
        elif table_name.startswith('sqlite_'):
            # Any other internal table is never dumped (and its rows are
            # skipped too, via this continue).
            continue
        # NOTE: Virtual table support not implemented
        else:
            yield('%s;' % sql)

        # Build one INSERT statement per row of the current table.  The value
        # escaping is delegated to SQLite's own quote() SQL function.
        res = cu.execute("PRAGMA table_info('%s')" % table_name)
        column_names = [str(table_info[1]) for table_info in res.fetchall()]
        query_parts = ["SELECT 'INSERT INTO \"%(tbl_name)s\" VALUES("]
        query_parts.append(
            ",".join("'||quote(" + col + ")||'" for col in column_names))
        query_parts.append(")' FROM '%(tbl_name)s'")
        row_query = "".join(query_parts) % {'tbl_name': table_name}
        for row in cu.execute(row_query):
            yield("%s;" % row[0])

    # Now when the type is 'index', 'trigger', or 'view': these refer to
    # tables and data, so they are emitted after the rows.
    other_schema_query = """
        SELECT name, type, sql
        FROM sqlite_master
            WHERE sql NOT NULL AND
            type IN ('index', 'trigger', 'view')
        """
    for _name, _row_type, sql in cu.execute(other_schema_query).fetchall():
        yield('%s;' % sql)

    yield('COMMIT;')


# Public, non-underscored alias (backward-compatible addition) so callers and
# ``import *`` users can reach the generator without touching a private name.
iterdump = _iterdump
gpl-2.0
SatoshiNXSimudrone/sl4a-damon-clone
python/gdata/tests/gdata_tests/apps/service_test_using_mock.py
87
2144
#!/usr/bin/python # # Copyright (C) 2007 SIOS Technology, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. __author__ = 'tmatsuo@sios.com (Takashi Matsuo)' import unittest try: from xml.etree import ElementTree except ImportError: from elementtree import ElementTree import atom import atom.service import atom.mock_service import gdata.apps import gdata.apps.service import getpass import time, os apps_domain = 'test.shehas.net' apps_username = 'xxxxx' apps_password = 'xxxxx' class AppsServiceUsingMockUnitTest01(unittest.TestCase): def setUp(self): email = apps_username + '@' + apps_domain self.apps_client = gdata.apps.service.AppsService( email=email, domain=apps_domain, password=apps_password, source='AppsClient "Unit" Tests') self.apps_client.handler = atom.mock_service datafile = os.path.join(os.path.dirname(os.path.abspath(__file__)), "TestDataForGeneratorTest.p") f = open(datafile, "r") atom.mock_service.LoadRecordings(f) f.close() self.user_names = [] for i in range(100): self.user_names += ['testuser-20080307140302-%03d' % i] self.user_names += ['tmatsuo'] self.user_num = 101 def tearDown(self): pass def test001GetGeneratorForAllUsers(self): """Tests GetGeneratorForAllUsers method""" generator = self.apps_client.GetGeneratorForAllUsers() i = 0 for user_feed in generator: for a_user in user_feed.entry: self.assert_(a_user.login.user_name == self.user_names[i]) i = i + 1 self.assert_(i == self.user_num) if __name__ == '__main__': unittest.main()
apache-2.0
shawnadelic/shuup
shuup/core/models/_service_payment.py
1
6664
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals, with_statement

import decimal

from django.db import models
from django.http.response import HttpResponseRedirect
from django.utils.timezone import now
from django.utils.translation import ugettext_lazy as _
from enumfields import Enum, EnumField
from parler.models import TranslatedFields

from shuup.utils.analog import define_log_model

from ._order_lines import OrderLineType
from ._orders import Order, PaymentStatus
from ._service_base import Service, ServiceChoice, ServiceProvider
from ._service_behavior import StaffOnlyBehaviorComponent


class PaymentMethod(Service):
    """
    Service for paying an order.

    Each payment method is provided by a `PaymentProcessor`, to which the
    actual payment processing is delegated.
    """
    payment_processor = models.ForeignKey(
        "PaymentProcessor", null=True, blank=True,
        on_delete=models.SET_NULL,
        verbose_name=_("payment processor"))

    translations = TranslatedFields(
        name=models.CharField(max_length=100, verbose_name=_("name")),
        description=models.CharField(
            max_length=500, blank=True, verbose_name=_("description")),
    )

    # Service framework hooks: order line type produced by this service, the
    # attribute holding the provider, and the ShopProduct M2M field name.
    line_type = OrderLineType.PAYMENT
    provider_attr = 'payment_processor'
    shop_product_m2m = "payment_methods"

    class Meta:
        verbose_name = _("payment method")
        verbose_name_plural = _("payment methods")

    def can_delete(self):
        """Return True only if no order references this payment method."""
        return not Order.objects.filter(payment_method=self).exists()

    def get_payment_process_response(self, order, urls):
        """
        Delegate payment processing of ``order`` to the payment processor.

        :type order: shuup.core.models.Order
        :type urls: PaymentUrls
        :rtype: django.http.HttpResponse|None
        """
        self._make_sure_is_usable()
        return self.provider.get_payment_process_response(self, order, urls)

    def process_payment_return_request(self, order, request):
        """
        Delegate handling of a payment return request to the processor.

        :type order: shuup.core.models.Order
        :type request: django.http.HttpRequest
        """
        self._make_sure_is_usable()
        self.provider.process_payment_return_request(self, order, request)


class PaymentProcessor(ServiceProvider):
    """
    Service provider interface for payment processing.

    Services provided by a payment processor are
    `payment methods <PaymentMethod>`.  To create a new payment method for a
    payment processor, use the `create_service` method.

    Implementers of this interface will provide a list of payment service
    choices and each related payment method should have one of those service
    choices assigned to it.

    Payment processing is handled with `get_payment_process_response` and
    `process_payment_return_request` methods.

    Note: `PaymentProcessor` objects should never be created on their own but
    rather through a concrete subclass.
    """
    service_model = PaymentMethod

    def delete(self, *args, **kwargs):
        # Deleting a processor disables its payment methods instead of
        # cascading; the FK is SET_NULL, so orders keep their history.
        PaymentMethod.objects.filter(payment_processor=self).update(
            **{"enabled": False})
        super(PaymentProcessor, self).delete(*args, **kwargs)

    def get_payment_process_response(self, service, order, urls):
        """
        Get payment process response for given order.

        Default implementation immediately redirects to the return URL
        without contacting any external service.

        :type service: shuup.core.models.PaymentMethod
        :type order: shuup.core.models.Order
        :type urls: PaymentUrls
        :rtype: django.http.HttpResponse|None
        """
        return HttpResponseRedirect(urls.return_url)

    def process_payment_return_request(self, service, order, request):
        """
        Process payment return request for given order.

        Should set ``order.payment_status``.  Default implementation just
        sets it to `~PaymentStatus.DEFERRED` if it is
        `~PaymentStatus.NOT_PAID`.

        :type service: shuup.core.models.PaymentMethod
        :type order: shuup.core.models.Order
        :type request: django.http.HttpRequest
        :rtype: None
        """
        if order.payment_status == PaymentStatus.NOT_PAID:
            order.payment_status = PaymentStatus.DEFERRED
            order.add_log_entry(
                "Payment status set to deferred by %s" % self)
            order.save(update_fields=("payment_status",))

    def _create_service(self, choice_identifier, **kwargs):
        # Bind the created payment method back to this processor and remember
        # which service choice it implements.
        return PaymentMethod.objects.create(
            payment_processor=self, choice_identifier=choice_identifier,
            **kwargs)


class PaymentUrls(object):
    """
    Container for URLs used in payment processing.

    :param payment_url: URL for initiating the payment
    :param return_url: URL the processor redirects to on completion
    :param cancel_url: URL the processor redirects to on cancellation
    """
    def __init__(self, payment_url, return_url, cancel_url):
        self.payment_url = payment_url
        self.return_url = return_url
        self.cancel_url = cancel_url


class RoundingMode(Enum):
    """Rounding modes for cash payments, mirroring `decimal` constants."""
    ROUND_HALF_UP = decimal.ROUND_HALF_UP
    ROUND_HALF_DOWN = decimal.ROUND_HALF_DOWN
    ROUND_UP = decimal.ROUND_UP
    ROUND_DOWN = decimal.ROUND_DOWN

    class Labels:
        ROUND_HALF_UP = _("round to nearest with ties going away from zero")
        ROUND_HALF_DOWN = _("round to nearest with ties going towards zero")
        ROUND_UP = _("round away from zero")
        ROUND_DOWN = _("round towards zero")


class CustomPaymentProcessor(PaymentProcessor):
    """
    Payment processor without any integration or special processing.

    Can be used for payment methods whose payments are processed manually.
    """
    rounding_quantize = models.DecimalField(
        max_digits=36, decimal_places=9, default=decimal.Decimal('0.05'),
        verbose_name=_("rounding quantize"),
        help_text=_("Rounding quantize for cash payment."))
    rounding_mode = EnumField(
        RoundingMode, max_length=50, default=RoundingMode.ROUND_HALF_UP,
        verbose_name=_("rounding mode"),
        help_text=_("Rounding mode for cash payment."))

    class Meta:
        verbose_name = _("custom payment processor")
        verbose_name_plural = _("custom payment processors")

    def get_service_choices(self):
        return [
            ServiceChoice('manual', _("Manually processed payment")),
            ServiceChoice('cash', _("Cash payment"))
        ]

    def _create_service(self, choice_identifier, **kwargs):
        service = super(CustomPaymentProcessor, self)._create_service(
            choice_identifier, **kwargs)
        if choice_identifier == 'cash':
            # Cash payments can only be confirmed by staff.
            service.behavior_components.add(
                StaffOnlyBehaviorComponent.objects.create())
        return service

    def process_payment_return_request(self, service, order, request):
        # BUG FIX: ``service`` is a PaymentMethod instance (the base-class
        # contract, and PaymentMethod.process_payment_return_request passes
        # the method object itself), so the previous comparison
        # ``service == 'cash'`` was always False and cash payments were never
        # recorded.  Compare the service choice identifier instead.
        if service.choice_identifier == 'cash':
            if not order.is_paid():
                order.create_payment(
                    order.taxful_total_price,
                    payment_identifier="Cash-%s" % now().isoformat(),
                    description="Cash Payment"
                )


PaymentMethodLogEntry = define_log_model(PaymentMethod)
PaymentProcessorLogEntry = define_log_model(PaymentProcessor)
agpl-3.0
yongtang/tensorflow
tensorflow/python/distribute/step_fn.py
18
4196
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The step function abstraction represents a single training step."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.python.eager import backprop
from tensorflow.python.training import optimizer as optimizer_lib


class Step(object):
  """Interface for performing each step of a training algorithm."""

  def __init__(self, distribution):
    # The DistributionStrategy this step runs under.
    self._distribution = distribution

  @property
  def distribution(self):
    return self._distribution

  def initialize(self):
    # Ops to run before the first step; base implementation has none.
    return []

  def __call__(self):
    """Perform one step of this training algorithm."""
    raise NotImplementedError("must be implemented in descendants")

  # TODO(priyag): Add an method to access initialization and finalize ops.


class StandardInputStep(Step):
  """Step with a standard implementation of input handling.

  Args:
    dataset_fn: a function that returns a tf.data Dataset that produces the
      input for the model.
  """

  def __init__(self, dataset_fn, distribution):
    super(StandardInputStep, self).__init__(distribution)
    # Distribute the dataset across replicas; the lambda ignores the input
    # context argument supplied by make_input_fn_iterator.
    self._iterator = distribution.make_input_fn_iterator(lambda _: dataset_fn())

  def initialize(self):
    # The iterator's initializer must run before the first __call__.
    return self._iterator.initializer


class StandardSingleLossStep(StandardInputStep):
  """A step function that implements a training step for a feed forward network.

  An instance of this class is intended to be used as a callable:

  ```python
  ...
  step = step_fn.StandardSingleLossStep(
      dataset, loss_fn, optimizer, distribution)

  # Run a single training step on a given DistributionStrategy:
  step(distribution)
  ...
  ```

  Args:
    dataset_fn: a function that returns a tf.data Dataset that produces the
      input for the model.
    loss_fn: a function that takes a context and inputs as arguments. It returns
      the loss for those inputs. `context` is an instance of
      `values.MultiStepContext` that will be passed when `loss_fn` is run.
      `context` can be used to specify the outputs to be returned from
      `loss_fn`, among other things.
    optimizer: an optimizer that implements an update rule.
    distribution: a `DistributionStrategy` object.
  """

  def __init__(self, dataset_fn, loss_fn, optimizer, distribution,
               iterations_per_step=1):
    super(StandardSingleLossStep, self).__init__(dataset_fn, distribution)
    self._loss_fn = loss_fn
    self._optimizer = optimizer
    # Number of iterations to run per __call__ on the iterator.
    self._iterations_per_step = iterations_per_step

  def __call__(self):
    with self._distribution.scope():
      def step_fn(ctx, inputs):
        """Function to run one iteration with one input."""
        # Wrap the loss to produce (gradient, variable) pairs, filtered to
        # the variables the optimizer should actually update.
        gradients_fn = backprop.implicit_grad(self._loss_fn)
        gradients_fn = optimizer_lib.get_filtered_grad_fn(gradients_fn)

        grads_and_vars = self.distribution.extended.call_for_each_replica(
            gradients_fn, args=(ctx, inputs))
        # If threads use layers, then we need to run the first step
        # sequentially, so that layers.build() is not executed in parallel.
        # Otherwise, multiple sets of mirrored variables are going to be
        # created.
        return self._optimizer._distributed_apply(  # pylint: disable=protected-access
            self.distribution, grads_and_vars)

      # TODO(priyag): Return the outputs, context, etc as well.
      ctx = self.distribution.extended.experimental_run_steps_on_iterator(
          step_fn, self._iterator, self._iterations_per_step)
      return ctx.run_op
apache-2.0
mihaip/NewsBlur
vendor/feedvalidator/demo/src/rdflib/Literal.py
18
1969
from sys import version_info
if version_info[0:2] > (2, 2):
    from unicodedata import normalize
else:
    # unicodedata.normalize is unavailable on very old Pythons; the NFC
    # check in __init__ is simply skipped in that case.
    normalize = None

from rdflib.Identifier import Identifier
from rdflib.exceptions import Error


class Literal(Identifier):
    """
    An RDF literal: a lexical value with an optional language tag or datatype.

    http://www.w3.org/TR/rdf-concepts/#section-Graph-Literal
    """
    def __new__(cls, value, lang='', datatype=''):
        # The literal's lexical form IS the (unicode) string value;
        # lang/datatype are attached afterwards in __init__.
        value = unicode(value)
        return Identifier.__new__(cls, value)

    def __init__(self, value, lang='', datatype=''):
        # Reject lexical forms that are not in Unicode Normal Form C
        # (only when unicodedata is available, see module top).
        if normalize and value:
            if not isinstance(value, unicode):
                value = unicode(value)
            if value != normalize("NFC", value):
                raise Error("value must be in NFC normalized form.")
        # A typed literal cannot also carry a language tag; the datatype wins.
        if datatype:
            lang = ''
        self.language = lang
        self.datatype = datatype

    def __add__(self, val):
        # Concatenation preserves this literal's language and datatype.
        s = super(Literal, self).__add__(val)
        return Literal(s, self.language, self.datatype)

    def n3(self):
        """Serialize this literal in N3/N-Triples syntax."""
        language = self.language
        datatype = self.datatype
        # unicode-escape keeps the output ASCII-safe.
        encoded = self.encode('unicode-escape')
        if language:
            if datatype:
                # NOTE(review): unreachable in practice -- __init__ clears
                # the language whenever a datatype is set.
                return '"%s"@%s^^<%s>' % (encoded, language, datatype)
            else:
                return '"%s"@%s' % (encoded, language)
        else:
            if datatype:
                return '"%s"^^<%s>' % (encoded, datatype)
            else:
                return '"%s"' % encoded

    def __eq__(self, other):
        # Pre-bool style: returns 1/0 rather than True/False.
        # NOTE(review): the datatype is NOT considered in equality here,
        # only the lexical value and the language tag.
        if other==None:
            return 0
        elif isinstance(other, Literal):
            # Equal only if both the lexical value and the language match.
            result = self.__cmp__(other)==0
            if result==1:
                if self.language==other.language:
                    return 1
                else:
                    return 0
            else:
                return result
        elif isinstance(other, Identifier):
            # A non-Literal Identifier (e.g. a URI ref) never equals a Literal.
            return 0
        else:
            # Fall back to comparing the raw unicode value.
            return unicode(self)==other
mit
xrg/django-static-gitified
django/contrib/formtools/tests/forms.py
99
1164
from django import forms
from django.contrib.formtools.wizard import FormWizard
from django.http import HttpResponse


# Fixture forms for a three-page wizard (Page1 -> Page2 -> Page3), driven by
# ContactWizard below.
class Page1(forms.Form):
    name = forms.CharField(max_length=100)
    thirsty = forms.NullBooleanField()


class Page2(forms.Form):
    address1 = forms.CharField(max_length=100)
    address2 = forms.CharField(max_length=100)


class Page3(forms.Form):
    random_crap = forms.CharField(max_length=100)


class ContactWizard(FormWizard):
    # Minimal done() implementation: tests only need the wizard to complete.
    def done(self, request, form_list):
        return HttpResponse("")


class TestForm(forms.Form):
    field1 = forms.CharField()
    # Note: differs from field1 only by a trailing underscore.
    field1_ = forms.CharField()
    bool1 = forms.BooleanField(required=False)


class HashTestForm(forms.Form):
    name = forms.CharField()
    bio = forms.CharField()


# Same fields as HashTestForm, but nothing is required.
class HashTestBlankForm(forms.Form):
    name = forms.CharField(required=False)
    bio = forms.CharField(required=False)


# Single-field forms used as individual wizard pages.
class WizardPageOneForm(forms.Form):
    field = forms.CharField()


class WizardPageTwoForm(forms.Form):
    field = forms.CharField()


class WizardPageTwoAlternativeForm(forms.Form):
    field = forms.CharField()


class WizardPageThreeForm(forms.Form):
    field = forms.CharField()
bsd-3-clause
ubgarbage/gae-blog
django/http/utils.py
200
3268
"""
Functions that modify an HTTP request or response in some way.
"""

# This group of functions are run as part of the response handling, after
# everything else, including all response middleware. Think of them as
# "compulsory response middleware". Be careful about what goes here, because
# it's a little fiddly to override this behavior, so they should be truly
# universally applicable.


def fix_location_header(request, response):
    """
    Ensures that we always use an absolute URI in any location header in
    the response. This is required by RFC 2616, section 14.30.

    Code constructing response objects is free to insert relative paths, as
    this function converts them to absolute paths.
    """
    # Only rewrite when a host is known; build_absolute_uri needs it.
    if 'Location' in response and request.get_host():
        response['Location'] = request.build_absolute_uri(response['Location'])
    return response


def conditional_content_removal(request, response):
    """
    Removes the content of responses for HEAD requests, 1xx, 204 and 304
    responses. Ensures compliance with RFC 2616, section 4.3.
    """
    if 100 <= response.status_code < 200 or response.status_code in (204, 304):
        response.content = ''
        response['Content-Length'] = 0
    if request.method == 'HEAD':
        # For HEAD only the body is dropped; Content-Length (if any) is kept.
        response.content = ''
    return response


def fix_IE_for_attach(request, response):
    """
    This function will prevent Django from serving a Content-Disposition header
    while expecting the browser to cache it (only when the browser is IE). This
    leads to IE not allowing the client to download.
    """
    useragent = request.META.get('HTTP_USER_AGENT', '').upper()
    # Apply the workaround only to IE (including Chrome Frame).
    if 'MSIE' not in useragent and 'CHROMEFRAME' not in useragent:
        return response

    offending_headers = ('no-cache', 'no-store')
    if response.has_header('Content-Disposition'):
        # Pragma and the offending Cache-Control values prevent IE from
        # saving the attachment; strip them.
        try:
            del response['Pragma']
        except KeyError:
            pass
        if response.has_header('Cache-Control'):
            cache_control_values = [value.strip() for value in
                    response['Cache-Control'].split(',')
                    if value.strip().lower() not in offending_headers]
            # Drop the header entirely if nothing survives the filter.
            if not len(cache_control_values):
                del response['Cache-Control']
            else:
                response['Cache-Control'] = ', '.join(cache_control_values)

    return response


def fix_IE_for_vary(request, response):
    """
    This function will fix the bug reported at
    http://support.microsoft.com/kb/824847/en-us?spid=8722&sid=global
    by clearing the Vary header whenever the mime-type is not safe
    enough for Internet Explorer to handle.  Poor thing.
    """
    useragent = request.META.get('HTTP_USER_AGENT', '').upper()
    if 'MSIE' not in useragent and 'CHROMEFRAME' not in useragent:
        return response

    # These mime-types that are decreed "Vary-safe" for IE:
    safe_mime_types = ('text/html', 'text/plain', 'text/sgml')

    # The first part of the Content-Type field will be the MIME type,
    # everything after ';', such as character-set, can be ignored.
    if response['Content-Type'].split(';')[0] not in safe_mime_types:
        try:
            del response['Vary']
        except KeyError:
            pass

    return response
bsd-3-clause